1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #include "code-stubs.h"
// Runs the Hydrogen optimization pipeline on |graph| and lowers it to a
// Lithium chunk for code generation. Bailouts here are fatal: code stubs
// have no deoptimization fallback path, so failure to optimize aborts.
// NOTE(review): this excerpt has interior lines elided (closing braces and
// the chunk NULL-check branch are missing); comments describe visible code.
38 static LChunk* OptimizeGraph(HGraph* graph) {
// Scoped debug assertions: no GC, no handle creation, and no handle
// dereference may happen while the graph is being optimized.
39 DisallowHeapAllocation no_allocation;
40 DisallowHandleAllocation no_handles;
41 DisallowHandleDereference no_deref;
43 ASSERT(graph != NULL);
44 BailoutReason bailout_reason = kNoReason;
45 if (!graph->Optimize(&bailout_reason)) {
46 FATAL(GetBailoutReason(bailout_reason));
48 LChunk* chunk = LChunk::NewChunk(graph);
// Presumably guarded by a NULL check on |chunk| in the elided line above —
// the bailout reason is fetched from the graph's CompilationInfo.
50 FATAL(GetBailoutReason(graph->info()->bailout_reason()));
// Common base class for Hydrogen graph builders that compile code stubs.
// Subclasses implement BuildCodeStub(); this base owns the compilation
// info, the stub's interface descriptor, and the register parameters.
// NOTE(review): class fragment — access specifiers and several member
// declarations are elided in this excerpt.
56 class CodeStubGraphBuilderBase : public HGraphBuilder {
58 CodeStubGraphBuilderBase(Isolate* isolate, HydrogenCodeStub* stub)
59 : HGraphBuilder(&info_),
60 arguments_length_(NULL),
// The descriptor determines how many register parameters the stub takes;
// the parameter array is allocated eagerly from that count.
63 descriptor_ = stub->GetInterfaceDescriptor(isolate);
64 parameters_.Reset(new HParameter*[descriptor_->register_param_count_]);
66 virtual bool BuildGraph();
// Hook for subclasses: builds the stub body and returns the stub's result.
69 virtual HValue* BuildCodeStub() = 0;
70 HParameter* GetParameter(int parameter) {
71 ASSERT(parameter < descriptor_->register_param_count_);
72 return parameters_[parameter];
74 HValue* GetArgumentsLength() {
75 // This is initialized in BuildGraph()
76 ASSERT(arguments_length_ != NULL);
77 return arguments_length_;
79 CompilationInfo* info() { return &info_; }
80 HydrogenCodeStub* stub() { return info_.code_stub(); }
81 HContext* context() { return context_; }
82 Isolate* isolate() { return info_.isolate(); }
// Shared helpers used by the Array/InternalArray constructor stubs below.
90 HValue* BuildArrayConstructor(ElementsKind kind,
91 AllocationSiteOverrideMode override_mode,
92 ArgumentClass argument_class);
93 HValue* BuildInternalArrayConstructor(ElementsKind kind,
94 ArgumentClass argument_class);
// Helpers for installing (optimized) code objects into a JSFunction.
96 void BuildInstallOptimizedCode(HValue* js_function, HValue* native_context,
98 void BuildInstallCode(HValue* js_function, HValue* shared_info);
99 void BuildInstallFromOptimizedCodeMap(HValue* js_function,
101 HValue* native_context);
104 HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
105 HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,
// Data members: parameter nodes, cached arguments length, compilation
// state and the stub's calling-convention descriptor.
108 SmartArrayPointer<HParameter*> parameters_;
109 HValue* arguments_length_;
110 CompilationInfoWithZone info_;
111 CodeStubInterfaceDescriptor* descriptor_;
// Builds the full Hydrogen graph for a stub: binds the register
// parameters, sets up the context, invokes the subclass BuildCodeStub(),
// and emits the final HReturn (popping stack parameters as required by
// the stub's calling convention).
// NOTE(review): interior lines are elided in this excerpt (some closing
// braces and else-branches are missing); comments describe visible code.
116 bool CodeStubGraphBuilderBase::BuildGraph() {
117 // Update the static counter each time a new code stub is generated.
118 isolate()->counters()->code_stubs()->Increment();
120 if (FLAG_trace_hydrogen_stubs) {
121 const char* name = CodeStub::MajorName(stub()->MajorKey(), false);
122 PrintF("-----------------------------------------------------------\n");
123 PrintF("Compiling stub %s using hydrogen\n", name);
124 isolate()->GetHTracer()->TraceCompilation(&info_);
127 int param_count = descriptor_->register_param_count_;
128 HEnvironment* start_environment = graph()->start_environment();
129 HBasicBlock* next_block = CreateBasicBlock(start_environment);
131 next_block->SetJoinId(BailoutId::StubEntry());
132 set_current_block(next_block);
134 bool runtime_stack_params = descriptor_->stack_parameter_count_.is_valid();
135 HInstruction* stack_parameter_count = NULL;
// Bind each register parameter. The designated parameter-count register
// (if any) is an untagged Integer32 holding the stack argument count.
136 for (int i = 0; i < param_count; ++i) {
137 Representation r = descriptor_->IsParameterCountRegister(i)
138 ? Representation::Integer32()
139 : Representation::Tagged();
140 HParameter* param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r);
141 start_environment->Bind(i, param);
142 parameters_[i] = param;
143 if (descriptor_->IsParameterCountRegister(i)) {
144 param->set_type(HType::Smi());
145 stack_parameter_count = param;
146 arguments_length_ = stack_parameter_count;
150 ASSERT(!runtime_stack_params || arguments_length_ != NULL);
// No runtime-provided stack parameter count: use constant placeholders.
151 if (!runtime_stack_params) {
152 stack_parameter_count = graph()->GetConstantMinus1();
153 arguments_length_ = graph()->GetConstant0();
156 context_ = Add<HContext>();
157 start_environment->BindContext(context_);
159 Add<HSimulate>(BailoutId::StubEntry());
161 NoObservableSideEffectsScope no_effects(this);
163 HValue* return_value = BuildCodeStub();
165 // We might have extra expressions to pop from the stack in addition to the
167 HInstruction* stack_pop_count = stack_parameter_count;
// JS-function-mode stubs also pop the receiver, hence the +1 when the
// count is dynamic; a non-negative hint overrides with a constant.
168 if (descriptor_->function_mode_ == JS_FUNCTION_STUB_MODE) {
169 if (!stack_parameter_count->IsConstant() &&
170 descriptor_->hint_stack_parameter_count_ < 0) {
171 HInstruction* constant_one = graph()->GetConstant1();
172 stack_pop_count = AddUncasted<HAdd>(stack_parameter_count, constant_one);
173 stack_pop_count->ClearFlag(HValue::kCanOverflow);
174 // TODO(mvstanton): verify that stack_parameter_count+1 really fits in a
177 int count = descriptor_->hint_stack_parameter_count_;
178 stack_pop_count = Add<HConstant>(count);
// current_block() may be NULL if the stub body ended in an unconditional
// deopt, in which case no return is emitted.
182 if (current_block() != NULL) {
183 HReturn* hreturn_instruction = New<HReturn>(return_value,
185 FinishCurrentBlock(hreturn_instruction);
// Concrete graph builder, templatized on the stub type. Dispatches to the
// initialized or uninitialized build path; uninitialized stubs simply
// force a deopt back to the runtime.
// NOTE(review): class fragment — access specifiers, some braces and a
// default BuildCodeInitializedStub body are elided in this excerpt.
191 template <class Stub>
192 class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
194 CodeStubGraphBuilder(Isolate* isolate, Stub* stub)
195 : CodeStubGraphBuilderBase(isolate, stub) {}
198 virtual HValue* BuildCodeStub() {
199 if (casted_stub()->IsUninitialized()) {
200 return BuildCodeUninitializedStub();
202 return BuildCodeInitializedStub();
206 virtual HValue* BuildCodeInitializedStub() {
211 virtual HValue* BuildCodeUninitializedStub() {
212 // Force a deopt that falls back to the runtime.
// Comparing undefined with itself is always true, so the IfNot branch is
// never taken and the Else branch deopts unconditionally.
213 HValue* undefined = graph()->GetConstantUndefined();
214 IfBuilder builder(this);
215 builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
217 builder.ElseDeopt("Forced deopt to runtime");
// Downcast to the concrete stub type stored in the compilation info.
221 Stub* casted_stub() { return static_cast<Stub*>(stub()); }
// Generates a small hand-assembled code object that tail-calls the stub's
// miss handler in the runtime — a faster entry path than the full
// stub-failure deoptimization mechanism for uninitialized stubs.
// NOTE(review): interior lines elided (code-description retrieval and the
// return statement are missing from this excerpt).
225 Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(Isolate* isolate) {
226 Factory* factory = isolate->factory();
228 // Generate the new code.
229 MacroAssembler masm(isolate, NULL, 256);
232 // Update the static counter each time a new code stub is generated.
233 isolate->counters()->code_stubs()->Increment();
235 // Generate the code for the stub.
236 masm.set_generating_stub(true);
// No frame is set up for this lightweight path.
237 NoCurrentFrameScope scope(&masm);
238 GenerateLightweightMiss(&masm);
241 // Create the code object.
245 // Copy the generated code into a heap object.
246 Code::Flags flags = Code::ComputeFlags(
252 Handle<Code> new_object = factory->NewCode(
253 desc, flags, masm.CodeObject(), NeedsImmovableCode());
// Shared driver for all GenerateCode() wrappers below: lazily initializes
// the stub's interface descriptor, short-circuits uninitialized stubs to
// the lightweight miss path, and otherwise builds + optimizes a Hydrogen
// graph and generates code from the resulting Lithium chunk.
// NOTE(review): interior lines elided (timer declaration, braces, return).
258 template <class Stub>
259 static Handle<Code> DoGenerateCode(Isolate* isolate, Stub* stub) {
260 CodeStub::Major major_key =
261 static_cast<HydrogenCodeStub*>(stub)->MajorKey();
262 CodeStubInterfaceDescriptor* descriptor =
263 isolate->code_stub_interface_descriptor(major_key);
// A negative register_param_count_ marks a descriptor not yet set up.
264 if (descriptor->register_param_count_ < 0) {
265 stub->InitializeInterfaceDescriptor(isolate, descriptor);
268 // If we are uninitialized we can use a light-weight stub to enter
269 // the runtime that is significantly faster than using the standard
270 // stub-failure deopt mechanism.
271 if (stub->IsUninitialized() && descriptor->has_miss_handler()) {
272 ASSERT(!descriptor->stack_parameter_count_.is_valid());
273 return stub->GenerateLightweightMissCode(isolate);
// The timer used below is presumably declared in an elided line here.
276 if (FLAG_profile_hydrogen_code_stub_compilation) {
279 CodeStubGraphBuilder<Stub> builder(isolate, stub);
280 LChunk* chunk = OptimizeGraph(builder.CreateGraph());
281 Handle<Code> code = chunk->Codegen();
282 if (FLAG_profile_hydrogen_code_stub_compilation) {
283 double ms = timer.Elapsed().InMillisecondsF();
284 PrintF("[Lazy compilation of %s took %0.3f ms]\n",
285 stub->GetName().get(), ms);
// ToNumberStub: returns the input unchanged if it is already a Smi or a
// heap number; otherwise calls the TO_NUMBER builtin.
// NOTE(review): template<> specialization headers and several braces are
// elided in this excerpt.
292 HValue* CodeStubGraphBuilder<ToNumberStub>::BuildCodeStub() {
293 HValue* value = GetParameter(0);
295 // Check if the parameter is already a SMI or heap number.
296 IfBuilder if_number(this);
297 if_number.If<HIsSmiAndBranch>(value);
298 if_number.OrIf<HCompareMap>(value, isolate()->factory()->heap_number_map());
301 // Return the number.
306 // Convert the parameter to number using the builtin.
307 HValue* function = AddLoadJSBuiltin(Builtins::TO_NUMBER);
308 Add<HPushArgument>(value);
309 Push(Add<HInvokeFunction>(function, 1));
317 Handle<Code> ToNumberStub::GenerateCode(Isolate* isolate) {
318 return DoGenerateCode(isolate, this);
// NumberToStringStub: converts a number to its string representation.
// Caller-saved doubles must be preserved across the call.
323 HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
324 info()->MarkAsSavesCallerDoubles();
325 HValue* number = GetParameter(NumberToStringStub::kNumber);
326 return BuildNumberToString(number, Type::Number(zone()));
330 Handle<Code> NumberToStringStub::GenerateCode(Isolate* isolate) {
331 return DoGenerateCode(isolate, this);
// FastCloneShallowArrayStub: clones a boilerplate array stored in an
// AllocationSite, dispatching on the literal's elements kind (COW fixed
// array, plain fixed array, double array, or a statically-known kind).
// Deopts if the boilerplate has not been initialized yet.
// NOTE(review): many interior lines elided (load-keyed arguments, builder
// Then()/Else() calls, BuildCloneShallowArray argument lists, braces).
336 HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
337 Factory* factory = isolate()->factory();
338 HValue* undefined = graph()->GetConstantUndefined();
339 AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
340 FastCloneShallowArrayStub::Mode mode = casted_stub()->mode();
341 int length = casted_stub()->length();
// Load the AllocationSite from the literals array (parameter 0).
343 HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
345 static_cast<HValue*>(NULL),
// Guard: an undefined allocation site means uninitialized boilerplate.
347 IfBuilder checker(this);
348 checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
352 HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
353 AllocationSite::kTransitionInfoOffset);
354 HInstruction* boilerplate = Add<HLoadNamedField>(
355 allocation_site, static_cast<HValue*>(NULL), access);
357 if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS) {
358 HValue* elements = AddLoadElements(boilerplate);
// Runtime dispatch on the boilerplate's elements map: COW, fixed, double.
360 IfBuilder if_fixed_cow(this);
361 if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
363 push_value = BuildCloneShallowArray(boilerplate,
368 environment()->Push(push_value);
371 IfBuilder if_fixed(this);
372 if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
374 push_value = BuildCloneShallowArray(boilerplate,
379 environment()->Push(push_value);
381 push_value = BuildCloneShallowArray(boilerplate,
384 FAST_DOUBLE_ELEMENTS,
386 environment()->Push(push_value);
// Statically-known elements kind: clone directly without dispatch.
388 ElementsKind elements_kind = casted_stub()->ComputeElementsKind();
389 push_value = BuildCloneShallowArray(boilerplate,
394 environment()->Push(push_value);
397 checker.ElseDeopt("Uninitialized boilerplate literals");
400 return environment()->Pop();
404 Handle<Code> FastCloneShallowArrayStub::GenerateCode(Isolate* isolate) {
405 return DoGenerateCode(isolate, this);
// FastCloneShallowObjectStub: allocates a new JSObject and copies the
// boilerplate's fields word by word. Verifies the boilerplate's instance
// size matches the stub's expectation, optionally appends an
// AllocationMemento for pretenuring, and deopts on an uninitialized
// boilerplate.
// NOTE(review): interior lines elided (load-keyed arguments, braces).
410 HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
411 HValue* undefined = graph()->GetConstantUndefined();
413 HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
415 static_cast<HValue*>(NULL),
418 IfBuilder checker(this);
419 checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
423 HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
424 AllocationSite::kTransitionInfoOffset);
425 HInstruction* boilerplate = Add<HLoadNamedField>(
426 allocation_site, static_cast<HValue*>(NULL), access);
// |object_size| is the JSObject proper; |size| additionally reserves room
// for the trailing AllocationMemento when pretenuring is enabled.
428 int size = JSObject::kHeaderSize + casted_stub()->length() * kPointerSize;
429 int object_size = size;
430 if (FLAG_allocation_site_pretenuring) {
431 size += AllocationMemento::kSize;
// Guard: the boilerplate's instance size (in words, from its map) must
// equal the size this stub was compiled for, else fall into ElseDeopt.
434 HValue* boilerplate_map = Add<HLoadNamedField>(
435 boilerplate, static_cast<HValue*>(NULL),
436 HObjectAccess::ForMap());
437 HValue* boilerplate_size = Add<HLoadNamedField>(
438 boilerplate_map, static_cast<HValue*>(NULL),
439 HObjectAccess::ForMapInstanceSize());
440 HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
441 checker.If<HCompareNumericAndBranch>(boilerplate_size,
442 size_in_words, Token::EQ);
445 HValue* size_in_bytes = Add<HConstant>(size);
447 HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
448 NOT_TENURED, JS_OBJECT_TYPE);
// Shallow field-by-field copy of the boilerplate into the new object.
450 for (int i = 0; i < object_size; i += kPointerSize) {
451 HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
452 Add<HStoreNamedField>(
453 object, access, Add<HLoadNamedField>(
454 boilerplate, static_cast<HValue*>(NULL), access));
457 ASSERT(FLAG_allocation_site_pretenuring || (size == object_size));
458 if (FLAG_allocation_site_pretenuring) {
459 BuildCreateAllocationMemento(
460 object, Add<HConstant>(object_size), allocation_site);
463 environment()->Push(object);
464 checker.ElseDeopt("Uninitialized boilerplate in fast clone");
467 return environment()->Pop();
471 Handle<Code> FastCloneShallowObjectStub::GenerateCode(Isolate* isolate) {
472 return DoGenerateCode(isolate, this);
// CreateAllocationSiteStub: allocates and initializes a new (tenured)
// AllocationSite, links it into the isolate's allocation-site list, and
// stores it into the feedback cell passed as parameter 0.
// NOTE(review): interior lines elided (allocate arguments, several store
// value operands, return statement).
477 HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
478 HValue* size = Add<HConstant>(AllocationSite::kSize);
479 HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,
483 Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
484 AddStoreMapConstant(object, allocation_site_map);
486 // Store the payload (smi elements kind)
487 HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
488 Add<HStoreNamedField>(object,
489 HObjectAccess::ForAllocationSiteOffset(
490 AllocationSite::kTransitionInfoOffset),
491 initial_elements_kind);
493 // Unlike literals, constructed arrays don't have nested sites
494 Add<HStoreNamedField>(object,
495 HObjectAccess::ForAllocationSiteOffset(
496 AllocationSite::kNestedSiteOffset),
497 graph()->GetConstant0());
499 // Pretenuring calculation field.
500 Add<HStoreNamedField>(object,
501 HObjectAccess::ForAllocationSiteOffset(
502 AllocationSite::kPretenureDataOffset),
503 graph()->GetConstant0());
505 // Pretenuring memento creation count field.
506 Add<HStoreNamedField>(object,
507 HObjectAccess::ForAllocationSiteOffset(
508 AllocationSite::kPretenureCreateCountOffset),
509 graph()->GetConstant0());
511 // Store an empty fixed array for the code dependency.
512 HConstant* empty_fixed_array =
513 Add<HConstant>(isolate()->factory()->empty_fixed_array());
514 HStoreNamedField* store = Add<HStoreNamedField>(
516 HObjectAccess::ForAllocationSiteOffset(
517 AllocationSite::kDependentCodeOffset),
520 // Link the object to the allocation site list
521 HValue* site_list = Add<HConstant>(
522 ExternalReference::allocation_sites_list_address(isolate()));
523 HValue* site = Add<HLoadNamedField>(
524 site_list, static_cast<HValue*>(NULL),
525 HObjectAccess::ForAllocationSiteList());
// The weak-next link skips the write barrier: the list head lives in
// off-heap isolate storage, not in a heap object the GC scans normally.
526 store = Add<HStoreNamedField>(object,
527 HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
529 store->SkipWriteBarrier();
530 Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),
533 // We use a hammer (SkipWriteBarrier()) to indicate that we know the input
534 // cell is really a Cell, and so no write barrier is needed.
535 // TODO(mvstanton): Add a debug_code check to verify the input cell is really
536 // a cell. (perhaps with a new instruction, HAssert).
537 HInstruction* cell = GetParameter(0);
538 HObjectAccess access = HObjectAccess::ForCellValue();
539 store = Add<HStoreNamedField>(cell, access, object);
540 store->SkipWriteBarrier();
545 Handle<Code> CreateAllocationSiteStub::GenerateCode(Isolate* isolate) {
546 return DoGenerateCode(isolate, this);
// KeyedLoadFastElementStub: monomorphic keyed load from a fast-elements
// backing store; is_load=false, holes are never returned to the caller.
// NOTE(review): braces/return lines elided in this excerpt.
551 HValue* CodeStubGraphBuilder<KeyedLoadFastElementStub>::BuildCodeStub() {
552 HInstruction* load = BuildUncheckedMonomorphicElementAccess(
553 GetParameter(0), GetParameter(1), NULL,
554 casted_stub()->is_js_array(), casted_stub()->elements_kind(),
555 false, NEVER_RETURN_HOLE, STANDARD_STORE);
560 Handle<Code> KeyedLoadFastElementStub::GenerateCode(Isolate* isolate) {
561 return DoGenerateCode(isolate, this);
// LoadFieldStub: loads a named field either in-object or from the
// out-of-object backing store, at the stub's baked-in offset and
// representation.
566 HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
567 Representation rep = casted_stub()->representation();
568 int offset = casted_stub()->offset();
569 HObjectAccess access = casted_stub()->is_inobject() ?
570 HObjectAccess::ForObservableJSObjectOffset(offset, rep) :
571 HObjectAccess::ForBackingStoreOffset(offset, rep);
572 return AddLoadNamedField(GetParameter(0), access);
576 Handle<Code> LoadFieldStub::GenerateCode(Isolate* isolate) {
577 return DoGenerateCode(isolate, this);
// KeyedLoadFieldStub: identical field-load logic to LoadFieldStub; the
// two stubs differ only in calling convention/IC kind.
582 HValue* CodeStubGraphBuilder<KeyedLoadFieldStub>::BuildCodeStub() {
583 Representation rep = casted_stub()->representation();
584 int offset = casted_stub()->offset();
585 HObjectAccess access = casted_stub()->is_inobject() ?
586 HObjectAccess::ForObservableJSObjectOffset(offset, rep) :
587 HObjectAccess::ForBackingStoreOffset(offset, rep);
588 return AddLoadNamedField(GetParameter(0), access);
592 Handle<Code> KeyedLoadFieldStub::GenerateCode(Isolate* isolate) {
593 return DoGenerateCode(isolate, this);
// KeyedStoreFastElementStub: monomorphic keyed store (is_store=true) into
// a fast-elements backing store; returns the stored value (parameter 2),
// matching JS assignment semantics.
598 HValue* CodeStubGraphBuilder<KeyedStoreFastElementStub>::BuildCodeStub() {
599 BuildUncheckedMonomorphicElementAccess(
600 GetParameter(0), GetParameter(1), GetParameter(2),
601 casted_stub()->is_js_array(), casted_stub()->elements_kind(),
602 true, NEVER_RETURN_HOLE, casted_stub()->store_mode());
604 return GetParameter(2);
608 Handle<Code> KeyedStoreFastElementStub::GenerateCode(Isolate* isolate) {
609 return DoGenerateCode(isolate, this);
// TransitionElementsKindStub: transitions an object's elements kind
// (from_kind -> to_kind) and returns the object. Caller-saved doubles are
// preserved because double-array copying may use FP registers.
614 HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
615 info()->MarkAsSavesCallerDoubles();
617 BuildTransitionElementsKind(GetParameter(0),
619 casted_stub()->from_kind(),
620 casted_stub()->to_kind(),
623 return GetParameter(0);
627 Handle<Code> TransitionElementsKindStub::GenerateCode(Isolate* isolate) {
628 return DoGenerateCode(isolate, this);
// Shared body for the Array constructor stubs: builds an array via
// JSArrayBuilder, dispatching on the argument class (no args, a single
// length arg, or N element args).
// NOTE(review): switch case labels, braces and return statements are
// elided in this excerpt.
631 HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
633 AllocationSiteOverrideMode override_mode,
634 ArgumentClass argument_class) {
635 HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
636 HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
637 JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
639 HValue* result = NULL;
640 switch (argument_class) {
641642 result = array_builder.AllocateEmptyArray();
645 result = BuildArraySingleArgumentConstructor(&array_builder);
648 result = BuildArrayNArgumentsConstructor(&array_builder, kind);
// InternalArray variant: same dispatch, but no allocation site is
// involved — internal arrays never track allocation feedback.
656 HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
657 ElementsKind kind, ArgumentClass argument_class) {
658 HValue* constructor = GetParameter(
659 InternalArrayConstructorStubBase::kConstructor);
660 JSArrayBuilder array_builder(this, kind, constructor);
662 HValue* result = NULL;
663 switch (argument_class) {
665 result = array_builder.AllocateEmptyArray();
668 result = BuildArraySingleArgumentConstructor(&array_builder);
671 result = BuildArrayNArgumentsConstructor(&array_builder, kind);
// new Array(len): fetches the single argument from the caller's frame and
// allocates an array of that length (with smi/range checking inside
// BuildAllocateArrayFromLength).
678 HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
679 JSArrayBuilder* array_builder) {
680 // Smi check and range check on the input arg.
681 HValue* constant_one = graph()->GetConstant1();
682 HValue* constant_zero = graph()->GetConstant0();
684 HInstruction* elements = Add<HArgumentsElements>(false);
685 HInstruction* argument = Add<HAccessArgumentsAt>(
686 elements, constant_one, constant_zero);
688 return BuildAllocateArrayFromLength(array_builder, argument);
// new Array(e0, e1, ... eN): bounds-checks the argument count against the
// fast-elements allocation limit, allocates the array, then copies the
// arguments into the elements store in a loop.
// NOTE(review): LoopBuilder arguments, AllocateArray arguments and the
// loop end / return lines are elided in this excerpt.
692 HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
693 JSArrayBuilder* array_builder, ElementsKind kind) {
694 // Insert a bounds check because the number of arguments might exceed
695 // the kInitialMaxFastElementArray limit. This cannot happen for code
696 // that was parsed, but calling via Array.apply(thisArg, [...]) might
698 HValue* length = GetArgumentsLength();
699 HConstant* max_alloc_length =
700 Add<HConstant>(JSObject::kInitialMaxFastElementArray);
701 HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);
703 // We need to fill with the hole if it's a smi array in the multi-argument
704 // case because we might have to bail out while copying arguments into
705 // the array because they aren't compatible with a smi array.
706 // If it's a double array, no problem, and if it's fast then no
707 // problem either because doubles are boxed.
709 // TODO(mvstanton): consider an instruction to memset fill the array
710 // with zero in this case instead.
711 JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
712 ? JSArrayBuilder::FILL_WITH_HOLE
713 : JSArrayBuilder::DONT_FILL_WITH_HOLE;
714 HValue* new_object = array_builder->AllocateArray(checked_length,
717 HValue* elements = array_builder->GetElementsLocation();
718 ASSERT(elements != NULL);
720 // Now populate the elements correctly.
721 LoopBuilder builder(this,
723 LoopBuilder::kPostIncrement);
724 HValue* start = graph()->GetConstant0();
725 HValue* key = builder.BeginBody(start, checked_length, Token::LT);
726 HInstruction* argument_elements = Add<HArgumentsElements>(false);
727 HInstruction* argument = Add<HAccessArgumentsAt>(
728 argument_elements, checked_length, key);
730 Add<HStoreKeyed>(elements, key, argument, kind);
// The six thin wrappers below instantiate the shared (Internal)Array
// constructor builders for the NONE / SINGLE / MULTIPLE argument classes.
// NOTE(review): "BuildCodeStub() {" continuation lines and closing braces
// are elided for several of these definitions in this excerpt.
737 HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
738 ElementsKind kind = casted_stub()->elements_kind();
739 AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
740 return BuildArrayConstructor(kind, override_mode, NONE);
744 Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode(Isolate* isolate) {
745 return DoGenerateCode(isolate, this);
750 HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
752 ElementsKind kind = casted_stub()->elements_kind();
753 AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
754 return BuildArrayConstructor(kind, override_mode, SINGLE);
758 Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode(
760 return DoGenerateCode(isolate, this);
765 HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
766 ElementsKind kind = casted_stub()->elements_kind();
767 AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
768 return BuildArrayConstructor(kind, override_mode, MULTIPLE);
772 Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode(Isolate* isolate) {
773 return DoGenerateCode(isolate, this);
// Internal arrays: same three argument classes, no allocation site.
778 HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
780 ElementsKind kind = casted_stub()->elements_kind();
781 return BuildInternalArrayConstructor(kind, NONE);
785 Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode(
787 return DoGenerateCode(isolate, this);
792 HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
794 ElementsKind kind = casted_stub()->elements_kind();
795 return BuildInternalArrayConstructor(kind, SINGLE);
799 Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode(
801 return DoGenerateCode(isolate, this);
806 HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
808 ElementsKind kind = casted_stub()->elements_kind();
809 return BuildInternalArrayConstructor(kind, MULTIPLE);
813 Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode(
815 return DoGenerateCode(isolate, this);
// CompareNilICStub: builds a nil comparison (against null/undefined) for
// the stub's recorded type, returning 1 on the true branch and 0 on the
// false branch (or the appropriate constant when one side is
// statically unreachable).
// NOTE(review): the if_nil.Then()/Else() lines and braces are elided.
820 HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
821 Isolate* isolate = graph()->isolate();
822 CompareNilICStub* stub = casted_stub();
823 HIfContinuation continuation;
// meta_map is used as a sentinel map that no user object can have.
824 Handle<Map> sentinel_map(isolate->heap()->meta_map());
825 Type* type = stub->GetType(zone(), sentinel_map);
826 BuildCompareNil(GetParameter(0), type, &continuation);
827 IfBuilder if_nil(this, &continuation);
829 if (continuation.IsFalseReachable()) {
831 if_nil.Return(graph()->GetConstant0());
834 return continuation.IsTrueReachable()
835 ? graph()->GetConstant1()
836 : graph()->GetConstantUndefined();
840 Handle<Code> CompareNilICStub::GenerateCode(Isolate* isolate) {
841 return DoGenerateCode(isolate, this);
// BinaryOpICStub: builds a binary operation specialized on the IC's
// recorded left/right/result types. For ADD with a possibly-string
// operand it splits on a runtime string check so the string fast path is
// taken when possible. Also applies Smi-overflow handling for SHR and
// optionally reuses a heap-number box of an operand for the result.
// NOTE(review): many interior lines elided (Then() calls, Push/Pop of the
// diamond result, allocation-mode arguments, braces, final return).
846 HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
847 BinaryOpIC::State state = casted_stub()->state();
849 HValue* left = GetParameter(BinaryOpICStub::kLeft);
850 HValue* right = GetParameter(BinaryOpICStub::kRight);
852 Type* left_type = state.GetLeftType(zone());
853 Type* right_type = state.GetRightType(zone());
854 Type* result_type = state.GetResultType(zone());
856 ASSERT(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
857 (state.HasSideEffects() || !result_type->Is(Type::None())));
859 HValue* result = NULL;
860 HAllocationMode allocation_mode(NOT_TENURED);
// Only take the string fast path when an operand MAY be a string but
// neither is KNOWN to be one (known-string cases are handled generically).
861 if (state.op() == Token::ADD &&
862 (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
863 !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
864 // For the generic add stub a fast case for string addition is performance
866 if (left_type->Maybe(Type::String())) {
867 IfBuilder if_leftisstring(this);
868 if_leftisstring.If<HIsStringAndBranch>(left);
869 if_leftisstring.Then();
871 Push(BuildBinaryOperation(
872 state.op(), left, right,
873 Type::String(zone()), right_type,
874 result_type, state.fixed_right_arg(),
877 if_leftisstring.Else();
879 Push(BuildBinaryOperation(
880 state.op(), left, right,
881 left_type, right_type, result_type,
882 state.fixed_right_arg(), allocation_mode));
884 if_leftisstring.End();
887 IfBuilder if_rightisstring(this);
888 if_rightisstring.If<HIsStringAndBranch>(right);
889 if_rightisstring.Then();
891 Push(BuildBinaryOperation(
892 state.op(), left, right,
893 left_type, Type::String(zone()),
894 result_type, state.fixed_right_arg(),
897 if_rightisstring.Else();
899 Push(BuildBinaryOperation(
900 state.op(), left, right,
901 left_type, right_type, result_type,
902 state.fixed_right_arg(), allocation_mode));
904 if_rightisstring.End();
908 result = BuildBinaryOperation(
909 state.op(), left, right,
910 left_type, right_type, result_type,
911 state.fixed_right_arg(), allocation_mode);
914 // If we encounter a generic argument, the number conversion is
915 // observable, thus we cannot afford to bail out after the fact.
916 if (!state.HasSideEffects()) {
917 if (result_type->Is(Type::Smi())) {
918 if (state.op() == Token::SHR) {
919 // TODO(olivf) Replace this by a SmiTagU Instruction.
920 // 0x40000000: this number would convert to negative when interpreting
921 // the register as signed value;
922 IfBuilder if_of(this);
923 if_of.IfNot<HCompareNumericAndBranch>(result,
924 Add<HConstant>(static_cast<int>(SmiValuesAre32Bits()
925 ? 0x80000000 : 0x40000000)), Token::EQ_STRICT);
927 if_of.ElseDeopt("UInt->Smi oveflow");
931 result = EnforceNumberType(result, result_type);
934 // Reuse the double box of one of the operands if we are allowed to (i.e.
936 if (state.CanReuseDoubleBox()) {
937 HValue* operand = (state.mode() == OVERWRITE_LEFT) ? left : right;
938 IfBuilder if_heap_number(this);
939 if_heap_number.IfNot<HIsSmiAndBranch>(operand);
940 if_heap_number.Then();
// Overwrite the operand's heap-number payload in place with the result.
941 Add<HStoreNamedField>(operand, HObjectAccess::ForHeapNumberValue(), result);
943 if_heap_number.Else();
945 if_heap_number.End();
953 Handle<Code> BinaryOpICStub::GenerateCode(Isolate* isolate) {
954 return DoGenerateCode(isolate, this);
// BinaryOpWithAllocationSiteStub: like BinaryOpICStub but threads an
// AllocationSite parameter through as the allocation mode, so result
// objects can be pretenured according to site feedback.
959 HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
960 BinaryOpIC::State state = casted_stub()->state();
962 HValue* allocation_site = GetParameter(
963 BinaryOpWithAllocationSiteStub::kAllocationSite);
964 HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
965 HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);
967 Type* left_type = state.GetLeftType(zone());
968 Type* right_type = state.GetRightType(zone());
969 Type* result_type = state.GetResultType(zone());
970 HAllocationMode allocation_mode(allocation_site);
972 return BuildBinaryOperation(state.op(), left, right,
973 left_type, right_type, result_type,
974 state.fixed_right_arg(), allocation_mode);
978 Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode(Isolate* isolate) {
979 return DoGenerateCode(isolate, this);
// StringAddStub: concatenates two values as strings. Operands that are
// not statically known to be strings are checked (and deopt on failure)
// according to the stub's flags; the pretenure flag selects the
// allocation mode for the result string.
984 HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
985 StringAddStub* stub = casted_stub();
986 StringAddFlags flags = stub->flags();
987 PretenureFlag pretenure_flag = stub->pretenure_flag();
989 HValue* left = GetParameter(StringAddStub::kLeft);
990 HValue* right = GetParameter(StringAddStub::kRight);
992 // Make sure that both arguments are strings if not known in advance.
993 if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
994 left = BuildCheckString(left);
996 if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
997 right = BuildCheckString(right);
1000 return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
1004 Handle<Code> StringAddStub::GenerateCode(Isolate* isolate) {
1005 return DoGenerateCode(isolate, this);
// ToBooleanStub: branches on the truthiness of parameter 0 for the types
// recorded by the IC; returns 1 on the true path, 0 otherwise.
1010 HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
1011 ToBooleanStub* stub = casted_stub();
1013 IfBuilder if_true(this);
1014 if_true.If<HBranch>(GetParameter(0), stub->GetTypes());
1016 if_true.Return(graph()->GetConstant1());
1019 return graph()->GetConstant0();
1023 Handle<Code> ToBooleanStub::GenerateCode(Isolate* isolate) {
1024 return DoGenerateCode(isolate, this);
// StoreGlobalStub: stores a value into a global property cell. Compiled
// against placeholder map/cell handles that are patched to the real
// global map and cell when the stub is specialized. Deopts if the global
// object's map changed, if a constant cell's value differs, or if the
// property was deleted (cell holds the hole).
// NOTE(review): Then()/Else()/End() builder calls, braces and the final
// return are elided in this excerpt.
1029 HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
1030 StoreGlobalStub* stub = casted_stub();
1031 Handle<Object> hole(isolate()->heap()->the_hole_value(), isolate());
1032 Handle<Object> placeholer_value(Smi::FromInt(0), isolate());
1033 Handle<PropertyCell> placeholder_cell =
1034 isolate()->factory()->NewPropertyCell(placeholer_value);
1036 HParameter* receiver = GetParameter(0);
1037 HParameter* value = GetParameter(2);
1039 // Check that the map of the global has not changed: use a placeholder map
1040 // that will be replaced later with the global object's map.
1041 Handle<Map> placeholder_map = isolate()->factory()->meta_map();
1042 Add<HCheckMaps>(receiver, placeholder_map, top_info());
1044 HValue* cell = Add<HConstant>(placeholder_cell);
1045 HObjectAccess access(HObjectAccess::ForCellPayload(isolate()));
1046 HValue* cell_contents = Add<HLoadNamedField>(
1047 cell, static_cast<HValue*>(NULL), access);
// Constant cells: the store must write back the same constant, otherwise
// the stub deopts so the runtime can generalize the cell.
1049 if (stub->is_constant()) {
1050 IfBuilder builder(this);
1051 builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
1053 builder.ElseDeopt("Unexpected cell contents in constant global store");
1056 // Load the payload of the global parameter cell. A hole indicates that the
1057 // property has been deleted and that the store must be handled by the
1059 IfBuilder builder(this);
1060 HValue* hole_value = Add<HConstant>(hole);
1061 builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
1063 builder.Deopt("Unexpected cell contents in global store");
1065 Add<HStoreNamedField>(cell, access, value);
1073 Handle<Code> StoreGlobalStub::GenerateCode(Isolate* isolate) {
1074 return DoGenerateCode(isolate, this);
// Builds the Hydrogen graph for ElementsTransitionAndStoreStub: transitions
// `object`'s elements from the stub's from_kind to its to_kind, then performs
// the keyed store of `value` at `key`.
1079 HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
1080 HValue* value = GetParameter(0);
1081 HValue* map = GetParameter(1);
1082 HValue* key = GetParameter(2);
1083 HValue* object = GetParameter(3);
// Transition tracing is not implemented in generated code; deoptimize so
// the runtime (which does the tracing) handles the store instead.
1085 if (FLAG_trace_elements_transitions) {
1086 // Tracing elements transitions is the job of the runtime.
1087 Add<HDeoptimize>("Tracing elements transitions", Deoptimizer::EAGER);
// NOTE(review): marking the stub as saving caller doubles presumably
// relates to double-register use during the elements copy — confirm
// against the platform stub frame setup.
1089 info()->MarkAsSavesCallerDoubles();
1091 BuildTransitionElementsKind(object, map,
1092 casted_stub()->from_kind(),
1093 casted_stub()->to_kind(),
1094 casted_stub()->is_jsarray());
// After the transition, perform the store with the stub's target elements
// kind and configured store mode (holes allowed on return).
1096 BuildUncheckedMonomorphicElementAccess(object, key, value,
1097 casted_stub()->is_jsarray(),
1098 casted_stub()->to_kind(),
1099 true, ALLOW_RETURN_HOLE,
1100 casted_stub()->store_mode());
// Compiles the Hydrogen graph for ElementsTransitionAndStoreStub into a
// Code object via the shared DoGenerateCode driver.
1107 Handle<Code> ElementsTransitionAndStoreStub::GenerateCode(Isolate* isolate) {
1108 return DoGenerateCode(isolate, this);
// Installs `code_object` as the entry point of `js_function` and links the
// function into the native context's list of optimized functions. Bumps the
// fast_new_closure_install_optimized counter for bookkeeping.
1112 void CodeStubGraphBuilderBase::BuildInstallOptimizedCode(
1113 HValue* js_function,
1114 HValue* native_context,
1115 HValue* code_object) {
1116 Counters* counters = isolate()->counters();
1117 AddIncrementCounter(counters->fast_new_closure_install_optimized());
1119 // TODO(fschneider): Idea: store proper code pointers in the optimized code
1120 // map and either unmangle them on marking or do nothing as the whole map is
1121 // discarded on major GC anyway.
1122 Add<HStoreCodeEntry>(js_function, code_object);
1124 // Now link a function into a list of optimized functions.
// Prepend js_function to the context's OPTIMIZED_FUNCTIONS_LIST: point the
// function's next-function-link at the current list head...
1125 HValue* optimized_functions_list = Add<HLoadNamedField>(
1126 native_context, static_cast<HValue*>(NULL),
1127 HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
1128 Add<HStoreNamedField>(js_function,
1129 HObjectAccess::ForNextFunctionLinkPointer(),
1130 optimized_functions_list);
// ...then make js_function the new head (store into the native context).
1132 // This store is the only one that should have a write barrier.
1133 Add<HStoreNamedField>(native_context,
1134 HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
// Installs the (unoptimized) code from `shared_info` into `js_function`:
// clears the function's next-function-link (it is not on any optimized
// functions list), then copies the shared info's code as the code entry.
1139 void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
1140 HValue* shared_info) {
1141 Add<HStoreNamedField>(js_function,
1142 HObjectAccess::ForNextFunctionLinkPointer(),
1143 graph()->GetConstantUndefined());
1144 HValue* code_object = Add<HLoadNamedField>(
1145 shared_info, static_cast<HValue*>(NULL), HObjectAccess::ForCodeOffset());
1146 Add<HStoreCodeEntry>(js_function, code_object);
// Searches `shared_info`'s optimized code map for an entry matching
// `native_context` (with no OSR ast id) and installs the cached optimized
// code into `js_function` if found; otherwise falls back to installing the
// unoptimized code from the shared info via BuildInstallCode.
1150 void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
1151 HValue* js_function,
1152 HValue* shared_info,
1153 HValue* native_context) {
1154 Counters* counters = isolate()->counters();
1155 IfBuilder is_optimized(this);
1156 HInstruction* optimized_map = Add<HLoadNamedField>(
1157 shared_info, static_cast<HValue*>(NULL),
1158 HObjectAccess::ForOptimizedCodeMap());
// NOTE(review): despite the name, this is the constant 0 (Smi zero) —
// presumably the "no optimized code map" sentinel; confirm against
// SharedFunctionInfo's optimized-code-map representation.
1159 HValue* null_constant = Add<HConstant>(0);
1160 is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
1161 is_optimized.Then();
// No optimized code map at all: install the unoptimized code.
1163 BuildInstallCode(js_function, shared_info);
1165 is_optimized.Else();
1167 AddIncrementCounter(counters->fast_new_closure_try_optimized());
1168 // optimized_map points to fixed array of 3-element entries
1169 // (native context, optimized code, literals).
1170 // Map must never be empty, so check the first elements.
1171 Label install_optimized;
// Fast path: the first entry's context matches and its OSR ast id is
// "none" — install that entry's code directly.
1172 HValue* first_context_slot = Add<HLoadNamedField>(
1173 optimized_map, static_cast<HValue*>(NULL),
1174 HObjectAccess::ForFirstContextSlot());
1175 HValue* first_osr_ast_slot = Add<HLoadNamedField>(
1176 optimized_map, static_cast<HValue*>(NULL),
1177 HObjectAccess::ForFirstOsrAstIdSlot());
1178 HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
1179 IfBuilder already_in(this);
1180 already_in.If<HCompareObjectEqAndBranch>(native_context,
1181 first_context_slot);
1182 already_in.AndIf<HCompareObjectEqAndBranch>(first_osr_ast_slot,
1186 HValue* code_object = Add<HLoadNamedField>(
1187 optimized_map, static_cast<HValue*>(NULL),
1188 HObjectAccess::ForFirstCodeSlot());
1189 BuildInstallOptimizedCode(js_function, native_context, code_object);
// Slow path: walk the map backwards from the end, one kEntryLength-sized
// entry per iteration (post-decrement loop from the array length down).
1193 HValue* shared_function_entry_length =
1194 Add<HConstant>(SharedFunctionInfo::kEntryLength);
1195 LoopBuilder loop_builder(this,
1197 LoopBuilder::kPostDecrement,
1198 shared_function_entry_length);
1199 HValue* array_length = Add<HLoadNamedField>(
1200 optimized_map, static_cast<HValue*>(NULL),
1201 HObjectAccess::ForFixedArrayLength());
1202 HValue* slot_iterator = loop_builder.BeginBody(array_length,
1203 graph()->GetConstant0(),
1206 // Iterate through the rest of map backwards.
1207 // Do not double check first entry.
// When the iterator reaches the second entry's index, the whole map (minus
// the already-checked first entry) has been scanned without a match:
// install the unoptimized code and leave the loop.
1208 HValue* second_entry_index =
1209 Add<HConstant>(SharedFunctionInfo::kSecondEntryIndex);
1210 IfBuilder restore_check(this);
1211 restore_check.If<HCompareNumericAndBranch>(
1212 slot_iterator, second_entry_index, Token::EQ);
1213 restore_check.Then();
1215 // Store the unoptimized code
1216 BuildInstallCode(js_function, shared_info);
1217 loop_builder.Break();
1219 restore_check.Else();
// Compute the slots of the current entry's context and OSR ast id
// relative to the iterator; the static asserts pin the entry layout the
// offsets below rely on.
1221 STATIC_ASSERT(SharedFunctionInfo::kContextOffset == 0);
1222 STATIC_ASSERT(SharedFunctionInfo::kEntryLength -
1223 SharedFunctionInfo::kOsrAstIdOffset == 1);
1224 HValue* native_context_slot = AddUncasted<HSub>(
1225 slot_iterator, shared_function_entry_length);
1226 HValue* osr_ast_id_slot = AddUncasted<HSub>(
1227 slot_iterator, graph()->GetConstant1());
1228 HInstruction* native_context_entry = Add<HLoadKeyed>(optimized_map,
1229 native_context_slot, static_cast<HValue*>(NULL), FAST_ELEMENTS);
1230 HInstruction* osr_ast_id_entry = Add<HLoadKeyed>(optimized_map,
1231 osr_ast_id_slot, static_cast<HValue*>(NULL), FAST_ELEMENTS);
1232 IfBuilder done_check(this);
1233 done_check.If<HCompareObjectEqAndBranch>(native_context,
1234 native_context_entry);
1235 done_check.AndIf<HCompareObjectEqAndBranch>(osr_ast_id_entry,
1239 // Hit: fetch the optimized code.
// The code slot sits one past the entry's context slot (see the layout
// static asserts above).
1240 HValue* code_slot = AddUncasted<HAdd>(
1241 native_context_slot, graph()->GetConstant1());
1242 HValue* code_object = Add<HLoadKeyed>(optimized_map,
1243 code_slot, static_cast<HValue*>(NULL), FAST_ELEMENTS);
1244 BuildInstallOptimizedCode(js_function, native_context, code_object);
1246 // Fall out of the loop
1247 loop_builder.Break();
1252 restore_check.End();
1254 loop_builder.EndBody();
// Builds the Hydrogen graph for FastNewClosureStub: allocates a JSFunction
// in new space from the SharedFunctionInfo passed as parameter 0, fills in
// its fields, and installs its code (checking the optimized code map first
// when --cache-optimized-code is on).
1263 HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
1264 Counters* counters = isolate()->counters();
1265 Factory* factory = isolate()->factory();
1266 HInstruction* empty_fixed_array =
1267 Add<HConstant>(factory->empty_fixed_array());
1268 HValue* shared_info = GetParameter(0);
1270 AddIncrementCounter(counters->fast_new_closure_total());
1272 // Create a new closure from the given function info in new space
1273 HValue* size = Add<HConstant>(JSFunction::kSize);
1274 HInstruction* js_function = Add<HAllocate>(size, HType::JSObject(),
1275 NOT_TENURED, JS_FUNCTION_TYPE);
// The function map depends on the stub's language mode and whether the
// closure is a generator.
1277 int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(),
1278 casted_stub()->is_generator());
1280 // Compute the function map in the current native context and set that
1281 // as the map of the allocated object.
1282 HInstruction* native_context = BuildGetNativeContext();
1283 HInstruction* map_slot_value = Add<HLoadNamedField>(
1284 native_context, static_cast<HValue*>(NULL),
1285 HObjectAccess::ForContextSlot(map_index));
1286 Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);
1288 // Initialize the rest of the function.
// Properties/elements start out empty; prototype-or-initial-map starts as
// the hole (lazily set up on first use).
1289 Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
1291 Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
1293 Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
1295 Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
1296 graph()->GetConstantHole());
1297 Add<HStoreNamedField>(js_function,
1298 HObjectAccess::ForSharedFunctionInfoPointer(),
1300 Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
1303 // Initialize the code pointer in the function to be the one
1304 // found in the shared function info object.
1305 // But first check if there is an optimized version for our context.
1306 if (FLAG_cache_optimized_code) {
1307 BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
1309 BuildInstallCode(js_function, shared_info);
// Compiles the Hydrogen graph for FastNewClosureStub into a Code object via
// the shared DoGenerateCode driver.
1316 Handle<Code> FastNewClosureStub::GenerateCode(Isolate* isolate) {
1317 return DoGenerateCode(isolate, this);
// Builds the Hydrogen graph for FastNewContextStub: allocates a function
// context (MIN_CONTEXT_SLOTS plus the stub's slot count) in new space,
// initializes its header and fixed slots, copies the global object from the
// current context, and fills the remaining slots with undefined.
1322 HValue* CodeStubGraphBuilder<FastNewContextStub>::BuildCodeStub() {
1323 int length = casted_stub()->slots() + Context::MIN_CONTEXT_SLOTS;
1325 // Get the function.
1326 HParameter* function = GetParameter(FastNewContextStub::kFunction);
1328 // Allocate the context in new space.
// Contexts are laid out like fixed arrays: header plus `length` pointers.
1329 HAllocate* function_context = Add<HAllocate>(
1330 Add<HConstant>(length * kPointerSize + FixedArray::kHeaderSize),
1331 HType::Tagged(), NOT_TENURED, FIXED_ARRAY_TYPE);
1333 // Set up the object header.
1334 AddStoreMapConstant(function_context,
1335 isolate()->factory()->function_context_map());
1336 Add<HStoreNamedField>(function_context,
1337 HObjectAccess::ForFixedArrayLength(),
1338 Add<HConstant>(length));
1340 // Set up the fixed slots.
// CLOSURE/PREVIOUS slot values are elided in this listing; EXTENSION is
// cleared to 0 (no extension object).
1341 Add<HStoreNamedField>(function_context,
1342 HObjectAccess::ForContextSlot(Context::CLOSURE_INDEX),
1344 Add<HStoreNamedField>(function_context,
1345 HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX),
1347 Add<HStoreNamedField>(function_context,
1348 HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX),
1349 graph()->GetConstant0());
1351 // Copy the global object from the previous context.
1352 HValue* global_object = Add<HLoadNamedField>(
1353 context(), static_cast<HValue*>(NULL),
1354 HObjectAccess::ForContextSlot(Context::GLOBAL_OBJECT_INDEX));
1355 Add<HStoreNamedField>(function_context,
1356 HObjectAccess::ForContextSlot(
1357 Context::GLOBAL_OBJECT_INDEX),
1360 // Initialize the rest of the slots to undefined.
// `length` is a compile-time constant for this stub, so the loop is
// unrolled into straight-line stores at graph-build time.
1361 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; ++i) {
1362 Add<HStoreNamedField>(function_context,
1363 HObjectAccess::ForContextSlot(i),
1364 graph()->GetConstantUndefined());
1367 return function_context;
// Compiles the Hydrogen graph for FastNewContextStub into a Code object via
// the shared DoGenerateCode driver.
1371 Handle<Code> FastNewContextStub::GenerateCode(Isolate* isolate) {
1372 return DoGenerateCode(isolate, this);
// Builds the Hydrogen graph for KeyedLoadDictionaryElementStub: verifies the
// key is a Smi, then delegates the dictionary-mode element lookup to the
// shared BuildUncheckedDictionaryElementLoad helper.
1377 HValue* CodeStubGraphBuilder<KeyedLoadDictionaryElementStub>::BuildCodeStub() {
1378 HValue* receiver = GetParameter(0);
1379 HValue* key = GetParameter(1);
1381 Add<HCheckSmi>(key);
1383 return BuildUncheckedDictionaryElementLoad(receiver, key);
// Compiles the Hydrogen graph for KeyedLoadDictionaryElementStub into a Code
// object via the shared DoGenerateCode driver.
1387 Handle<Code> KeyedLoadDictionaryElementStub::GenerateCode(Isolate* isolate) {
1388 return DoGenerateCode(isolate, this);
// Builds the Hydrogen graph for RegExpConstructResultStub: unpacks the
// stub's three parameters (match length, match index, input string) and
// delegates construction of the RegExp result object to the shared
// BuildRegExpConstructResult helper.
1393 HValue* CodeStubGraphBuilder<RegExpConstructResultStub>::BuildCodeStub() {
1394 // Determine the parameters.
1395 HValue* length = GetParameter(RegExpConstructResultStub::kLength);
1396 HValue* index = GetParameter(RegExpConstructResultStub::kIndex);
1397 HValue* input = GetParameter(RegExpConstructResultStub::kInput);
1399 return BuildRegExpConstructResult(length, index, input);
// Compiles the Hydrogen graph for RegExpConstructResultStub into a Code
// object via the shared DoGenerateCode driver.
1403 Handle<Code> RegExpConstructResultStub::GenerateCode(Isolate* isolate) {
1404 return DoGenerateCode(isolate, this);
1408 } } // namespace v8::internal