1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #include "code-stubs.h"
// Runs the Hydrogen optimization pipeline on |graph| and lowers it to a
// Lithium chunk. Stub compilation must not allocate on the heap nor
// create/dereference handles, hence the three Disallow scopes. A failed
// optimization or chunk build is fatal: stubs have no fallback path here.
// NOTE(review): some lines of this function are elided in this excerpt.
static LChunk* OptimizeGraph(HGraph* graph) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  ASSERT(graph != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph->Optimize(&bailout_reason)) {
    FATAL(GetBailoutReason(bailout_reason));
  LChunk* chunk = LChunk::NewChunk(graph);
  // Reached when chunk construction fails — report the graph's reason.
  FATAL(GetBailoutReason(graph->info()->bailout_reason()));
// Shared base for all Hydrogen-based code stub graph builders. Owns the
// compilation info, the stub's interface descriptor and the incoming
// parameters, and drives graph construction via BuildGraph(), which calls
// the subclass hook BuildCodeStub().
class CodeStubGraphBuilderBase : public HGraphBuilder {
  CodeStubGraphBuilderBase(Isolate* isolate, HydrogenCodeStub* stub)
      : HGraphBuilder(&info_),
        arguments_length_(NULL),
    // Cache the stub's calling-convention descriptor and allocate one
    // HParameter slot per register parameter.
    descriptor_ = stub->GetInterfaceDescriptor(isolate);
    parameters_.Reset(new HParameter*[descriptor_->register_param_count_]);

  // Builds the full graph: entry block, parameter binding, stub body, return.
  virtual bool BuildGraph();

  // Subclass hook producing the stub's return value.
  virtual HValue* BuildCodeStub() = 0;

  HParameter* GetParameter(int parameter) {
    ASSERT(parameter < descriptor_->register_param_count_);
    return parameters_[parameter];

  HValue* GetArgumentsLength() {
    // This is initialized in BuildGraph()
    ASSERT(arguments_length_ != NULL);
    return arguments_length_;

  CompilationInfo* info() { return &info_; }
  HydrogenCodeStub* stub() { return info_.code_stub(); }
  HContext* context() { return context_; }
  Isolate* isolate() { return info_.isolate(); }

  // Scoped helper: on destruction, deopts unless |constructor| was proven
  // equal to |array_function| by the comparison emitted in the constructor.
  class ArrayContextChecker {
    ArrayContextChecker(HGraphBuilder* builder, HValue* constructor,
                        HValue* array_function)
      checker_.If<HCompareObjectEqAndBranch, HValue*>(constructor,
    ~ArrayContextChecker() {
      checker_.ElseDeopt("Array constructor called from different context");

  // Shared bodies for the Array / InternalArray constructor stubs.
  HValue* BuildArrayConstructor(ElementsKind kind,
                                ContextCheckMode context_mode,
                                AllocationSiteOverrideMode override_mode,
                                ArgumentClass argument_class);
  HValue* BuildInternalArrayConstructor(ElementsKind kind,
                                        ArgumentClass argument_class);

  // Helpers for installing (optimized) code on a closure.
  void BuildInstallOptimizedCode(HValue* js_function, HValue* native_context,
                                 HValue* code_object);
  void BuildInstallCode(HValue* js_function, HValue* shared_info);
  void BuildInstallFromOptimizedCodeMap(HValue* js_function,
                                        HValue* native_context);

  HValue* BuildArraySingleArgumentConstructor(JSArrayBuilder* builder);
  HValue* BuildArrayNArgumentsConstructor(JSArrayBuilder* builder,

  // One slot per register parameter; filled in BuildGraph().
  SmartArrayPointer<HParameter*> parameters_;
  HValue* arguments_length_;
  CompilationInfoWithZone info_;
  CodeStubInterfaceDescriptor* descriptor_;
// Builds the stub graph skeleton: bumps the stats counter, creates the
// entry block, binds the register parameters (and, when the descriptor has
// one, the dynamic stack-parameter count) into the start environment, runs
// the stub-specific body, then emits the return — popping the incoming JS
// stack arguments when compiled in JS_FUNCTION_STUB_MODE.
bool CodeStubGraphBuilderBase::BuildGraph() {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  if (FLAG_trace_hydrogen_stubs) {
    const char* name = CodeStub::MajorName(stub()->MajorKey(), false);
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling stub %s using hydrogen\n", name);
    isolate()->GetHTracer()->TraceCompilation(&info_);

  int param_count = descriptor_->register_param_count_;
  HEnvironment* start_environment = graph()->start_environment();
  HBasicBlock* next_block = CreateBasicBlock(start_environment);

  next_block->SetJoinId(BailoutId::StubEntry());
  set_current_block(next_block);

  // Bind each register parameter into the environment and remember it so
  // subclasses can fetch it through GetParameter().
  for (int i = 0; i < param_count; ++i) {
    Add<HParameter>(i, HParameter::REGISTER_PARAMETER);
    start_environment->Bind(i, param);
    parameters_[i] = param;

  HInstruction* stack_parameter_count;
  if (descriptor_->stack_parameter_count_.is_valid()) {
    // The count arrives as one extra register parameter after the regular
    // ones; it must live in the environment so deopts can restore it.
    ASSERT(descriptor_->environment_length() == (param_count + 1));
    stack_parameter_count = New<HParameter>(param_count,
                                            HParameter::REGISTER_PARAMETER,
                                            Representation::Integer32());
    stack_parameter_count->set_type(HType::Smi());
    // It's essential to bind this value to the environment in case of deopt.
    AddInstruction(stack_parameter_count);
    start_environment->Bind(param_count, stack_parameter_count);
    arguments_length_ = stack_parameter_count;
    // No dynamic count in the descriptor: fall back to sentinel constants.
    ASSERT(descriptor_->environment_length() == param_count);
    stack_parameter_count = graph()->GetConstantMinus1();
    arguments_length_ = graph()->GetConstant0();

  context_ = Add<HContext>();
  start_environment->BindContext(context_);

  Add<HSimulate>(BailoutId::StubEntry());

  NoObservableSideEffectsScope no_effects(this);

  HValue* return_value = BuildCodeStub();

  // We might have extra expressions to pop from the stack in addition to the
  HInstruction* stack_pop_count = stack_parameter_count;
  if (descriptor_->function_mode_ == JS_FUNCTION_STUB_MODE) {
    if (!stack_parameter_count->IsConstant() &&
        descriptor_->hint_stack_parameter_count_ < 0) {
      // Dynamic count: pop the arguments plus the receiver (+1).
      HInstruction* amount = graph()->GetConstant1();
      stack_pop_count = Add<HAdd>(stack_parameter_count, amount);
      stack_pop_count->ChangeRepresentation(Representation::Integer32());
      stack_pop_count->ClearFlag(HValue::kCanOverflow);
      // A hint was provided: pop a fixed, statically known count instead.
      int count = descriptor_->hint_stack_parameter_count_;
      stack_pop_count = Add<HConstant>(count);

  if (current_block() != NULL) {
    HReturn* hreturn_instruction = New<HReturn>(return_value,
    FinishCurrentBlock(hreturn_instruction);
// Concrete graph builder specialized per stub type. Dispatches to the
// initialized or uninitialized build path depending on the stub's state.
template <class Stub>
class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
  explicit CodeStubGraphBuilder(Isolate* isolate, Stub* stub)
      : CodeStubGraphBuilderBase(isolate, stub) {}

  virtual HValue* BuildCodeStub() {
    if (casted_stub()->IsUninitialized()) {
      return BuildCodeUninitializedStub();
    return BuildCodeInitializedStub();

  virtual HValue* BuildCodeInitializedStub() {

  virtual HValue* BuildCodeUninitializedStub() {
    // Force a deopt that falls back to the runtime.
    HValue* undefined = graph()->GetConstantUndefined();
    IfBuilder builder(this);
    // undefined == undefined always holds, so IfNot never takes the then
    // branch and ElseDeopt fires unconditionally.
    builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
    builder.ElseDeopt("Forced deopt to runtime");

  // Downcast is safe: each builder is constructed with its own stub type.
  Stub* casted_stub() { return static_cast<Stub*>(stub()); }
// Assembles a minimal piece of code that jumps straight to the stub's miss
// handler in the runtime. Used for uninitialized stubs, where running the
// full Hydrogen pipeline would be wasted work.
Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(Isolate* isolate) {
  Factory* factory = isolate->factory();

  // Generate the new code.
  MacroAssembler masm(isolate, NULL, 256);

  // Update the static counter each time a new code stub is generated.
  isolate->counters()->code_stubs()->Increment();

  // Nested stubs are not allowed for leaves.
  AllowStubCallsScope allow_scope(&masm, false);

  // Generate the code for the stub.
  masm.set_generating_stub(true);
  NoCurrentFrameScope scope(&masm);
  GenerateLightweightMiss(&masm);

  // Create the code object.

  // Copy the generated code into a heap object.
  Code::Flags flags = Code::ComputeFlags(
  Handle<Code> new_object = factory->NewCode(
      desc, flags, masm.CodeObject(), NeedsImmovableCode());
// Common driver for every Hydrogen stub's GenerateCode(): lazily
// initializes the stub's interface descriptor, takes the lightweight-miss
// shortcut for uninitialized stubs with a miss handler, and otherwise
// builds and optimizes the Hydrogen graph and generates code from it.
template <class Stub>
static Handle<Code> DoGenerateCode(Isolate* isolate, Stub* stub) {
  CodeStub::Major major_key =
      static_cast<HydrogenCodeStub*>(stub)->MajorKey();
  CodeStubInterfaceDescriptor* descriptor =
      isolate->code_stub_interface_descriptor(major_key);
  // A negative register count marks the descriptor as not-yet-initialized.
  if (descriptor->register_param_count_ < 0) {
    stub->InitializeInterfaceDescriptor(isolate, descriptor);

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor->has_miss_handler()) {
    ASSERT(!descriptor->stack_parameter_count_.is_valid());
    return stub->GenerateLightweightMissCode(isolate);

  if (FLAG_profile_hydrogen_code_stub_compilation) {
  CodeStubGraphBuilder<Stub> builder(isolate, stub);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    double ms = timer.Elapsed().InMillisecondsF();
    PrintF("[Lazy compilation of %s took %0.3f ms]\n", *stub->GetName(), ms);
// ToNumberStub: returns the input unchanged when it is already a number
// (Smi or heap number); otherwise invokes the TO_NUMBER builtin on it.
HValue* CodeStubGraphBuilder<ToNumberStub>::BuildCodeStub() {
  HValue* value = GetParameter(0);

  // Check if the parameter is already a SMI or heap number.
  IfBuilder if_number(this);
  if_number.If<HIsSmiAndBranch>(value);
  if_number.OrIf<HCompareMap>(value, isolate()->factory()->heap_number_map());

  // Return the number.

  // Convert the parameter to number using the builtin.
  HValue* function = AddLoadJSBuiltin(Builtins::TO_NUMBER);
  Add<HPushArgument>(value);
  Push(Add<HInvokeFunction>(function, 1));
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> ToNumberStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
// NumberToStringStub: converts the number parameter to a string via the
// shared BuildNumberToString helper. Caller doubles are saved because the
// helper may clobber double registers.
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, handle(Type::Number(), isolate()));
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> NumberToStringStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
// FastCloneShallowArrayStub: clones the boilerplate array reached through
// the literal's AllocationSite. Deopts when the site is still undefined
// (uninitialized literal). For CLONE_ANY_ELEMENTS the elements kind is
// dispatched at runtime on the boilerplate's elements map; otherwise it is
// statically known from the stub key.
HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
  FastCloneShallowArrayStub::Mode mode = casted_stub()->mode();
  int length = casted_stub()->length();

  // Load the AllocationSite out of the literals array slot (parameter 0).
  HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                  static_cast<HValue*>(NULL),

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add<HLoadNamedField>(allocation_site, access);

  if (mode == FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS) {
    // Runtime dispatch: COW fixed array, plain fixed array, or (fallthrough)
    // double elements.
    HValue* elements = AddLoadElements(boilerplate);

    IfBuilder if_fixed_cow(this);
    if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
    push_value = BuildCloneShallowArray(boilerplate,
    environment()->Push(push_value);

    IfBuilder if_fixed(this);
    if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
    push_value = BuildCloneShallowArray(boilerplate,
    environment()->Push(push_value);
    push_value = BuildCloneShallowArray(boilerplate,
                                        FAST_DOUBLE_ELEMENTS,
    environment()->Push(push_value);
    // Statically-known elements kind from the stub key.
    ElementsKind elements_kind = casted_stub()->ComputeElementsKind();
    push_value = BuildCloneShallowArray(boilerplate,
    environment()->Push(push_value);

  checker.ElseDeopt("Uninitialized boilerplate literals");

  return environment()->Pop();
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> FastCloneShallowArrayStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
// FastCloneShallowObjectStub: clones a boilerplate object word by word,
// optionally appending an AllocationMemento when allocation-site
// pretenuring is enabled. Deopts when the AllocationSite is still
// undefined or the boilerplate's instance size doesn't match the stub's.
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();

  // Load the AllocationSite out of the literals array slot (parameter 0).
  HInstruction* allocation_site = Add<HLoadKeyed>(GetParameter(0),
                                                  static_cast<HValue*>(NULL),

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add<HLoadNamedField>(allocation_site, access);

  // object_size is the payload to copy; size additionally covers the
  // trailing AllocationMemento when pretenuring is on.
  int size = JSObject::kHeaderSize + casted_stub()->length() * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;

  // Guard: the boilerplate's instance size must equal the expected size.
  HValue* boilerplate_map = Add<HLoadNamedField>(
      boilerplate, HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size,
                                       size_in_words, Token::EQ);

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
      isolate()->heap()->GetPretenureMode(), JS_OBJECT_TYPE);

  // Shallow copy: transfer every pointer-sized word of the boilerplate.
  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForJSObjectOffset(i);
    Add<HStoreNamedField>(object, access,
                          Add<HLoadNamedField>(boilerplate, access));

  ASSERT(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(object, object_size, allocation_site);

  environment()->Push(object);
  checker.ElseDeopt("Uninitialized boilerplate in fast clone");

  return environment()->Pop();
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> FastCloneShallowObjectStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
// CreateAllocationSiteStub: tenure-allocates and initializes a fresh
// AllocationSite, links it at the head of the isolate's allocation-site
// list, and stores it into the cell passed as parameter 0.
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  HValue* size = Add<HConstant>(AllocationSite::kSize);
  HInstruction* object = Add<HAllocate>(size, HType::JSObject(), TENURED,

  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);

  // Store the payload (smi elements kind)
  HValue* initial_elements_kind = Add<HConstant>(GetInitialFastElementsKind());
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kTransitionInfoOffset),
                        initial_elements_kind);

  // Unlike literals, constructed arrays don't have nested sites
  Add<HStoreNamedField>(object,
                        HObjectAccess::ForAllocationSiteOffset(
                            AllocationSite::kNestedSiteOffset),
                        graph()->GetConstant0());

  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array =
      Add<HConstant>(isolate()->factory()->empty_fixed_array());
  HStoreNamedField* store = Add<HStoreNamedField>(
      HObjectAccess::ForAllocationSiteOffset(
          AllocationSite::kDependentCodeOffset),

  // Link the object to the allocation site list
  HValue* site_list = Add<HConstant>(
      ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add<HLoadNamedField>(site_list,
                                      HObjectAccess::ForAllocationSiteList());
  store = Add<HStoreNamedField>(object,
      HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
  // The site was just allocated, so the list link needs no write barrier.
  store->SkipWriteBarrier();
  Add<HStoreNamedField>(site_list, HObjectAccess::ForAllocationSiteList(),

  // We use a hammer (SkipWriteBarrier()) to indicate that we know the input
  // cell is really a Cell, and so no write barrier is needed.
  // TODO(mvstanton): Add a debug_code check to verify the input cell is really
  // a cell. (perhaps with a new instruction, HAssert).
  HInstruction* cell = GetParameter(0);
  HObjectAccess access = HObjectAccess::ForCellValue();
  store = Add<HStoreNamedField>(cell, access, object);
  store->SkipWriteBarrier();
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> CreateAllocationSiteStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
// KeyedLoadFastElementStub: monomorphic fast-path keyed load for the
// elements kind recorded in the stub key (receiver = param 0, key = param 1).
HValue* CodeStubGraphBuilder<KeyedLoadFastElementStub>::BuildCodeStub() {
  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(0), GetParameter(1), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(),
      false, NEVER_RETURN_HOLE, STANDARD_STORE);
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> KeyedLoadFastElementStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
// LoadFieldStub: loads a named field at the stub's recorded offset, either
// in-object or from the out-of-object properties backing store.
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  Representation rep = casted_stub()->representation();
  HObjectAccess access = casted_stub()->is_inobject() ?
      HObjectAccess::ForJSObjectOffset(casted_stub()->offset(), rep) :
      HObjectAccess::ForBackingStoreOffset(casted_stub()->offset(), rep);
  return AddLoadNamedField(GetParameter(0), access);
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> LoadFieldStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
// KeyedLoadFieldStub: identical field-load logic to LoadFieldStub; kept as
// a separate stub because it is keyed-load ICs that install it.
HValue* CodeStubGraphBuilder<KeyedLoadFieldStub>::BuildCodeStub() {
  Representation rep = casted_stub()->representation();
  HObjectAccess access = casted_stub()->is_inobject() ?
      HObjectAccess::ForJSObjectOffset(casted_stub()->offset(), rep) :
      HObjectAccess::ForBackingStoreOffset(casted_stub()->offset(), rep);
  return AddLoadNamedField(GetParameter(0), access);
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> KeyedLoadFieldStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
// KeyedStoreFastElementStub: monomorphic fast-path keyed store
// (receiver = param 0, key = param 1, value = param 2); returns the stored
// value, matching JS assignment semantics.
HValue* CodeStubGraphBuilder<KeyedStoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(0), GetParameter(1), GetParameter(2),
      casted_stub()->is_js_array(), casted_stub()->elements_kind(),
      true, NEVER_RETURN_HOLE, casted_stub()->store_mode());

  return GetParameter(2);
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> KeyedStoreFastElementStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
// TransitionElementsKindStub: migrates the receiver's elements from the
// stub's from_kind to to_kind and returns the receiver. Caller doubles are
// saved because element copying may clobber double registers.
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),

  return GetParameter(0);
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> TransitionElementsKindStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
// Shared body of the Array constructor stubs: optionally checks that the
// constructor is the current context's Array function, walks the property
// cell to its AllocationSite, and allocates through JSArrayBuilder
// according to the argument class (none / single / N arguments).
HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ContextCheckMode context_mode,
    AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  if (context_mode == CONTEXT_CHECK_REQUIRED) {
    // Scoped checker: deopts at scope exit unless the check passed.
    HInstruction* array_function = BuildGetArrayFunction();
    ArrayContextChecker checker(this, constructor, array_function);

  HValue* property_cell = GetParameter(ArrayConstructorStubBase::kPropertyCell);
  // Walk through the property cell to the AllocationSite
  HValue* alloc_site = Add<HLoadNamedField>(property_cell,
                                            HObjectAccess::ForCellValue());
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,

  HValue* result = NULL;
  switch (argument_class) {
      result = array_builder.AllocateEmptyArray();
      result = BuildArraySingleArgumentConstructor(&array_builder);
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
// Shared body of the InternalArray constructor stubs: like
// BuildArrayConstructor, but with no context check and no allocation site.
HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
      result = array_builder.AllocateEmptyArray();
      result = BuildArraySingleArgumentConstructor(&array_builder);
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
// Builds the single-argument `Array(len)` case: the length argument is
// bounds-checked against the maximum fast-element array size, then used as
// both capacity and length — except when it is zero, where the
// preallocated initial capacity is used with length 0.
HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      elements, constant_one, constant_zero);

  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  const int initial_capacity = JSArray::kPreallocatedArrayElements;
  HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);

  HInstruction* checked_arg = Add<HBoundsCheck>(argument, max_alloc_length);
  IfBuilder if_builder(this);
  if_builder.If<HCompareNumericAndBranch>(checked_arg, constant_zero,
  Push(initial_capacity_node);  // capacity
  Push(constant_zero);  // length
  Push(checked_arg);  // capacity
  Push(checked_arg);  // length

  // Figure out total size
  HValue* length = Pop();
  HValue* capacity = Pop();
  return array_builder->AllocateArray(capacity, length, true);
// Builds the N-argument Array constructor case: allocates an array of
// GetArgumentsLength() elements and copies each argument in with a loop.
HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  HValue* length = GetArgumentsLength();
  bool fill_with_hole = IsFastSmiElementsKind(kind);
  HValue* new_object = array_builder->AllocateArray(length,

  HValue* elements = array_builder->GetElementsLocation();
  ASSERT(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this,
                      LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, length, Token::LT);
  HInstruction* argument_elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      argument_elements, length, key);

  Add<HStoreKeyed>(elements, key, argument, kind);
// Array() with no arguments: delegate to the shared builder with NONE.
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  ContextCheckMode context_mode = casted_stub()->context_mode();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, context_mode, override_mode, NONE);
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
// Array(len) with one argument: delegate to the shared builder with SINGLE.
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
  ElementsKind kind = casted_stub()->elements_kind();
  ContextCheckMode context_mode = casted_stub()->context_mode();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, context_mode, override_mode, SINGLE);
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode(
  return DoGenerateCode(isolate, this);
// Array(...) with N arguments: delegate to the shared builder with MULTIPLE.
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  ContextCheckMode context_mode = casted_stub()->context_mode();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, context_mode, override_mode, MULTIPLE);
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
// InternalArray() with no arguments.
HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode(
  return DoGenerateCode(isolate, this);
// InternalArray(len) with one argument.
HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode(
  return DoGenerateCode(isolate, this);
// InternalArray(...) with N arguments.
HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode(
  return DoGenerateCode(isolate, this);
// CompareNilICStub: compares parameter 0 against null/undefined according
// to the stub's recorded type feedback. Returns 0 on the reachable false
// branch; the final value is 1 when the true branch is reachable, else
// undefined (statically impossible outcome).
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  // meta_map serves as a sentinel map for the type computation.
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Handle<Type> type = stub->GetType(isolate, sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation);
  IfBuilder if_nil(this, &continuation);
  if (continuation.IsFalseReachable()) {
    if_nil.Return(graph()->GetConstant0());
  return continuation.IsTrueReachable()
      ? graph()->GetConstant1()
      : graph()->GetConstantUndefined();
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> CompareNilICStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
866 HValue* CodeStubGraphBuilder<BinaryOpStub>::BuildCodeInitializedStub() {
867 BinaryOpStub* stub = casted_stub();
868 HValue* left = GetParameter(0);
869 HValue* right = GetParameter(1);
871 Handle<Type> left_type = stub->GetLeftType(isolate());
872 Handle<Type> right_type = stub->GetRightType(isolate());
873 Handle<Type> result_type = stub->GetResultType(isolate());
875 ASSERT(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
876 (stub->HasSideEffects(isolate()) || !result_type->Is(Type::None())));
878 HValue* result = NULL;
879 if (stub->operation() == Token::ADD &&
880 (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
881 !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
882 // For the generic add stub a fast case for string addition is performance
884 if (left_type->Maybe(Type::String())) {
885 IfBuilder if_leftisstring(this);
886 if_leftisstring.If<HIsStringAndBranch>(left);
887 if_leftisstring.Then();
889 Push(AddInstruction(BuildBinaryOperation(
890 stub->operation(), left, right,
891 handle(Type::String(), isolate()), right_type,
892 result_type, stub->fixed_right_arg(), true)));
894 if_leftisstring.Else();
896 Push(AddInstruction(BuildBinaryOperation(
897 stub->operation(), left, right,
898 left_type, right_type, result_type,
899 stub->fixed_right_arg(), true)));
901 if_leftisstring.End();
904 IfBuilder if_rightisstring(this);
905 if_rightisstring.If<HIsStringAndBranch>(right);
906 if_rightisstring.Then();
908 Push(AddInstruction(BuildBinaryOperation(
909 stub->operation(), left, right,
910 left_type, handle(Type::String(), isolate()),
911 result_type, stub->fixed_right_arg(), true)));
913 if_rightisstring.Else();
915 Push(AddInstruction(BuildBinaryOperation(
916 stub->operation(), left, right,
917 left_type, right_type, result_type,
918 stub->fixed_right_arg(), true)));
920 if_rightisstring.End();
924 result = AddInstruction(BuildBinaryOperation(
925 stub->operation(), left, right,
926 left_type, right_type, result_type,
927 stub->fixed_right_arg(), true));
930 // If we encounter a generic argument, the number conversion is
931 // observable, thus we cannot afford to bail out after the fact.
932 if (!stub->HasSideEffects(isolate())) {
933 if (result_type->Is(Type::Smi())) {
934 if (stub->operation() == Token::SHR) {
935 // TODO(olivf) Replace this by a SmiTagU Instruction.
936 // 0x40000000: this number would convert to negative when interpreting
937 // the register as signed value;
938 IfBuilder if_of(this);
939 if_of.IfNot<HCompareNumericAndBranch>(result,
940 Add<HConstant>(static_cast<int>(SmiValuesAre32Bits()
941 ? 0x80000000 : 0x40000000)), Token::EQ_STRICT);
943 if_of.ElseDeopt("UInt->Smi oveflow");
947 result = EnforceNumberType(result, result_type);
950 // Reuse the double box of one of the operands if we are allowed to (i.e.
952 if (stub->CanReuseDoubleBox()) {
953 HValue* operand = (stub->mode() == OVERWRITE_LEFT) ? left : right;
954 IfBuilder if_heap_number(this);
955 if_heap_number.IfNot<HIsSmiAndBranch>(operand);
956 if_heap_number.Then();
957 Add<HStoreNamedField>(operand, HObjectAccess::ForHeapNumberValue(), result);
959 if_heap_number.Else();
961 if_heap_number.End();
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> BinaryOpStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
// ToBooleanStub: returns 1 when parameter 0 is truthy for the stub's
// recorded type set, otherwise 0.
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
  ToBooleanStub* stub = casted_stub();

  IfBuilder if_true(this);
  if_true.If<HBranch>(GetParameter(0), stub->GetTypes());
  if_true.Return(graph()->GetConstant1());

  return graph()->GetConstant0();
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> ToBooleanStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
994 HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
995 StoreGlobalStub* stub = casted_stub();
996 Handle<Object> hole(isolate()->heap()->the_hole_value(), isolate());
997 Handle<Object> placeholer_value(Smi::FromInt(0), isolate());
998 Handle<PropertyCell> placeholder_cell =
999 isolate()->factory()->NewPropertyCell(placeholer_value);
1001 HParameter* receiver = GetParameter(0);
1002 HParameter* value = GetParameter(2);
1004 // Check that the map of the global has not changed: use a placeholder map
1005 // that will be replaced later with the global object's map.
1006 Handle<Map> placeholder_map = isolate()->factory()->meta_map();
1007 Add<HCheckMaps>(receiver, placeholder_map, top_info());
1009 HValue* cell = Add<HConstant>(placeholder_cell);
1010 HObjectAccess access(HObjectAccess::ForCellPayload(isolate()));
1011 HValue* cell_contents = Add<HLoadNamedField>(cell, access);
1013 if (stub->is_constant()) {
1014 IfBuilder builder(this);
1015 builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
1017 builder.ElseDeopt("Unexpected cell contents in constant global store");
1020 // Load the payload of the global parameter cell. A hole indicates that the
1021 // property has been deleted and that the store must be handled by the
1023 IfBuilder builder(this);
1024 HValue* hole_value = Add<HConstant>(hole);
1025 builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
1027 builder.Deopt("Unexpected cell contents in global store");
1029 Add<HStoreNamedField>(cell, access, value);
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> StoreGlobalStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
// ElementsTransitionAndStoreStub: transitions the receiver's elements kind
// and then performs the keyed store in a single stub. Deopts up front when
// elements-transition tracing is enabled, since tracing is runtime work.
HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
  HValue* value = GetParameter(0);
  HValue* map = GetParameter(1);
  HValue* key = GetParameter(2);
  HValue* object = GetParameter(3);

  if (FLAG_trace_elements_transitions) {
    // Tracing elements transitions is the job of the runtime.
    Add<HDeoptimize>("Tracing elements transitions", Deoptimizer::EAGER);

  // Fast path: transition the elements, then store into the new kind.
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(object, map,
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_jsarray());

  BuildUncheckedMonomorphicElementAccess(object, key, value,
                                         casted_stub()->is_jsarray(),
                                         casted_stub()->to_kind(),
                                         true, ALLOW_RETURN_HOLE,
                                         casted_stub()->store_mode());
// Entry point: compile this stub through the shared Hydrogen driver.
Handle<Code> ElementsTransitionAndStoreStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
// Installs |code_object| as |js_function|'s code entry and links the
// function into the native context's list of optimized functions.
void CodeStubGraphBuilderBase::BuildInstallOptimizedCode(
    HValue* js_function,
    HValue* native_context,
    HValue* code_object) {
  Counters* counters = isolate()->counters();
  AddIncrementCounter(counters->fast_new_closure_install_optimized());

  // TODO(fschneider): Idea: store proper code pointers in the optimized code
  // map and either unmangle them on marking or do nothing as the whole map is
  // discarded on major GC anyway.
  Add<HStoreCodeEntry>(js_function, code_object);

  // Now link a function into a list of optimized functions.
  HValue* optimized_functions_list = Add<HLoadNamedField>(native_context,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST));
  Add<HStoreNamedField>(js_function,
                        HObjectAccess::ForNextFunctionLinkPointer(),
                        optimized_functions_list);

  // This store is the only one that should have a write barrier.
  Add<HStoreNamedField>(native_context,
      HObjectAccess::ForContextSlot(Context::OPTIMIZED_FUNCTIONS_LIST),
1102 void CodeStubGraphBuilderBase::BuildInstallCode(HValue* js_function,
1103 HValue* shared_info) {
1104 Add<HStoreNamedField>(js_function,
1105 HObjectAccess::ForNextFunctionLinkPointer(),
1106 graph()->GetConstantUndefined());
1107 HValue* code_object = Add<HLoadNamedField>(shared_info,
1108 HObjectAccess::ForCodeOffset());
1109 Add<HStoreCodeEntry>(js_function, code_object);
1113 void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
1114 HValue* js_function,
1115 HValue* shared_info,
1116 HValue* native_context) {
1117 Counters* counters = isolate()->counters();
1118 IfBuilder is_optimized(this);
1119 HInstruction* optimized_map = Add<HLoadNamedField>(shared_info,
1120 HObjectAccess::ForOptimizedCodeMap());
1121 HValue* null_constant = Add<HConstant>(0);
1122 is_optimized.If<HCompareObjectEqAndBranch>(optimized_map, null_constant);
1123 is_optimized.Then();
1125 BuildInstallCode(js_function, shared_info);
1127 is_optimized.Else();
1129 AddIncrementCounter(counters->fast_new_closure_try_optimized());
1130 // optimized_map points to fixed array of 3-element entries
1131 // (native context, optimized code, literals).
1132 // Map must never be empty, so check the first elements.
1133 Label install_optimized;
1134 HValue* first_context_slot = Add<HLoadNamedField>(optimized_map,
1135 HObjectAccess::ForFirstContextSlot());
1136 IfBuilder already_in(this);
1137 already_in.If<HCompareObjectEqAndBranch>(native_context,
1138 first_context_slot);
1141 HValue* code_object = Add<HLoadNamedField>(optimized_map,
1142 HObjectAccess::ForFirstCodeSlot());
1143 BuildInstallOptimizedCode(js_function, native_context, code_object);
1147 HValue* shared_function_entry_length =
1148 Add<HConstant>(SharedFunctionInfo::kEntryLength);
1149 LoopBuilder loop_builder(this,
1151 LoopBuilder::kPostDecrement,
1152 shared_function_entry_length);
1153 HValue* array_length = Add<HLoadNamedField>(optimized_map,
1154 HObjectAccess::ForFixedArrayLength());
1155 HValue* key = loop_builder.BeginBody(array_length,
1156 graph()->GetConstant0(),
1159 // Iterate through the rest of map backwards.
1160 // Do not double check first entry.
1161 HValue* second_entry_index =
1162 Add<HConstant>(SharedFunctionInfo::kSecondEntryIndex);
1163 IfBuilder restore_check(this);
1164 restore_check.If<HCompareNumericAndBranch>(key, second_entry_index,
1166 restore_check.Then();
1168 // Store the unoptimized code
1169 BuildInstallCode(js_function, shared_info);
1170 loop_builder.Break();
1172 restore_check.Else();
1174 HValue* keyed_minus = AddUncasted<HSub>(
1175 key, shared_function_entry_length);
1176 HInstruction* keyed_lookup = Add<HLoadKeyed>(optimized_map,
1177 keyed_minus, static_cast<HValue*>(NULL), FAST_ELEMENTS);
1178 IfBuilder done_check(this);
1179 done_check.If<HCompareObjectEqAndBranch>(native_context,
1183 // Hit: fetch the optimized code.
1184 HValue* keyed_plus = AddUncasted<HAdd>(
1185 keyed_minus, graph()->GetConstant1());
1186 HValue* code_object = Add<HLoadKeyed>(optimized_map,
1187 keyed_plus, static_cast<HValue*>(NULL), FAST_ELEMENTS);
1188 BuildInstallOptimizedCode(js_function, native_context, code_object);
1190 // Fall out of the loop
1191 loop_builder.Break();
1196 restore_check.End();
1198 loop_builder.EndBody();
1207 HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
1208 Counters* counters = isolate()->counters();
1209 Factory* factory = isolate()->factory();
1210 HInstruction* empty_fixed_array =
1211 Add<HConstant>(factory->empty_fixed_array());
1212 HValue* shared_info = GetParameter(0);
1214 AddIncrementCounter(counters->fast_new_closure_total());
1216 // Create a new closure from the given function info in new space
1217 HValue* size = Add<HConstant>(JSFunction::kSize);
1218 HInstruction* js_function = Add<HAllocate>(size, HType::JSObject(),
1219 NOT_TENURED, JS_FUNCTION_TYPE);
1221 int map_index = Context::FunctionMapIndex(casted_stub()->language_mode(),
1222 casted_stub()->is_generator());
1224 // Compute the function map in the current native context and set that
1225 // as the map of the allocated object.
1226 HInstruction* native_context = BuildGetNativeContext();
1227 HInstruction* map_slot_value = Add<HLoadNamedField>(native_context,
1228 HObjectAccess::ForContextSlot(map_index));
1229 Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);
1231 // Initialize the rest of the function.
1232 Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
1234 Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
1236 Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
1238 Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
1239 graph()->GetConstantHole());
1240 Add<HStoreNamedField>(js_function,
1241 HObjectAccess::ForSharedFunctionInfoPointer(),
1243 Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
1245 Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
1248 // Initialize the code pointer in the function to be the one
1249 // found in the shared function info object.
1250 // But first check if there is an optimized version for our context.
1251 if (FLAG_cache_optimized_code) {
1252 BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
1254 BuildInstallCode(js_function, shared_info);
1261 Handle<Code> FastNewClosureStub::GenerateCode(Isolate* isolate) {
1262 return DoGenerateCode(isolate, this);
1266 } } // namespace v8::internal