1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "src/compiler/pipeline.h"
7 #include <fstream> // NOLINT(readability/streams)
10 #include "src/base/platform/elapsed-timer.h"
11 #include "src/bootstrapper.h" // TODO(mstarzinger): Only temporary.
12 #include "src/compiler/ast-graph-builder.h"
13 #include "src/compiler/ast-loop-assignment-analyzer.h"
14 #include "src/compiler/basic-block-instrumentor.h"
15 #include "src/compiler/change-lowering.h"
16 #include "src/compiler/code-generator.h"
17 #include "src/compiler/common-operator-reducer.h"
18 #include "src/compiler/control-flow-optimizer.h"
19 #include "src/compiler/control-reducer.h"
20 #include "src/compiler/graph-replay.h"
21 #include "src/compiler/graph-visualizer.h"
22 #include "src/compiler/instruction.h"
23 #include "src/compiler/instruction-selector.h"
24 #include "src/compiler/js-builtin-reducer.h"
25 #include "src/compiler/js-context-specialization.h"
26 #include "src/compiler/js-generic-lowering.h"
27 #include "src/compiler/js-inlining.h"
28 #include "src/compiler/js-intrinsic-lowering.h"
29 #include "src/compiler/js-typed-lowering.h"
30 #include "src/compiler/jump-threading.h"
31 #include "src/compiler/load-elimination.h"
32 #include "src/compiler/loop-analysis.h"
33 #include "src/compiler/loop-peeling.h"
34 #include "src/compiler/machine-operator-reducer.h"
35 #include "src/compiler/move-optimizer.h"
36 #include "src/compiler/osr.h"
37 #include "src/compiler/pipeline-statistics.h"
38 #include "src/compiler/register-allocator.h"
39 #include "src/compiler/register-allocator-verifier.h"
40 #include "src/compiler/schedule.h"
41 #include "src/compiler/scheduler.h"
42 #include "src/compiler/select-lowering.h"
43 #include "src/compiler/simplified-lowering.h"
44 #include "src/compiler/simplified-operator-reducer.h"
45 #include "src/compiler/typer.h"
46 #include "src/compiler/value-numbering-reducer.h"
47 #include "src/compiler/verifier.h"
48 #include "src/compiler/zone-pool.h"
49 #include "src/ostreams.h"
50 #include "src/utils.h"
58 // For main entry point.
// Full pipeline constructor: builds the graph zone and all graph-level
// builders (machine/common/JS), the JSGraph and the Typer.
// NOTE(review): this listing has gaps — several member initializers (e.g.
// info_, graph_, schedule_) and closing braces appear to be missing from
// the dump; confirm against the original file.
59 PipelineData(ZonePool* zone_pool, CompilationInfo* info,
60 PipelineStatistics* pipeline_statistics)
61 : isolate_(info->isolate()),
63 outer_zone_(info_->zone()),
64 zone_pool_(zone_pool),
65 pipeline_statistics_(pipeline_statistics),
66 compilation_failed_(false),
67 code_(Handle<Code>::null()),
68 graph_zone_scope_(zone_pool_),
69 graph_zone_(graph_zone_scope_.zone()),
71 loop_assignment_(nullptr),
78 instruction_zone_scope_(zone_pool_),
79 instruction_zone_(instruction_zone_scope_.zone()),
82 register_allocator_(nullptr) {
83 PhaseScope scope(pipeline_statistics, "init pipeline data");
// All graph-level objects below are placement-allocated in graph_zone_.
84 graph_ = new (graph_zone_) Graph(graph_zone_);
85 source_positions_.Reset(new SourcePositionTable(graph_));
86 machine_ = new (graph_zone_) MachineOperatorBuilder(
87 graph_zone_, kMachPtr,
88 InstructionSelector::SupportedMachineOperatorFlags());
89 common_ = new (graph_zone_) CommonOperatorBuilder(graph_zone_);
90 javascript_ = new (graph_zone_) JSOperatorBuilder(graph_zone_);
91 jsgraph_ = new (graph_zone_)
92 JSGraph(isolate_, graph_, common_, javascript_, machine_);
93 typer_.Reset(new Typer(isolate_, graph_, info_->context()));
96 // For machine graph testing entry point.
// Takes an externally built Graph (and presumably a Schedule — the second
// parameter line is missing from this listing); no graph zone is owned.
97 PipelineData(ZonePool* zone_pool, CompilationInfo* info, Graph* graph,
99 : isolate_(info->isolate()),
101 outer_zone_(nullptr),
102 zone_pool_(zone_pool),
103 pipeline_statistics_(nullptr),
104 compilation_failed_(false),
105 code_(Handle<Code>::null()),
106 graph_zone_scope_(zone_pool_),
107 graph_zone_(nullptr),
109 source_positions_(new SourcePositionTable(graph_)),
110 loop_assignment_(nullptr),
113 javascript_(nullptr),
117 instruction_zone_scope_(zone_pool_),
118 instruction_zone_(instruction_zone_scope_.zone()),
121 register_allocator_(nullptr) {}
123 // For register allocation testing entry point.
// Takes a pre-built InstructionSequence; note instruction_zone_ aliases the
// sequence's own zone rather than instruction_zone_scope_'s zone.
124 PipelineData(ZonePool* zone_pool, CompilationInfo* info,
125 InstructionSequence* sequence)
126 : isolate_(info->isolate()),
128 outer_zone_(nullptr),
129 zone_pool_(zone_pool),
130 pipeline_statistics_(nullptr),
131 compilation_failed_(false),
132 code_(Handle<Code>::null()),
133 graph_zone_scope_(zone_pool_),
134 graph_zone_(nullptr),
136 loop_assignment_(nullptr),
139 javascript_(nullptr),
143 instruction_zone_scope_(zone_pool_),
144 instruction_zone_(sequence->zone()),
147 register_allocator_(nullptr) {}
// Destructor body fragment: tears down the instruction zone (the enclosing
// ~PipelineData() line is missing from this listing — confirm).
150 DeleteInstructionZone();
// Trivial accessors for the pipeline-wide state.
154 Isolate* isolate() const { return isolate_; }
155 CompilationInfo* info() const { return info_; }
156 ZonePool* zone_pool() const { return zone_pool_; }
157 PipelineStatistics* pipeline_statistics() { return pipeline_statistics_; }
158 bool compilation_failed() const { return compilation_failed_; }
159 void set_compilation_failed() { compilation_failed_ = true; }
160 Handle<Code> code() { return code_; }
// Code may only be set once (DCHECK below); the assignment line appears to
// be missing from this listing.
161 void set_code(Handle<Code> code) {
162 DCHECK(code_.is_null());
166 // RawMachineAssembler generally produces graphs which cannot be verified.
167 bool MayHaveUnverifiableGraph() const { return outer_zone_ == nullptr; }
// Accessors for graph-zone-allocated objects.
169 Zone* graph_zone() const { return graph_zone_; }
170 Graph* graph() const { return graph_; }
171 SourcePositionTable* source_positions() const {
172 return source_positions_.get();
174 MachineOperatorBuilder* machine() const { return machine_; }
175 CommonOperatorBuilder* common() const { return common_; }
176 JSOperatorBuilder* javascript() const { return javascript_; }
177 JSGraph* jsgraph() const { return jsgraph_; }
178 Typer* typer() const { return typer_.get(); }
180 LoopAssignmentAnalysis* loop_assignment() const { return loop_assignment_; }
// Loop assignment may only be set once.
181 void set_loop_assignment(LoopAssignmentAnalysis* loop_assignment) {
182 DCHECK(!loop_assignment_);
183 loop_assignment_ = loop_assignment;
186 Schedule* schedule() const { return schedule_; }
187 void set_schedule(Schedule* schedule) {
189 schedule_ = schedule;
192 Zone* instruction_zone() const { return instruction_zone_; }
193 InstructionSequence* sequence() const { return sequence_; }
194 Frame* frame() const { return frame_; }
195 RegisterAllocator* register_allocator() const { return register_allocator_; }
// Frees the graph zone and nulls out every pointer into it. Safe to call
// more than once. (Some of the pointer-clearing lines are missing from
// this listing.)
197 void DeleteGraphZone() {
198 // Destroy objects with destructors first.
199 source_positions_.Reset(nullptr);
200 typer_.Reset(nullptr);
201 if (graph_zone_ == nullptr) return;
202 // Destroy zone and clear pointers.
203 graph_zone_scope_.Destroy();
204 graph_zone_ = nullptr;
206 loop_assignment_ = nullptr;
209 javascript_ = nullptr;
// Frees the instruction zone and nulls out pointers into it; idempotent.
214 void DeleteInstructionZone() {
215 if (instruction_zone_ == nullptr) return;
216 instruction_zone_scope_.Destroy();
217 instruction_zone_ = nullptr;
220 register_allocator_ = nullptr;
// Builds the InstructionSequence in the instruction zone from the schedule
// (the DCHECK and the schedule argument line are missing from this listing).
223 void InitializeInstructionSequence() {
225 InstructionBlocks* instruction_blocks =
226 InstructionSequence::InstructionBlocksFor(instruction_zone(),
228 sequence_ = new (instruction_zone()) InstructionSequence(
229 info()->isolate(), instruction_zone(), instruction_blocks);
// Creates the Frame and RegisterAllocator; may only be called once.
232 void InitializeRegisterAllocator(Zone* local_zone,
233 const RegisterConfiguration* config,
234 const char* debug_name) {
235 DCHECK(!register_allocator_);
237 frame_ = new (instruction_zone()) Frame();
238 register_allocator_ = new (instruction_zone())
239 RegisterAllocator(config, local_zone, frame(), sequence(), debug_name);
// ---- Pipeline-lifetime fields (several declarations, e.g. isolate_ and
// outer_zone_, are missing from this listing) ----
244 CompilationInfo* info_;
246 ZonePool* const zone_pool_;
247 PipelineStatistics* pipeline_statistics_;
248 bool compilation_failed_;
251 // All objects in the following group of fields are allocated in graph_zone_.
252 // They are all set to NULL when the graph_zone_ is destroyed.
253 ZonePool::Scope graph_zone_scope_;
256 // TODO(dcarney): make this into a ZoneObject.
257 SmartPointer<SourcePositionTable> source_positions_;
258 LoopAssignmentAnalysis* loop_assignment_;
259 MachineOperatorBuilder* machine_;
260 CommonOperatorBuilder* common_;
261 JSOperatorBuilder* javascript_;
263 // TODO(dcarney): make this into a ZoneObject.
264 SmartPointer<Typer> typer_;
267 // All objects in the following group of fields are allocated in
268 // instruction_zone_. They are all set to NULL when the instruction_zone_ is
270 ZonePool::Scope instruction_zone_scope_;
271 Zone* instruction_zone_;
272 InstructionSequence* sequence_;
274 RegisterAllocator* register_allocator_;
276 DISALLOW_COPY_AND_ASSIGN(PipelineData);
280 struct TurboCfgFile : public std::ofstream {
281 explicit TurboCfgFile(Isolate* isolate)
282 : std::ofstream(isolate->GetTurboCfgFileName().c_str(),
283 std::ios_base::app) {}
287 static void TraceSchedule(Schedule* schedule) {
288 if (!FLAG_trace_turbo_graph && !FLAG_trace_turbo_scheduler) return;
290 os << "-- Schedule --------------------------------------\n" << *schedule;
294 static SmartArrayPointer<char> GetDebugName(CompilationInfo* info) {
295 SmartArrayPointer<char> name;
296 if (info->IsStub()) {
297 if (info->code_stub() != NULL) {
298 CodeStub::Major major_key = info->code_stub()->MajorKey();
299 const char* major_name = CodeStub::MajorName(major_key, false);
300 size_t len = strlen(major_name);
301 name.Reset(new char[len]);
302 memcpy(name.get(), major_name, len);
305 AllowHandleDereference allow_deref;
306 name = info->function()->debug_name()->ToCString();
// AstGraphBuilder subclass that records a source position for every AST
// node it visits. NOTE(review): this listing has gaps — the `public:`
// specifier, a `JSGraph* jsgraph` parameter line, the closing braces and
// the matching `#undef DEF_VISIT` appear to be missing; confirm against
// the original file.
312 class AstGraphBuilderWithPositions : public AstGraphBuilder {
314 AstGraphBuilderWithPositions(Zone* local_zone, CompilationInfo* info,
316 LoopAssignmentAnalysis* loop_assignment,
317 SourcePositionTable* source_positions)
318 : AstGraphBuilder(local_zone, info, jsgraph, loop_assignment),
319 source_positions_(source_positions),
320 start_position_(info->shared_info()->start_position()) {}
// Builds the graph with the function's start position as the active scope.
322 bool CreateGraph(bool constant_context) {
323 SourcePositionTable::Scope pos_scope(source_positions_, start_position_);
324 return AstGraphBuilder::CreateGraph(constant_context);
// Overrides every AST visitor so each visit runs under that node's
// source position scope before delegating to the base visitor.
327 #define DEF_VISIT(type)                                  \
328 void Visit##type(type* node) OVERRIDE {                  \
329 SourcePositionTable::Scope pos(source_positions_,        \
330 SourcePosition(node->position()));                       \
331 AstGraphBuilder::Visit##type(node);                      \
333 AST_NODE_LIST(DEF_VISIT)
337 SourcePositionTable* source_positions_;
338 SourcePosition start_position_;
// Decorator that runs a wrapped Reducer with the node's source position
// pushed onto the position table, so nodes created during the reduction
// inherit that position. NOTE(review): the `Reducer* const reducer_;`
// member declaration and access specifiers appear to be missing from
// this listing.
344 class SourcePositionWrapper : public Reducer {
346 SourcePositionWrapper(Reducer* reducer, SourcePositionTable* table)
347 : reducer_(reducer), table_(table) {}
348 virtual ~SourcePositionWrapper() {}
// Establishes the node's position scope, then delegates to the wrapped
// reducer.
350 virtual Reduction Reduce(Node* node) {
351 SourcePosition pos = table_->GetSourcePosition(node);
352 SourcePositionTable::Scope position(table_, pos);
353 return reducer_->Reduce(node);
358 SourcePositionTable* table_;
360 DISALLOW_COPY_AND_ASSIGN(SourcePositionWrapper);
364 static void AddReducer(PipelineData* data, GraphReducer* graph_reducer,
366 if (FLAG_turbo_source_positions) {
367 void* buffer = data->graph_zone()->New(sizeof(SourcePositionWrapper));
368 SourcePositionWrapper* wrapper =
369 new (buffer) SourcePositionWrapper(reducer, data->source_positions());
370 graph_reducer->AddReducer(wrapper);
372 graph_reducer->AddReducer(reducer);
// Per-phase RAII scope: opens a statistics PhaseScope (skipped for unnamed
// phases, i.e. phase_name == nullptr) and a fresh temporary zone for the
// phase's allocations. NOTE(review): the `phase_scope_(` initializer line
// and access specifiers appear to be missing from this listing.
377 class PipelineRunScope {
379 PipelineRunScope(PipelineData* data, const char* phase_name)
381 phase_name == nullptr ? nullptr : data->pipeline_statistics(),
383 zone_scope_(data->zone_pool()) {}
// Temporary zone for the phase; destroyed when the scope ends.
385 Zone* zone() { return zone_scope_.zone(); }
388 PhaseScope phase_scope_;
389 ZonePool::Scope zone_scope_;
393 template <typename Phase>
394 void Pipeline::Run() {
395 PipelineRunScope scope(this->data_, Phase::phase_name());
397 phase.Run(this->data_, scope.zone());
401 template <typename Phase, typename Arg0>
402 void Pipeline::Run(Arg0 arg_0) {
403 PipelineRunScope scope(this->data_, Phase::phase_name());
405 phase.Run(this->data_, scope.zone(), arg_0);
409 struct LoopAssignmentAnalysisPhase {
410 static const char* phase_name() { return "loop assignment analysis"; }
412 void Run(PipelineData* data, Zone* temp_zone) {
413 AstLoopAssignmentAnalyzer analyzer(data->graph_zone(), data->info());
414 LoopAssignmentAnalysis* loop_assignment = analyzer.Analyze();
415 data->set_loop_assignment(loop_assignment);
420 struct GraphBuilderPhase {
421 static const char* phase_name() { return "graph builder"; }
423 void Run(PipelineData* data, Zone* temp_zone, bool constant_context) {
424 AstGraphBuilderWithPositions graph_builder(
425 temp_zone, data->info(), data->jsgraph(), data->loop_assignment(),
426 data->source_positions());
427 if (!graph_builder.CreateGraph(constant_context)) {
428 data->set_compilation_failed();
434 struct ContextSpecializerPhase {
435 static const char* phase_name() { return "context specializing"; }
437 void Run(PipelineData* data, Zone* temp_zone) {
438 SourcePositionTable::Scope pos(data->source_positions(),
439 SourcePosition::Unknown());
440 JSContextSpecializer spec(data->jsgraph());
441 GraphReducer graph_reducer(data->graph(), temp_zone);
442 AddReducer(data, &graph_reducer, &spec);
443 graph_reducer.ReduceGraph();
448 struct InliningPhase {
449 static const char* phase_name() { return "inlining"; }
451 void Run(PipelineData* data, Zone* temp_zone) {
452 SourcePositionTable::Scope pos(data->source_positions(),
453 SourcePosition::Unknown());
454 JSInliner inliner(temp_zone, data->info(), data->jsgraph());
455 GraphReducer graph_reducer(data->graph(), temp_zone);
456 AddReducer(data, &graph_reducer, &inliner);
457 graph_reducer.ReduceGraph();
463 static const char* phase_name() { return "typer"; }
465 void Run(PipelineData* data, Zone* temp_zone) { data->typer()->Run(); }
469 struct OsrDeconstructionPhase {
470 static const char* phase_name() { return "OSR deconstruction"; }
472 void Run(PipelineData* data, Zone* temp_zone) {
473 SourcePositionTable::Scope pos(data->source_positions(),
474 SourcePosition::Unknown());
475 OsrHelper osr_helper(data->info());
477 osr_helper.Deconstruct(data->jsgraph(), data->common(), temp_zone);
478 if (!success) data->info()->RetryOptimization(kOsrCompileFailed);
483 struct TypedLoweringPhase {
484 static const char* phase_name() { return "typed lowering"; }
486 void Run(PipelineData* data, Zone* temp_zone) {
487 SourcePositionTable::Scope pos(data->source_positions(),
488 SourcePosition::Unknown());
489 ValueNumberingReducer vn_reducer(temp_zone);
490 LoadElimination load_elimination;
491 JSBuiltinReducer builtin_reducer(data->jsgraph());
492 JSTypedLowering typed_lowering(data->jsgraph(), temp_zone);
493 JSIntrinsicLowering intrinsic_lowering(data->jsgraph());
494 SimplifiedOperatorReducer simple_reducer(data->jsgraph());
495 CommonOperatorReducer common_reducer;
496 GraphReducer graph_reducer(data->graph(), temp_zone);
497 AddReducer(data, &graph_reducer, &vn_reducer);
498 AddReducer(data, &graph_reducer, &builtin_reducer);
499 AddReducer(data, &graph_reducer, &typed_lowering);
500 AddReducer(data, &graph_reducer, &intrinsic_lowering);
501 AddReducer(data, &graph_reducer, &load_elimination);
502 AddReducer(data, &graph_reducer, &simple_reducer);
503 AddReducer(data, &graph_reducer, &common_reducer);
504 graph_reducer.ReduceGraph();
509 struct SimplifiedLoweringPhase {
510 static const char* phase_name() { return "simplified lowering"; }
512 void Run(PipelineData* data, Zone* temp_zone) {
513 SourcePositionTable::Scope pos(data->source_positions(),
514 SourcePosition::Unknown());
515 SimplifiedLowering lowering(data->jsgraph(), temp_zone,
516 data->source_positions());
517 lowering.LowerAllNodes();
518 ValueNumberingReducer vn_reducer(temp_zone);
519 SimplifiedOperatorReducer simple_reducer(data->jsgraph());
520 MachineOperatorReducer machine_reducer(data->jsgraph());
521 CommonOperatorReducer common_reducer;
522 GraphReducer graph_reducer(data->graph(), temp_zone);
523 AddReducer(data, &graph_reducer, &vn_reducer);
524 AddReducer(data, &graph_reducer, &simple_reducer);
525 AddReducer(data, &graph_reducer, &machine_reducer);
526 AddReducer(data, &graph_reducer, &common_reducer);
527 graph_reducer.ReduceGraph();
532 struct ControlFlowOptimizationPhase {
533 static const char* phase_name() { return "control flow optimization"; }
535 void Run(PipelineData* data, Zone* temp_zone) {
536 ControlFlowOptimizer optimizer(data->jsgraph(), temp_zone);
537 optimizer.Optimize();
542 struct ChangeLoweringPhase {
543 static const char* phase_name() { return "change lowering"; }
545 void Run(PipelineData* data, Zone* temp_zone) {
546 SourcePositionTable::Scope pos(data->source_positions(),
547 SourcePosition::Unknown());
548 ValueNumberingReducer vn_reducer(temp_zone);
549 SimplifiedOperatorReducer simple_reducer(data->jsgraph());
550 ChangeLowering lowering(data->jsgraph());
551 MachineOperatorReducer machine_reducer(data->jsgraph());
552 CommonOperatorReducer common_reducer;
553 GraphReducer graph_reducer(data->graph(), temp_zone);
554 AddReducer(data, &graph_reducer, &vn_reducer);
555 AddReducer(data, &graph_reducer, &simple_reducer);
556 AddReducer(data, &graph_reducer, &lowering);
557 AddReducer(data, &graph_reducer, &machine_reducer);
558 AddReducer(data, &graph_reducer, &common_reducer);
559 graph_reducer.ReduceGraph();
564 struct ControlReductionPhase {
565 void Run(PipelineData* data, Zone* temp_zone) {
566 SourcePositionTable::Scope pos(data->source_positions(),
567 SourcePosition::Unknown());
568 ControlReducer::ReduceGraph(temp_zone, data->jsgraph(), data->common());
573 struct EarlyControlReductionPhase : ControlReductionPhase {
574 static const char* phase_name() { return "early control reduction"; }
578 struct LateControlReductionPhase : ControlReductionPhase {
579 static const char* phase_name() { return "late control reduction"; }
583 struct StressLoopPeelingPhase {
584 static const char* phase_name() { return "stress loop peeling"; }
586 void Run(PipelineData* data, Zone* temp_zone) {
587 SourcePositionTable::Scope pos(data->source_positions(),
588 SourcePosition::Unknown());
589 // Peel the first outer loop for testing.
590 // TODO(titzer): peel all loops? the N'th loop? Innermost loops?
591 LoopTree* loop_tree = LoopFinder::BuildLoopTree(data->graph(), temp_zone);
592 if (loop_tree != NULL && loop_tree->outer_loops().size() > 0) {
593 LoopPeeler::Peel(data->graph(), data->common(), loop_tree,
594 loop_tree->outer_loops()[0], temp_zone);
600 struct GenericLoweringPhase {
601 static const char* phase_name() { return "generic lowering"; }
603 void Run(PipelineData* data, Zone* temp_zone) {
604 SourcePositionTable::Scope pos(data->source_positions(),
605 SourcePosition::Unknown());
606 JSGenericLowering generic(data->info()->is_typing_enabled(),
608 SelectLowering select(data->jsgraph()->graph(), data->jsgraph()->common());
609 GraphReducer graph_reducer(data->graph(), temp_zone);
610 AddReducer(data, &graph_reducer, &generic);
611 AddReducer(data, &graph_reducer, &select);
612 graph_reducer.ReduceGraph();
617 struct ComputeSchedulePhase {
618 static const char* phase_name() { return "scheduling"; }
620 void Run(PipelineData* data, Zone* temp_zone) {
621 Schedule* schedule = Scheduler::ComputeSchedule(
622 temp_zone, data->graph(), data->info()->is_splitting_enabled()
623 ? Scheduler::kSplitNodes
624 : Scheduler::kNoFlags);
625 if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule);
626 data->set_schedule(schedule);
631 struct InstructionSelectionPhase {
632 static const char* phase_name() { return "select instructions"; }
634 void Run(PipelineData* data, Zone* temp_zone, Linkage* linkage) {
635 InstructionSelector selector(temp_zone, data->graph()->NodeCount(), linkage,
636 data->sequence(), data->schedule(),
637 data->source_positions());
638 selector.SelectInstructions();
643 struct MeetRegisterConstraintsPhase {
644 static const char* phase_name() { return "meet register constraints"; }
646 void Run(PipelineData* data, Zone* temp_zone) {
647 data->register_allocator()->MeetRegisterConstraints();
652 struct ResolvePhisPhase {
653 static const char* phase_name() { return "resolve phis"; }
655 void Run(PipelineData* data, Zone* temp_zone) {
656 data->register_allocator()->ResolvePhis();
661 struct BuildLiveRangesPhase {
662 static const char* phase_name() { return "build live ranges"; }
664 void Run(PipelineData* data, Zone* temp_zone) {
665 data->register_allocator()->BuildLiveRanges();
670 struct AllocateGeneralRegistersPhase {
671 static const char* phase_name() { return "allocate general registers"; }
673 void Run(PipelineData* data, Zone* temp_zone) {
674 data->register_allocator()->AllocateGeneralRegisters();
679 struct AllocateDoubleRegistersPhase {
680 static const char* phase_name() { return "allocate double registers"; }
682 void Run(PipelineData* data, Zone* temp_zone) {
683 data->register_allocator()->AllocateDoubleRegisters();
688 struct AssignSpillSlotsPhase {
689 static const char* phase_name() { return "assign spill slots"; }
691 void Run(PipelineData* data, Zone* temp_zone) {
692 data->register_allocator()->AssignSpillSlots();
697 struct CommitAssignmentPhase {
698 static const char* phase_name() { return "commit assignment"; }
700 void Run(PipelineData* data, Zone* temp_zone) {
701 data->register_allocator()->CommitAssignment();
706 struct PopulatePointerMapsPhase {
707 static const char* phase_name() { return "populate pointer maps"; }
709 void Run(PipelineData* data, Zone* temp_zone) {
710 data->register_allocator()->PopulatePointerMaps();
715 struct ConnectRangesPhase {
716 static const char* phase_name() { return "connect ranges"; }
718 void Run(PipelineData* data, Zone* temp_zone) {
719 data->register_allocator()->ConnectRanges();
724 struct ResolveControlFlowPhase {
725 static const char* phase_name() { return "resolve control flow"; }
727 void Run(PipelineData* data, Zone* temp_zone) {
728 data->register_allocator()->ResolveControlFlow();
733 struct OptimizeMovesPhase {
734 static const char* phase_name() { return "optimize moves"; }
736 void Run(PipelineData* data, Zone* temp_zone) {
737 MoveOptimizer move_optimizer(temp_zone, data->sequence());
738 move_optimizer.Run();
743 struct JumpThreadingPhase {
744 static const char* phase_name() { return "jump threading"; }
746 void Run(PipelineData* data, Zone* temp_zone) {
747 ZoneVector<BasicBlock::RpoNumber> result(temp_zone);
748 if (JumpThreading::ComputeForwarding(temp_zone, result, data->sequence())) {
749 JumpThreading::ApplyForwarding(result, data->sequence());
755 struct GenerateCodePhase {
756 static const char* phase_name() { return "generate code"; }
758 void Run(PipelineData* data, Zone* temp_zone, Linkage* linkage) {
759 CodeGenerator generator(data->frame(), linkage, data->sequence(),
761 data->set_code(generator.GenerateCode());
// Unnamed tracing phase (phase_name() == nullptr suppresses statistics):
// dumps the current graph as DOT, as JSON for the visualizer, and — with
// --trace-turbo-graph — as text. NOTE(review): this listing has gaps; the
// scoping braces around the dot/json sections and the textual graph dump
// lines appear to be missing.
766 struct PrintGraphPhase {
767 static const char* phase_name() { return nullptr; }
769 void Run(PipelineData* data, Zone* temp_zone, const char* phase) {
770 CompilationInfo* info = data->info();
771 Graph* graph = data->graph();
// Dump a fresh .dot file for this phase.
774 FILE* dot_file = OpenVisualizerLogFile(info, phase, "dot", "w+");
775 if (dot_file == nullptr) return;
776 OFStream dot_of(dot_file);
777 dot_of << AsDOT(*graph);
// Append this phase's graph to the visualizer JSON file.
782 FILE* json_file = OpenVisualizerLogFile(info, NULL, "json", "a+");
783 if (json_file == nullptr) return;
784 OFStream json_of(json_file);
785 json_of << "{\"name\":\"" << phase << "\",\"type\":\"graph\",\"data\":"
786 << AsJSON(*graph, data->source_positions()) << "},\n";
790 if (FLAG_trace_turbo_graph) { // Simple textual RPO.
792 os << "-- Graph after " << phase << " -- " << std::endl;
799 struct VerifyGraphPhase {
800 static const char* phase_name() { return nullptr; }
802 void Run(PipelineData* data, Zone* temp_zone, const bool untyped) {
803 Verifier::Run(data->graph(), FLAG_turbo_types && !untyped
805 : Verifier::UNTYPED);
810 void Pipeline::BeginPhaseKind(const char* phase_kind_name) {
811 if (data_->pipeline_statistics() != NULL) {
812 data_->pipeline_statistics()->BeginPhaseKind(phase_kind_name);
817 void Pipeline::RunPrintAndVerify(const char* phase, bool untyped) {
818 if (FLAG_trace_turbo) {
819 Run<PrintGraphPhase>(phase);
821 if (FLAG_turbo_verify) {
822 Run<VerifyGraphPhase>(untyped);
// Main TurboFan entry point: bails out for unsupported cases, sets up
// statistics/tracing, builds the graph, runs the optimization phases, and
// hands off to ScheduleAndGenerateCode(). NOTE(review): this listing has
// gaps (e.g. a ZonePool declaration before line 855 and several closing
// braces appear to be missing); confirm against the original file.
827 Handle<Code> Pipeline::GenerateCode() {
// Bailout: OSR compilation disabled by flag.
828 if (info()->is_osr() && !FLAG_turbo_osr) {
829 // TODO(turbofan): remove this flag and always handle OSR
830 info()->RetryOptimization(kOsrCompileFailed);
831 return Handle<Code>::null();
834 // TODO(mstarzinger): This is just a temporary hack to make TurboFan work,
835 // the correct solution is to restore the context register after invoking
836 // builtins from full-codegen.
837 Handle<SharedFunctionInfo> shared = info()->shared_info();
838 if (isolate()->bootstrapper()->IsActive() ||
839 shared->disable_optimization_reason() ==
840 kBuiltinFunctionCannotBeOptimized) {
841 shared->DisableOptimization(kBuiltinFunctionCannotBeOptimized);
842 return Handle<Code>::null();
845 // TODO(dslomov): support turbo optimization of subclass constructors.
846 if (IsSubclassConstructor(shared->kind())) {
847 shared->DisableOptimization(kSuperReference);
848 return Handle<Code>::null();
// Statistics are only collected under --turbo-stats.
852 SmartPointer<PipelineStatistics> pipeline_statistics;
854 if (FLAG_turbo_stats) {
855 pipeline_statistics.Reset(new PipelineStatistics(info(), &zone_pool));
856 pipeline_statistics->BeginPhaseKind("initializing");
// Under --trace-turbo, start the visualizer JSON file with function
// metadata and an escaped copy of the source.
859 if (FLAG_trace_turbo) {
860 FILE* json_file = OpenVisualizerLogFile(info(), NULL, "json", "w+");
861 if (json_file != nullptr) {
862 OFStream json_of(json_file);
863 Handle<Script> script = info()->script();
864 FunctionLiteral* function = info()->function();
865 SmartArrayPointer<char> function_name =
866 info()->shared_info()->DebugName()->ToCString();
867 int pos = info()->shared_info()->start_position();
868 json_of << "{\"function\":\"" << function_name.get()
869 << "\", \"sourcePosition\":" << pos << ", \"source\":\"";
870 if (!script->IsUndefined() && !script->source()->IsUndefined()) {
871 DisallowHeapAllocation no_allocation;
872 int start = function->start_position();
873 int len = function->end_position() - start;
874 String::SubStringRange source(String::cast(script->source()), start,
876 for (const auto& c : source) {
877 json_of << AsEscapedUC16ForJSON(c);
880 json_of << "\",\n\"phases\":[";
885 PipelineData data(&zone_pool, info(), pipeline_statistics.get());
888 BeginPhaseKind("graph creation");
890 if (FLAG_trace_turbo) {
892 os << "---------------------------------------------------\n"
893 << "Begin compiling method " << GetDebugName(info()).get()
894 << " using Turbofan" << std::endl;
895 TurboCfgFile tcf(isolate());
896 tcf << AsC1VCompilation(info());
899 data.source_positions()->AddDecorator();
901 if (FLAG_loop_assignment_analysis) {
902 Run<LoopAssignmentAnalysisPhase>();
905 Run<GraphBuilderPhase>(info()->is_context_specializing());
906 if (data.compilation_failed()) return Handle<Code>::null();
907 RunPrintAndVerify("Initial untyped", true);
909 Run<EarlyControlReductionPhase>();
910 RunPrintAndVerify("Early Control reduced", true);
912 if (info()->is_context_specializing()) {
913 // Specialize the code to the context as aggressively as possible.
914 Run<ContextSpecializerPhase>();
915 RunPrintAndVerify("Context specialized", true);
918 if (info()->is_inlining_enabled()) {
919 Run<InliningPhase>();
920 RunPrintAndVerify("Inlined", true);
923 if (FLAG_print_turbo_replay) {
924 // Print a replay of the initial graph.
925 GraphReplayPrinter::PrintReplay(data.graph());
928 // Bailout here in case target architecture is not supported.
929 if (!SupportedTarget()) return Handle<Code>::null();
// Typing (the Run<TyperPhase>() call appears to be missing from this
// listing).
931 if (info()->is_typing_enabled()) {
934 RunPrintAndVerify("Typed");
937 BeginPhaseKind("lowering");
939 if (info()->is_typing_enabled()) {
940 // Lower JSOperators where we can determine types.
941 Run<TypedLoweringPhase>();
942 RunPrintAndVerify("Lowered typed");
944 if (FLAG_turbo_stress_loop_peeling) {
945 Run<StressLoopPeelingPhase>();
946 RunPrintAndVerify("Loop peeled", true);
949 if (info()->is_osr()) {
950 Run<OsrDeconstructionPhase>();
951 if (info()->bailout_reason() != kNoReason) return Handle<Code>::null();
952 RunPrintAndVerify("OSR deconstruction");
955 // Lower simplified operators and insert changes.
956 Run<SimplifiedLoweringPhase>();
957 RunPrintAndVerify("Lowered simplified");
959 // Optimize control flow.
960 if (FLAG_turbo_switch) {
961 Run<ControlFlowOptimizationPhase>();
962 RunPrintAndVerify("Control flow optimized");
965 // Lower changes that have been inserted before.
966 Run<ChangeLoweringPhase>();
967 // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
968 RunPrintAndVerify("Lowered changes", true);
970 Run<LateControlReductionPhase>();
971 RunPrintAndVerify("Late Control reduced");
973 if (info()->is_osr()) {
974 Run<OsrDeconstructionPhase>();
975 if (info()->bailout_reason() != kNoReason) return Handle<Code>::null();
976 RunPrintAndVerify("OSR deconstruction");
980 // Lower any remaining generic JSOperators.
981 Run<GenericLoweringPhase>();
982 // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
983 RunPrintAndVerify("Lowered generic", true);
985 BeginPhaseKind("block building");
987 data.source_positions()->RemoveDecorator();
989 return ScheduleAndGenerateCode(
990 Linkage::ComputeIncoming(data.instruction_zone(), info()));
// Testing entry point: computes the incoming call descriptor for {info}
// and delegates. NOTE(review): the `Graph* graph,` parameter lines and the
// ZonePool declarations appear to be missing throughout this listing —
// confirm against the original file.
994 Handle<Code> Pipeline::GenerateCodeForTesting(CompilationInfo* info,
996 Schedule* schedule) {
997 CallDescriptor* call_descriptor =
998 Linkage::ComputeIncoming(info->zone(), info);
999 return GenerateCodeForTesting(info, call_descriptor, graph, schedule);
// Testing entry point: wraps the graph in a fake-stub CompilationInfo and
// delegates.
1003 Handle<Code> Pipeline::GenerateCodeForTesting(Isolate* isolate,
1004 CallDescriptor* call_descriptor,
1006 Schedule* schedule) {
1007 FakeStubForTesting stub(isolate);
1008 CompilationInfo info(&stub, isolate, graph->zone());
1009 return GenerateCodeForTesting(&info, call_descriptor, graph, schedule);
// Testing entry point: builds a machine-graph PipelineData and runs only
// scheduling plus code generation.
1013 Handle<Code> Pipeline::GenerateCodeForTesting(CompilationInfo* info,
1014 CallDescriptor* call_descriptor,
1016 Schedule* schedule) {
1017 // Construct a pipeline for scheduling and code generation.
1019 PipelineData data(&zone_pool, info, graph, schedule);
1020 Pipeline pipeline(info);
1021 pipeline.data_ = &data;
1022 if (data.schedule() == nullptr) {
1023 // TODO(rossberg): Should this really be untyped?
1024 pipeline.RunPrintAndVerify("Machine", true);
1027 return pipeline.ScheduleAndGenerateCode(call_descriptor);
// Testing entry point: runs only the register allocator over a pre-built
// instruction sequence; returns false if allocation failed.
1031 bool Pipeline::AllocateRegistersForTesting(const RegisterConfiguration* config,
1032 InstructionSequence* sequence,
1033 bool run_verifier) {
1034 FakeStubForTesting stub(sequence->isolate());
1035 CompilationInfo info(&stub, sequence->isolate(), sequence->zone());
1037 PipelineData data(&zone_pool, &info, sequence);
1038 Pipeline pipeline(&info);
1039 pipeline.data_ = &data;
1040 pipeline.AllocateRegisters(config, run_verifier);
1041 return !data.compilation_failed();
// Back half of the pipeline: schedules the graph (if not already
// scheduled), selects instructions, allocates registers, and generates
// machine code. NOTE(review): this listing has gaps (several closing
// braces and argument lines appear to be missing).
1045 Handle<Code> Pipeline::ScheduleAndGenerateCode(
1046 CallDescriptor* call_descriptor) {
1047 PipelineData* data = this->data_;
1049 DCHECK_NOT_NULL(data->graph());
1050 CHECK(SupportedBackend());
1052 if (data->schedule() == nullptr) Run<ComputeSchedulePhase>();
1053 TraceSchedule(data->schedule());
// Optional basic-block profiling instrumentation.
1055 BasicBlockProfiler::Data* profiler_data = NULL;
1056 if (FLAG_turbo_profiling) {
1057 profiler_data = BasicBlockInstrumentor::Instrument(info(), data->graph(),
1061 data->InitializeInstructionSequence();
1063 // Select and schedule instructions covering the scheduled graph.
1064 Linkage linkage(call_descriptor);
1065 Run<InstructionSelectionPhase>(&linkage);
1067 if (FLAG_trace_turbo && !data->MayHaveUnverifiableGraph()) {
1068 TurboCfgFile tcf(isolate());
1069 tcf << AsC1V("CodeGen", data->schedule(), data->source_positions(),
// The graph and its zone are no longer needed past instruction selection.
1073 data->DeleteGraphZone();
1075 BeginPhaseKind("register allocation");
1077 bool run_verifier = FLAG_turbo_verify_allocation;
1078 // Allocate registers.
1079 AllocateRegisters(RegisterConfiguration::ArchDefault(), run_verifier);
1080 if (data->compilation_failed()) {
1081 info()->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
1082 return Handle<Code>();
1085 BeginPhaseKind("code generation");
1087 // Optimize jumps.
1088 if (FLAG_turbo_jt) {
1089 Run<JumpThreadingPhase>();
1092 // Generate final machine code.
1093 Run<GenerateCodePhase>(&linkage);
1095 Handle<Code> code = data->code();
1096 if (profiler_data != NULL) {
1097 #if ENABLE_DISASSEMBLER
1098 std::ostringstream os;
1099 code->Disassemble(NULL, os);
1100 profiler_data->SetCode(&os);
1104 info()->SetCode(code);
1105 v8::internal::CodeGenerator::PrintCode(code, info());
// Append the disassembly to the visualizer JSON file and print a footer.
1107 if (FLAG_trace_turbo) {
1108 FILE* json_file = OpenVisualizerLogFile(info(), NULL, "json", "a+");
1109 if (json_file != nullptr) {
1110 OFStream json_of(json_file);
1112 << "{\"name\":\"disassembly\",\"type\":\"disassembly\",\"data\":\"";
1113 #if ENABLE_DISASSEMBLER
1114 std::stringstream disassembly_stream;
1115 code->Disassemble(NULL, disassembly_stream);
1116 std::string disassembly_string(disassembly_stream.str());
1117 for (const auto& c : disassembly_string) {
1118 json_of << AsEscapedUC16ForJSON(c);
1120 #endif  // ENABLE_DISASSEMBLER
1121 json_of << "\"}\n]}";
1124 OFStream os(stdout);
1125 os << "---------------------------------------------------\n"
1126 << "Finished compiling method " << GetDebugName(info()).get()
1127 << " using Turbofan" << std::endl;
// Runs the full register-allocation sub-pipeline over data->sequence(),
// optionally verifying the assignment. NOTE(review): this listing has gaps
// (e.g. the `if (run_verifier)` guard before line 1142 and several closing
// braces appear to be missing).
1134 void Pipeline::AllocateRegisters(const RegisterConfiguration* config,
1135 bool run_verifier) {
1136 PipelineData* data = this->data_;
1138 // Don't track usage for this zone in compiler stats.
1139 SmartPointer<Zone> verifier_zone;
1140 RegisterAllocatorVerifier* verifier = nullptr;
1142 verifier_zone.Reset(new Zone());
1143 verifier = new (verifier_zone.get()) RegisterAllocatorVerifier(
1144 verifier_zone.get(), config, data->sequence());
1147 SmartArrayPointer<char> debug_name;
1149 debug_name = GetDebugName(data->info());
// The allocator's temporary structures live in this scoped zone.
1152 ZonePool::Scope zone_scope(data->zone_pool());
1153 data->InitializeRegisterAllocator(zone_scope.zone(), config,
1155 if (info()->is_osr()) {
1156 OsrHelper osr_helper(info());
1157 osr_helper.SetupFrame(data->frame());
// Allocation proper: constraints, phis, liveness, then assignment.
1160 Run<MeetRegisterConstraintsPhase>();
1161 Run<ResolvePhisPhase>();
1162 Run<BuildLiveRangesPhase>();
1163 if (FLAG_trace_turbo_graph) {
1164 OFStream os(stdout);
1165 PrintableInstructionSequence printable = {config, data->sequence()};
1166 os << "----- Instruction sequence before register allocation -----\n"
1169 if (verifier != nullptr) {
1170 CHECK(!data->register_allocator()->ExistsUseWithoutDefinition());
1172 Run<AllocateGeneralRegistersPhase>();
1173 Run<AllocateDoubleRegistersPhase>();
1174 Run<AssignSpillSlotsPhase>();
1176 Run<CommitAssignmentPhase>();
1177 Run<PopulatePointerMapsPhase>();
1178 Run<ConnectRangesPhase>();
1179 Run<ResolveControlFlowPhase>();
1180 if (FLAG_turbo_move_optimization) {
1181 Run<OptimizeMovesPhase>();
1184 if (FLAG_trace_turbo_graph) {
1185 OFStream os(stdout);
1186 PrintableInstructionSequence printable = {config, data->sequence()};
1187 os << "----- Instruction sequence after register allocation -----\n"
// Verify the final assignment when requested.
1191 if (verifier != nullptr) {
1192 verifier->VerifyAssignment();
1193 verifier->VerifyGapMoves();
1196 if (FLAG_trace_turbo && !data->MayHaveUnverifiableGraph()) {
1197 TurboCfgFile tcf(data->isolate());
1198 tcf << AsC1VAllocator("CodeGen", data->register_allocator());
1202 } // namespace compiler
1203 } // namespace internal