"src/compiler/value-numbering-reducer.h",
"src/compiler/verifier.cc",
"src/compiler/verifier.h",
+ 'src/compiler/zone-pool.cc',
+ 'src/compiler/zone-pool.h',
"src/compiler.cc",
"src/compiler.h",
"src/contexts.cc",
#include "src/compiler/typer.h"
#include "src/compiler/value-numbering-reducer.h"
#include "src/compiler/verifier.h"
+#include "src/compiler/zone-pool.h"
#include "src/hydrogen.h"
#include "src/ostreams.h"
#include "src/utils.h"
public:
enum PhaseKind { CREATE_GRAPH, OPTIMIZATION, CODEGEN };
- PhaseStats(CompilationInfo* info, PhaseKind kind, const char* name)
+ PhaseStats(CompilationInfo* info, ZonePool* zone_pool, PhaseKind kind,
+ const char* name)
: info_(info),
+ stats_scope_(zone_pool),
kind_(kind),
name_(name),
- size_(info->zone()->allocation_size()) {
+ size_(0) {
if (FLAG_turbo_stats) {
timer_.Start();
+ size_ = info_->zone()->allocation_size();
}
}
~PhaseStats() {
if (FLAG_turbo_stats) {
base::TimeDelta delta = timer_.Elapsed();
- size_t bytes = info_->zone()->allocation_size() - size_;
+ size_t bytes = info_->zone()->allocation_size() +
+ stats_scope_.GetMaxAllocatedBytes() - size_;
HStatistics* stats = info_->isolate()->GetTStatistics();
stats->SaveTiming(name_, delta, static_cast<int>(bytes));
private:
CompilationInfo* info_;
+ ZonePool::StatsScope stats_scope_;
PhaseKind kind_;
const char* name_;
size_t size_;
PrintCompilationStart();
}
+ ZonePool zone_pool(isolate());
+
// Build the graph.
Graph graph(zone());
SourcePositionTable source_positions(&graph);
JSGraph jsgraph(&graph, &common, &javascript, &machine);
Node* context_node;
{
- PhaseStats graph_builder_stats(info(), PhaseStats::CREATE_GRAPH,
+ PhaseStats graph_builder_stats(info(), &zone_pool, PhaseStats::CREATE_GRAPH,
"graph builder");
AstGraphBuilderWithPositions graph_builder(info(), &jsgraph,
&source_positions);
context_node = graph_builder.GetFunctionContext();
}
{
- PhaseStats phi_reducer_stats(info(), PhaseStats::CREATE_GRAPH,
+ PhaseStats phi_reducer_stats(info(), &zone_pool, PhaseStats::CREATE_GRAPH,
"phi reduction");
PhiReducer phi_reducer;
GraphReducer graph_reducer(&graph);
if (info()->is_typing_enabled()) {
{
// Type the graph.
- PhaseStats typer_stats(info(), PhaseStats::CREATE_GRAPH, "typer");
+ PhaseStats typer_stats(info(), &zone_pool, PhaseStats::CREATE_GRAPH,
+ "typer");
typer.Run();
VerifyAndPrintGraph(&graph, "Typed");
}
{
// Lower JSOperators where we can determine types.
- PhaseStats lowering_stats(info(), PhaseStats::CREATE_GRAPH,
+ PhaseStats lowering_stats(info(), &zone_pool, PhaseStats::CREATE_GRAPH,
"typed lowering");
SourcePositionTable::Scope pos(&source_positions,
SourcePosition::Unknown());
}
{
// Lower simplified operators and insert changes.
- PhaseStats lowering_stats(info(), PhaseStats::CREATE_GRAPH,
+ PhaseStats lowering_stats(info(), &zone_pool, PhaseStats::CREATE_GRAPH,
"simplified lowering");
SourcePositionTable::Scope pos(&source_positions,
SourcePosition::Unknown());
}
{
// Lower changes that have been inserted before.
- PhaseStats lowering_stats(info(), PhaseStats::OPTIMIZATION,
+ PhaseStats lowering_stats(info(), &zone_pool, PhaseStats::OPTIMIZATION,
"change lowering");
SourcePositionTable::Scope pos(&source_positions,
SourcePosition::Unknown());
{
SourcePositionTable::Scope pos(&source_positions,
SourcePosition::Unknown());
- PhaseStats control_reducer_stats(info(), PhaseStats::CREATE_GRAPH,
- "control reduction");
+ PhaseStats control_reducer_stats(
+ info(), &zone_pool, PhaseStats::CREATE_GRAPH, "control reduction");
ControlReducer::ReduceGraph(&jsgraph, &common);
VerifyAndPrintGraph(&graph, "Control reduced");
{
// Lower any remaining generic JSOperators.
- PhaseStats lowering_stats(info(), PhaseStats::CREATE_GRAPH,
+ PhaseStats lowering_stats(info(), &zone_pool, PhaseStats::CREATE_GRAPH,
"generic lowering");
SourcePositionTable::Scope pos(&source_positions,
SourcePosition::Unknown());
Handle<Code> code = Handle<Code>::null();
{
// Compute a schedule.
- Schedule* schedule = ComputeSchedule(&graph);
+ Schedule* schedule = ComputeSchedule(&zone_pool, &graph);
// Generate optimized code.
- PhaseStats codegen_stats(info(), PhaseStats::CODEGEN, "codegen");
+ PhaseStats codegen_stats(info(), &zone_pool, PhaseStats::CODEGEN,
+ "codegen");
Linkage linkage(info());
code = GenerateCode(&linkage, &graph, schedule, &source_positions);
info()->SetCode(code);
}
-Schedule* Pipeline::ComputeSchedule(Graph* graph) {
- PhaseStats schedule_stats(info(), PhaseStats::CODEGEN, "scheduling");
- Schedule* schedule = Scheduler::ComputeSchedule(graph);
+Schedule* Pipeline::ComputeSchedule(ZonePool* zone_pool, Graph* graph) {
+ PhaseStats schedule_stats(info(), zone_pool, PhaseStats::CODEGEN,
+ "scheduling");
+ Schedule* schedule = Scheduler::ComputeSchedule(zone_pool, graph);
TraceSchedule(schedule);
if (VerifyGraphs()) ScheduleVerifier::Run(schedule);
return schedule;
if (schedule == NULL) {
// TODO(rossberg): Should this really be untyped?
VerifyAndPrintGraph(graph, "Machine", true);
- schedule = ComputeSchedule(graph);
+ ZonePool zone_pool(isolate());
+ schedule = ComputeSchedule(&zone_pool, graph);
}
TraceSchedule(schedule);
class RegisterAllocator;
class Schedule;
class SourcePositionTable;
+class ZonePool;
class Pipeline {
public:
Isolate* isolate() { return info_->isolate(); }
Zone* zone() { return info_->zone(); }
- Schedule* ComputeSchedule(Graph* graph);
+ Schedule* ComputeSchedule(ZonePool* zone_pool, Graph* graph);
void OpenTurboCfgFile(std::ofstream* stream);
void PrintCompilationStart();
void PrintScheduleAndInstructions(const char* phase, const Schedule* schedule,
Schedule* RawMachineAssembler::Export() {
// Compute the correct codegen order.
DCHECK(schedule_->rpo_order()->empty());
- Scheduler::ComputeSpecialRPO(schedule_);
+ ZonePool zone_pool(isolate());
+ Scheduler::ComputeSpecialRPO(&zone_pool, schedule_);
// Invalidate MachineAssembler.
Schedule* schedule = schedule_;
schedule_ = NULL;
}
-Scheduler::Scheduler(Zone* zone, Graph* graph, Schedule* schedule)
- : zone_(zone),
+Scheduler::Scheduler(ZonePool* zone_pool, Zone* zone, Graph* graph,
+ Schedule* schedule)
+ : zone_pool_(zone_pool),
+ zone_(zone),
graph_(graph),
schedule_(schedule),
scheduled_nodes_(zone),
has_floating_control_(false) {}
-Schedule* Scheduler::ComputeSchedule(Graph* graph) {
+Schedule* Scheduler::ComputeSchedule(ZonePool* zone_pool, Graph* graph) {
Schedule* schedule;
bool had_floating_control = false;
do {
- Zone tmp_zone(graph->zone()->isolate());
+ ZonePool::Scope zone_scope(zone_pool);
schedule = new (graph->zone())
Schedule(graph->zone(), static_cast<size_t>(graph->NodeCount()));
- Scheduler scheduler(&tmp_zone, graph, schedule);
+ Scheduler scheduler(zone_pool, zone_scope.zone(), graph, schedule);
scheduler.BuildCFG();
- Scheduler::ComputeSpecialRPO(schedule);
+ Scheduler::ComputeSpecialRPO(zone_pool, schedule);
scheduler.GenerateImmediateDominatorTree();
scheduler.PrepareUses();
ScheduleLateNodeVisitor schedule_late_visitor(this);
{
- Zone zone(zone_->isolate());
+ ZonePool::Scope zone_scope(zone_pool_);
+ Zone* zone = zone_scope.zone();
GenericGraphVisit::Visit<ScheduleLateNodeVisitor,
NodeInputIterationTraits<Node> >(
- graph_, &zone, schedule_root_nodes_.begin(), schedule_root_nodes_.end(),
+ graph_, zone, schedule_root_nodes_.begin(), schedule_root_nodes_.end(),
&schedule_late_visitor);
}
// 2. All loops are contiguous in the order (i.e. no intervening blocks that
// do not belong to the loop.)
// Note a simple RPO traversal satisfies (1) but not (3).
-BasicBlockVector* Scheduler::ComputeSpecialRPO(Schedule* schedule) {
- Zone tmp_zone(schedule->zone()->isolate());
- Zone* zone = &tmp_zone;
+BasicBlockVector* Scheduler::ComputeSpecialRPO(ZonePool* zone_pool,
+ Schedule* schedule) {
+ ZonePool::Scope zone_scope(zone_pool);
+ Zone* zone = zone_scope.zone();
Trace("--- COMPUTING SPECIAL RPO ----------------------------------\n");
// RPO should not have been computed for this schedule yet.
CHECK_EQ(kBlockUnvisited1, schedule->start()->rpo_number());
#include "src/compiler/opcodes.h"
#include "src/compiler/schedule.h"
+#include "src/compiler/zone-pool.h"
#include "src/zone-containers.h"
namespace v8 {
public:
// The complete scheduling algorithm. Creates a new schedule and places all
// nodes from the graph into it.
- static Schedule* ComputeSchedule(Graph* graph);
+ static Schedule* ComputeSchedule(ZonePool* zone_pool, Graph* graph);
// Compute the RPO of blocks in an existing schedule.
- static BasicBlockVector* ComputeSpecialRPO(Schedule* schedule);
+ static BasicBlockVector* ComputeSpecialRPO(ZonePool* zone_pool,
+ Schedule* schedule);
private:
enum Placement { kUnknown, kSchedulable, kFixed };
// or not yet known.
};
+ ZonePool* zone_pool_;
Zone* zone_;
Graph* graph_;
Schedule* schedule_;
ZoneVector<SchedulerData> node_data_;
bool has_floating_control_;
- Scheduler(Zone* zone, Graph* graph, Schedule* schedule);
+ Scheduler(ZonePool* zone_pool, Zone* zone, Graph* graph, Schedule* schedule);
SchedulerData DefaultSchedulerData();
--- /dev/null
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/compiler/zone-pool.h"
+
+namespace v8 {
+namespace internal {
+namespace compiler {
+
+// Begins a measurement scope: snapshots the allocation size of every zone
+// currently handed out by the pool, so later queries report only bytes
+// allocated while this scope was active. Scopes must nest strictly (LIFO).
+ZonePool::StatsScope::StatsScope(ZonePool* zone_pool)
+    : zone_pool_(zone_pool), max_allocated_bytes_(0) {
+  zone_pool_->stats_.push_back(this);
+  for (auto zone : zone_pool_->used_) {
+    size_t size = static_cast<size_t>(zone->allocation_size());
+    // Remember the baseline size; it is subtracted when reporting totals.
+    std::pair<InitialValues::iterator, bool> res =
+        initial_values_.insert(std::make_pair(zone, size));
+    USE(res);
+    DCHECK(res.second);  // A zone may occur in used_ at most once.
+  }
+}
+
+
+ZonePool::StatsScope::~StatsScope() {
+  // Enforce strictly nested (LIFO) scope usage.
+  DCHECK_EQ(zone_pool_->stats_.back(), this);
+  zone_pool_->stats_.pop_back();
+}
+
+
+// Returns the high-water mark of bytes allocated during this scope. The
+// maximum recorded when zones were returned may lag behind the current
+// live total, hence the max with GetCurrentAllocatedBytes().
+size_t ZonePool::StatsScope::GetMaxAllocatedBytes() {
+  return std::max(max_allocated_bytes_, GetCurrentAllocatedBytes());
+}
+
+
+// Sums bytes currently allocated in all live zones, excluding bytes that
+// were already allocated when this scope started.
+size_t ZonePool::StatsScope::GetCurrentAllocatedBytes() {
+  size_t total = 0;
+  for (Zone* zone : zone_pool_->used_) {
+    total += static_cast<size_t>(zone->allocation_size());
+    // Adjust for initial values.
+    InitialValues::iterator it = initial_values_.find(zone);
+    if (it != initial_values_.end()) {
+      total -= it->second;
+    }
+  }
+  return total;
+}
+
+
+// Called by the pool just before |zone| is taken back: folds the zone's
+// contribution into the running maximum and drops its baseline entry.
+void ZonePool::StatsScope::ZoneReturned(Zone* zone) {
+  size_t current_total = GetCurrentAllocatedBytes();
+  // Update max.
+  max_allocated_bytes_ = std::max(max_allocated_bytes_, current_total);
+  // Drop zone from initial value map.
+  InitialValues::iterator it = initial_values_.find(zone);
+  if (it != initial_values_.end()) {
+    initial_values_.erase(it);
+  }
+}
+
+
+// Creates an empty pool bound to |isolate|; zones are created lazily.
+ZonePool::ZonePool(Isolate* isolate)
+    : isolate_(isolate), max_allocated_bytes_(0), total_deleted_bytes_(0) {}
+
+
+ZonePool::~ZonePool() {
+  // All Scope and StatsScope objects must have been destroyed first.
+  DCHECK(used_.empty());
+  DCHECK(stats_.empty());
+  for (Zone* zone : unused_) {
+    delete zone;
+  }
+}
+
+
+// Pool-lifetime high-water mark of live zone bytes.
+size_t ZonePool::GetMaxAllocatedBytes() {
+  return std::max(max_allocated_bytes_, GetCurrentAllocatedBytes());
+}
+
+
+// Bytes currently allocated across all zones handed out by the pool.
+size_t ZonePool::GetCurrentAllocatedBytes() {
+  size_t total = 0;
+  for (Zone* zone : used_) {
+    total += static_cast<size_t>(zone->allocation_size());
+  }
+  return total;
+}
+
+
+// Cumulative bytes ever allocated through this pool, including bytes in
+// zones that have since been returned and cleared.
+size_t ZonePool::GetTotalAllocatedBytes() {
+  return total_deleted_bytes_ + GetCurrentAllocatedBytes();
+}
+
+
+// Hands out an empty zone, reusing a previously returned one when
+// available to avoid repeated allocator churn.
+// NOTE(review): std::find/std::max rely on a transitive <algorithm>
+// include via src/v8.h -- confirm, or include it explicitly.
+Zone* ZonePool::NewEmptyZone() {
+  Zone* zone;
+  // Grab a zone from pool if possible.
+  if (!unused_.empty()) {
+    zone = unused_.back();
+    unused_.pop_back();
+  } else {
+    zone = new Zone(isolate_);
+  }
+  used_.push_back(zone);
+  DCHECK_EQ(0, zone->allocation_size());  // Recycled zones must be empty.
+  return zone;
+}
+
+
+// Takes |zone| back from a Scope: updates pool and per-scope statistics,
+// then either caches the cleared zone for reuse or deletes it when the
+// cache already holds kMaxUnusedSize zones.
+void ZonePool::ReturnZone(Zone* zone) {
+  size_t current_total = GetCurrentAllocatedBytes();
+  // Update max.
+  max_allocated_bytes_ = std::max(max_allocated_bytes_, current_total);
+  // Update stats.
+  for (auto stat_scope : stats_) {
+    stat_scope->ZoneReturned(zone);
+  }
+  // Remove from used.
+  Used::iterator it = std::find(used_.begin(), used_.end(), zone);
+  DCHECK(it != used_.end());
+  used_.erase(it);
+  total_deleted_bytes_ += static_cast<size_t>(zone->allocation_size());
+  // Delete zone or clear and stash on unused_.
+  if (unused_.size() >= kMaxUnusedSize) {
+    delete zone;
+  } else {
+    zone->DeleteAll();
+    DCHECK_EQ(0, zone->allocation_size());
+    unused_.push_back(zone);
+  }
+}
+
+} // namespace compiler
+} // namespace internal
+} // namespace v8
--- /dev/null
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_COMPILER_ZONE_POOL_H_
+#define V8_COMPILER_ZONE_POOL_H_
+
+#include <map>
+#include <set>
+#include <vector>
+
+#include "src/v8.h"
+
+namespace v8 {
+namespace internal {
+namespace compiler {
+
+// A pool of recyclable Zones for the compiler pipeline. Zones are handed
+// out through RAII Scope objects and are cleared and cached on return;
+// the pool tracks allocation statistics over its whole lifetime.
+class ZonePool FINAL {
+ public:
+  // RAII handle for a single pooled zone. The zone is created lazily on
+  // first use and returned to the pool on destruction (or via Destroy()).
+  class Scope FINAL {
+   public:
+    explicit Scope(ZonePool* zone_pool) : zone_pool_(zone_pool), zone_(NULL) {}
+    ~Scope() { Destroy(); }
+
+    // Returns this scope's zone, creating it on the first call.
+    Zone* zone() {
+      if (zone_ == NULL) zone_ = zone_pool_->NewEmptyZone();
+      return zone_;
+    }
+    // Returns the zone to the pool early; safe to call more than once.
+    void Destroy() {
+      if (zone_ != NULL) zone_pool_->ReturnZone(zone_);
+      zone_ = NULL;
+    }
+
+   private:
+    ZonePool* const zone_pool_;
+    Zone* zone_;  // NULL until zone() is first called.
+    DISALLOW_COPY_AND_ASSIGN(Scope);
+  };
+
+  // Measures zone memory allocated while the scope is alive. StatsScopes
+  // must be strictly nested (LIFO) within a pool.
+  class StatsScope FINAL {
+   public:
+    explicit StatsScope(ZonePool* zone_pool);
+    ~StatsScope();
+
+    size_t GetMaxAllocatedBytes();      // High-water mark during the scope.
+    size_t GetCurrentAllocatedBytes();  // Live bytes attributed to the scope.
+
+   private:
+    friend class ZonePool;
+    // Notification from the pool that |zone| is being returned.
+    void ZoneReturned(Zone* zone);
+
+    // Maps each zone that was live at scope start to its size back then.
+    typedef std::map<Zone*, size_t> InitialValues;
+
+    ZonePool* const zone_pool_;
+    InitialValues initial_values_;
+    size_t max_allocated_bytes_;
+
+    DISALLOW_COPY_AND_ASSIGN(StatsScope);
+  };
+
+  explicit ZonePool(Isolate* isolate);
+  ~ZonePool();
+
+  size_t GetMaxAllocatedBytes();      // Lifetime high-water mark.
+  size_t GetTotalAllocatedBytes();    // Lifetime cumulative allocation.
+  size_t GetCurrentAllocatedBytes();  // Bytes in currently live zones.
+
+ private:
+  Zone* NewEmptyZone();
+  void ReturnZone(Zone* zone);
+
+  // Maximum number of cleared zones kept around for reuse.
+  static const size_t kMaxUnusedSize = 3;
+  typedef std::vector<Zone*> Unused;
+  typedef std::vector<Zone*> Used;
+  typedef std::vector<StatsScope*> Stats;
+
+  Isolate* const isolate_;
+  Unused unused_;  // Cleared zones available for reuse.
+  Used used_;      // Zones currently handed out to Scopes.
+  Stats stats_;    // Active StatsScopes, innermost last.
+  size_t max_allocated_bytes_;
+  size_t total_deleted_bytes_;
+
+  DISALLOW_COPY_AND_ASSIGN(ZonePool);
+};
+
+} // namespace compiler
+} // namespace internal
+} // namespace v8
+
+#endif
position_ = limit_ = 0;
}
+ allocation_size_ = 0;
// Update the head segment to be the kept segment (if any).
segment_head_ = keep;
}
void allocCode() {
if (schedule.rpo_order()->size() == 0) {
// Compute the RPO order.
- Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(isolate);
+ Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
DCHECK(schedule.rpo_order()->size() > 0);
}
code = new TestInstrSeq(main_zone(), &graph, &schedule);
os << AsDOT(*graph);
}
- Schedule* schedule = Scheduler::ComputeSchedule(graph);
+ ZonePool zone_pool(graph->zone()->isolate());
+ Schedule* schedule = Scheduler::ComputeSchedule(&zone_pool, graph);
if (FLAG_trace_turbo_scheduler) {
OFStream os(stdout);
HandleAndZoneScope scope;
Schedule schedule(scope.main_zone());
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckRPONumbers(order, 1, false);
CHECK_EQ(schedule.start(), order->at(0));
}
Schedule schedule(scope.main_zone());
schedule.AddGoto(schedule.start(), schedule.end());
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckRPONumbers(order, 2, false);
CHECK_EQ(schedule.start(), order->at(0));
CHECK_EQ(schedule.end(), order->at(1));
schedule.AddGoto(last, block);
last = block;
}
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order =
+ Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckRPONumbers(order, 1 + i, false);
for (size_t i = 0; i < schedule.BasicBlockCount(); i++) {
HandleAndZoneScope scope;
Schedule schedule(scope.main_zone());
schedule.AddSuccessor(schedule.start(), schedule.start());
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckRPONumbers(order, 1, true);
BasicBlock* loop[] = {schedule.start()};
CheckLoopContains(loop, 1);
Schedule schedule(scope.main_zone());
schedule.AddSuccessor(schedule.start(), schedule.end());
schedule.AddSuccessor(schedule.end(), schedule.start());
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckRPONumbers(order, 2, true);
BasicBlock* loop[] = {schedule.start(), schedule.end()};
CheckLoopContains(loop, 2);
Schedule schedule(scope.main_zone());
SmartPointer<TestLoop> loop1(CreateLoop(&schedule, 2));
schedule.AddSuccessor(schedule.start(), loop1->header());
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckRPONumbers(order, 3, true);
CheckLoopContains(loop1->nodes, loop1->count);
}
SmartPointer<TestLoop> loop1(CreateLoop(&schedule, 2));
schedule.AddSuccessor(schedule.start(), loop1->header());
schedule.AddSuccessor(loop1->last(), schedule.start());
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckRPONumbers(order, 3, true);
CheckLoopContains(loop1->nodes, loop1->count);
}
schedule.AddSuccessor(B, D);
schedule.AddSuccessor(C, D);
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckRPONumbers(order, 4, false);
CHECK_EQ(0, A->rpo_number());
schedule.AddSuccessor(C, B);
schedule.AddSuccessor(C, D);
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckRPONumbers(order, 4, true);
BasicBlock* loop[] = {B, C};
CheckLoopContains(loop, 2);
schedule.AddSuccessor(C, B);
schedule.AddSuccessor(B, D);
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckRPONumbers(order, 4, true);
BasicBlock* loop[] = {B, C};
CheckLoopContains(loop, 2);
if (i == 9) schedule.AddSuccessor(E, G);
if (i == 10) schedule.AddSuccessor(F, G);
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order =
+ Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckRPONumbers(order, 7, true);
BasicBlock* loop[] = {B, C, D, E, F};
CheckLoopContains(loop, 5);
schedule.AddSuccessor(E, B);
schedule.AddSuccessor(E, F);
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckRPONumbers(order, 6, true);
BasicBlock* loop1[] = {B, C, D, E};
CheckLoopContains(loop1, 4);
schedule.AddSuccessor(F, C);
schedule.AddSuccessor(G, B);
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckRPONumbers(order, 8, true);
BasicBlock* loop1[] = {B, C, D, E, F, G};
CheckLoopContains(loop1, 6);
schedule.AddSuccessor(loop1->header(), loop2->header());
schedule.AddSuccessor(loop2->last(), E);
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckLoopContains(loop1->nodes, loop1->count);
schedule.AddSuccessor(S, loop2->header());
schedule.AddSuccessor(loop2->last(), E);
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckLoopContains(loop1->nodes, loop1->count);
schedule.AddSuccessor(A, loop1->header());
schedule.AddSuccessor(loop1->nodes[exit], loop2->header());
schedule.AddSuccessor(loop2->nodes[exit], E);
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order =
+ Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckLoopContains(loop1->nodes, loop1->count);
CHECK_EQ(static_cast<int>(schedule.BasicBlockCount()),
schedule.AddSuccessor(C, E);
schedule.AddSuccessor(C, B);
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckLoopContains(loop1->nodes, loop1->count);
schedule.AddSuccessor(loop1->nodes[i], loop1->header());
schedule.AddSuccessor(loop1->nodes[j], E);
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order =
+ Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckRPONumbers(order, schedule.BasicBlockCount(), true);
CheckLoopContains(loop1->nodes, loop1->count);
}
schedule.AddSuccessor(loop1->nodes[j], D);
schedule.AddSuccessor(D, E);
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order =
+ Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckRPONumbers(order, schedule.BasicBlockCount(), true);
CheckLoopContains(loop1->nodes, loop1->count);
}
schedule.AddSuccessor(O, E);
}
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order =
+ Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckRPONumbers(order, schedule.BasicBlockCount(), true);
CheckLoopContains(loop1->nodes, loop1->count);
}
schedule.AddSuccessor(loopN[j]->last(), E);
}
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order =
+ Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckRPONumbers(order, schedule.BasicBlockCount(), true);
CheckLoopContains(loop1->nodes, loop1->count);
schedule.AddSuccessor(D, B);
schedule.AddSuccessor(E, B);
- BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&schedule);
+ ZonePool zone_pool(scope.main_isolate());
+ BasicBlockVector* order = Scheduler::ComputeSpecialRPO(&zone_pool, &schedule);
CheckRPONumbers(order, 5, true);
BasicBlock* loop1[] = {B, C, D, E};
graph.SetStart(graph.NewNode(builder.Start(0)));
graph.SetEnd(graph.NewNode(builder.End(), graph.start()));
- USE(Scheduler::ComputeSchedule(&graph));
+ ZonePool zone_pool(scope.main_isolate());
+ USE(Scheduler::ComputeSchedule(&zone_pool, &graph));
}
graph.SetEnd(graph.NewNode(builder.End(), ret));
- USE(Scheduler::ComputeSchedule(&graph));
+ ZonePool zone_pool(scope.main_isolate());
+ USE(Scheduler::ComputeSchedule(&zone_pool, &graph));
}
--- /dev/null
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/base/utils/random-number-generator.h"
+#include "src/compiler/zone-pool.h"
+#include "test/unittests/test-utils.h"
+
+namespace v8 {
+namespace internal {
+namespace compiler {
+
+// Test fixture exposing a ZonePool plus helpers for asserting its
+// pool-wide and per-StatsScope byte accounting.
+class ZonePoolTest : public TestWithIsolate {
+ public:
+  ZonePoolTest() : zone_pool_(isolate()) {}
+
+ protected:
+  ZonePool* zone_pool() { return &zone_pool_; }
+
+  // Asserts the pool's current/max/total allocated byte counters.
+  void ExpectForPool(size_t current, size_t max, size_t total) {
+    ASSERT_EQ(current, zone_pool()->GetCurrentAllocatedBytes());
+    ASSERT_EQ(max, zone_pool()->GetMaxAllocatedBytes());
+    ASSERT_EQ(total, zone_pool()->GetTotalAllocatedBytes());
+  }
+
+  // Asserts a StatsScope's current and maximum byte counters.
+  void Expect(ZonePool::StatsScope* stats, size_t current, size_t max) {
+    ASSERT_EQ(current, stats->GetCurrentAllocatedBytes());
+    ASSERT_EQ(max, stats->GetMaxAllocatedBytes());
+  }
+
+  // Allocates a small random amount in |zone| and returns the number of
+  // bytes allocation_size() actually grew by (may exceed the request).
+  size_t Allocate(Zone* zone) {
+    size_t bytes = rng.NextInt(25) + 7;
+    // NOTE(review): allocation_size() is stored in an int here; assumes
+    // the value fits -- confirm the return type to avoid narrowing.
+    int size_before = zone->allocation_size();
+    zone->New(static_cast<int>(bytes));
+    return static_cast<size_t>(zone->allocation_size() - size_before);
+  }
+
+ private:
+  ZonePool zone_pool_;
+  base::RandomNumberGenerator rng;
+};
+
+
+// An unused pool, and scopes that allocate nothing, report all-zero stats.
+TEST_F(ZonePoolTest, Empty) {
+  ExpectForPool(0, 0, 0);
+  {
+    ZonePool::StatsScope stats(zone_pool());
+    Expect(&stats, 0, 0);
+  }
+  ExpectForPool(0, 0, 0);
+  {
+    ZonePool::Scope scope(zone_pool());
+    scope.zone();  // Creating the zone alone must not allocate any bytes.
+  }
+  ExpectForPool(0, 0, 0);
+}
+
+
+// Allocating in many zones, deleting them, and allocating again must be
+// reflected exactly in pool totals and in a StatsScope opened mid-way.
+TEST_F(ZonePoolTest, MultipleZonesWithDeletion) {
+  static const size_t kArraySize = 10;
+
+  ZonePool::Scope* scopes[kArraySize];
+
+  // Initialize.
+  size_t before_stats = 0;
+  for (size_t i = 0; i < kArraySize; ++i) {
+    scopes[i] = new ZonePool::Scope(zone_pool());
+    before_stats += Allocate(scopes[i]->zone());  // Add some stuff.
+  }
+
+  ExpectForPool(before_stats, before_stats, before_stats);
+
+  // Bytes allocated before this point must not count against the scope.
+  ZonePool::StatsScope stats(zone_pool());
+
+  size_t before_deletion = 0;
+  for (size_t i = 0; i < kArraySize; ++i) {
+    before_deletion += Allocate(scopes[i]->zone());  // Add some stuff.
+  }
+
+  Expect(&stats, before_deletion, before_deletion);
+  ExpectForPool(before_stats + before_deletion, before_stats + before_deletion,
+                before_stats + before_deletion);
+
+  // Delete the scopes and create new ones.
+  for (size_t i = 0; i < kArraySize; ++i) {
+    delete scopes[i];
+    scopes[i] = new ZonePool::Scope(zone_pool());
+  }
+
+  // Current drops to zero; maxima and cumulative totals are retained.
+  Expect(&stats, 0, before_deletion);
+  ExpectForPool(0, before_stats + before_deletion,
+                before_stats + before_deletion);
+
+  size_t after_deletion = 0;
+  for (size_t i = 0; i < kArraySize; ++i) {
+    after_deletion += Allocate(scopes[i]->zone());  // Add some stuff.
+  }
+
+  Expect(&stats, after_deletion, std::max(after_deletion, before_deletion));
+  ExpectForPool(after_deletion,
+                std::max(after_deletion, before_stats + before_deletion),
+                before_stats + before_deletion + after_deletion);
+
+  // Cleanup.
+  for (size_t i = 0; i < kArraySize; ++i) {
+    delete scopes[i];
+  }
+
+  Expect(&stats, 0, std::max(after_deletion, before_deletion));
+  ExpectForPool(0, std::max(after_deletion, before_stats + before_deletion),
+                before_stats + before_deletion + after_deletion);
+}
+
+
+// Repeated allocate/release cycles: an outer StatsScope sees the running
+// high-water mark while each inner StatsScope sees only its own iteration.
+TEST_F(ZonePoolTest, SimpleAllocationLoop) {
+  int runs = 20;
+  size_t total_allocated = 0;
+  size_t max_loop_allocation = 0;
+  ZonePool::StatsScope outer_stats(zone_pool());
+  {
+    ZonePool::Scope outer_scope(zone_pool());
+    size_t outer_allocated = 0;
+    for (int i = 0; i < runs; ++i) {
+      {
+        size_t bytes = Allocate(outer_scope.zone());
+        outer_allocated += bytes;
+        total_allocated += bytes;
+      }
+      ZonePool::StatsScope inner_stats(zone_pool());
+      size_t allocated = 0;
+      {
+        ZonePool::Scope inner_scope(zone_pool());
+        for (int j = 0; j < 20; ++j) {
+          size_t bytes = Allocate(inner_scope.zone());
+          allocated += bytes;
+          total_allocated += bytes;
+          max_loop_allocation =
+              std::max(max_loop_allocation, outer_allocated + allocated);
+          Expect(&inner_stats, allocated, allocated);
+          Expect(&outer_stats, outer_allocated + allocated,
+                 max_loop_allocation);
+          ExpectForPool(outer_allocated + allocated, max_loop_allocation,
+                        total_allocated);
+        }
+      }
+      // Inner zone returned: its bytes leave "current" but stay in maxima.
+      Expect(&inner_stats, 0, allocated);
+      Expect(&outer_stats, outer_allocated, max_loop_allocation);
+      ExpectForPool(outer_allocated, max_loop_allocation, total_allocated);
+    }
+  }
+  Expect(&outer_stats, 0, max_loop_allocation);
+  ExpectForPool(0, max_loop_allocation, total_allocated);
+}
+
+} // namespace compiler
+} // namespace internal
+} // namespace v8
'compiler/simplified-operator-reducer-unittest.cc',
'compiler/simplified-operator-unittest.cc',
'compiler/value-numbering-reducer-unittest.cc',
+ 'compiler/zone-pool-unittest.cc',
'libplatform/default-platform-unittest.cc',
'libplatform/task-queue-unittest.cc',
'libplatform/worker-thread-unittest.cc',
'../../src/compiler/value-numbering-reducer.h',
'../../src/compiler/verifier.cc',
'../../src/compiler/verifier.h',
+ '../../src/compiler/zone-pool.cc',
+ '../../src/compiler/zone-pool.h',
'../../src/compiler.cc',
'../../src/compiler.h',
'../../src/contexts.cc',