// NOTE(review): this span is a diff fragment ('-' = removed, '+' = added),
// not plain C++, so it is annotated rather than rewritten.  The change
// narrows BenchmarkRunner::runConfiguration() from taking every
// SnippetRepetitor (ArrayRef) to taking exactly one; the loop over
// repetitors — and the 'min' aggregation of their measurements — moves to
// the caller (see the driver-loop hunk further down).  Parts of the
// function are elided between hunks (e.g. NewMeasurements is produced by
// code not shown here).
Expected<InstructionBenchmark> BenchmarkRunner::runConfiguration(
    const BenchmarkCode &BC, unsigned NumRepetitions, unsigned LoopBodySize,
-    ArrayRef<std::unique_ptr<const SnippetRepetitor>> Repetitors,
-    bool DumpObjectToDisk) const {
+    const SnippetRepetitor &Repetitor, bool DumpObjectToDisk) const {
  InstructionBenchmark InstrBenchmark;
  InstrBenchmark.Mode = Mode;
  InstrBenchmark.CpuName = std::string(State.getTargetMachine().getTargetCPU());
  InstrBenchmark.Key = BC.Key;
// With a single repetitor per call there is no longer partially-aggregated
// state from a previous repetitor to discard on error, so the RAII
// clearing guard below is deleted by this change.
-  // If we end up having an error, and we've previously succeeded with
-  // some other Repetitor, we want to discard the previous measurements.
-  struct ClearBenchmarkOnReturn {
-    ClearBenchmarkOnReturn(InstructionBenchmark *IB) : IB(IB) {}
-    ~ClearBenchmarkOnReturn() {
-      if (Clear)
-        IB->Measurements.clear();
-    }
-    void disarm() { Clear = false; }
-
-  private:
-    InstructionBenchmark *const IB;
-    bool Clear = true;
-  };
-  ClearBenchmarkOnReturn CBOR(&InstrBenchmark);
-
// The for-loop becomes a plain scope block so the body's indentation (and
// hence the diff) stays minimal.
-  for (const std::unique_ptr<const SnippetRepetitor> &Repetitor : Repetitors) {
+  {
    // Assemble at least kMinInstructionsForSnippet instructions by repeating
    // the snippet for debug/analysis. This is so that the user clearly
    // understands that the inside instructions are repeated.
    if (Error E = assembleToStream(
            State.getExegesisTarget(), State.createTargetMachine(),
            BC.LiveIns, BC.Key.RegisterInitialValues,
-            Repetitor->Repeat(Instructions, MinInstructionsForSnippet,
-                              LoopBodySizeForSnippet),
+            Repetitor.Repeat(Instructions, MinInstructionsForSnippet,
+                             LoopBodySizeForSnippet),
            OS)) {
      return std::move(E);
    }
    // Assemble NumRepetitions instructions repetitions of the snippet for
    // measurements.
-    const auto Filler = Repetitor->Repeat(
+    const auto Filler = Repetitor.Repeat(
        Instructions, InstrBenchmark.NumRepetitions, LoopBodySize);
    object::OwningBinary<object::ObjectFile> ObjectFile;
    if (BenchmarkSkipMeasurements) {
      InstrBenchmark.Error =
          "in --skip-measurements mode, actual measurements skipped.";
// There is no next repetitor to 'continue' to any more; return the (empty,
// error-annotated) benchmark directly.
-      continue;
+      return InstrBenchmark;
    }
    const FunctionExecutorImpl Executor(State, std::move(ObjectFile),
// NOTE(review): lines are elided from the diff at this point — the Executor
// construction above is cut mid-statement, and NewMeasurements is defined
// in the omitted code.
      BM.PerSnippetValue *= static_cast<double>(Instructions.size()) /
                            InstrBenchmark.NumRepetitions;
    }
// Single repetitor: just adopt this run's measurements.  The cross-repetitor
// std::min folding that used to live here is removed; the caller performs it
// across whole InstructionBenchmark results instead.
-    if (InstrBenchmark.Measurements.empty()) {
-      InstrBenchmark.Measurements = std::move(*NewMeasurements);
-      continue;
-    }
-
-    assert(Repetitors.size() > 1 && !InstrBenchmark.Measurements.empty() &&
-           "We're in an 'min' repetition mode, and need to aggregate new "
-           "result to the existing result.");
-    assert(InstrBenchmark.Measurements.size() == NewMeasurements->size() &&
-           "Expected to have identical number of measurements.");
-    for (auto I : zip(InstrBenchmark.Measurements, *NewMeasurements)) {
-      BenchmarkMeasure &Measurement = std::get<0>(I);
-      BenchmarkMeasure &NewMeasurement = std::get<1>(I);
-      assert(Measurement.Key == NewMeasurement.Key &&
-             "Expected measurements to be symmetric");
-
-      Measurement.PerInstructionValue = std::min(
-          Measurement.PerInstructionValue, NewMeasurement.PerInstructionValue);
-      Measurement.PerSnippetValue =
-          std::min(Measurement.PerSnippetValue, NewMeasurement.PerSnippetValue);
-    }
+    InstrBenchmark.Measurements = std::move(*NewMeasurements);
  }
-  // We successfully measured everything, so don't discard the results.
-  CBOR.disarm();
  return InstrBenchmark;
}
  // NOTE(review): diff fragment of the tool's driver loop ('-' = removed,
  // '+' = added); the enclosing function begins before and ends after this
  // view.  The caller now owns iterating over every SnippetRepetitor and,
  // in AggregateMin mode, folding the per-repetitor results together.
  Meter.emplace(Configurations.size());
  for (const BenchmarkCode &Conf : Configurations) {
    ProgressMeter<>::ProgressMeterStep MeterStep(Meter ? &*Meter : nullptr);
-    InstructionBenchmark Result = ExitOnErr(Runner->runConfiguration(
-        Conf, NumRepetitions, LoopBodySize, Repetitors, DumpObjectToDisk));
+    SmallVector<InstructionBenchmark, 2> AllResults;
+
+    for (const std::unique_ptr<const SnippetRepetitor> &Repetitor :
+         Repetitors) {
+      AllResults.emplace_back(ExitOnErr(Runner->runConfiguration(
+          Conf, NumRepetitions, LoopBodySize, *Repetitor, DumpObjectToDisk)));
+    }
// The first result is the one written out below; in AggregateMin mode the
// remaining results are folded into it by taking the per-measurement
// minimum, and their assembled snippets are concatenated for the record.
+    InstructionBenchmark &Result = AllResults.front();
+
+    if (RepetitionMode == InstructionBenchmark::RepetitionModeE::AggregateMin) {
+      assert(!Result.Measurements.empty() &&
+             "We're in an 'min' repetition mode, and need to aggregate new "
+             "result to the existing result.");
+      for (const InstructionBenchmark &OtherResult :
+           ArrayRef<InstructionBenchmark>(AllResults).drop_front()) {
+        llvm::append_range(Result.AssembledSnippet,
+                           OtherResult.AssembledSnippet);
+        assert(OtherResult.Measurements.size() == Result.Measurements.size() &&
+               "Expected to have identical number of measurements.");
+        for (auto I : zip(Result.Measurements, OtherResult.Measurements)) {
+          BenchmarkMeasure &Measurement = std::get<0>(I);
+          const BenchmarkMeasure &NewMeasurement = std::get<1>(I);
+          assert(Measurement.Key == NewMeasurement.Key &&
+                 "Expected measurements to be symmetric");
+
+          Measurement.PerInstructionValue =
+              std::min(Measurement.PerInstructionValue,
+                       NewMeasurement.PerInstructionValue);
+          Measurement.PerSnippetValue = std::min(
+              Measurement.PerSnippetValue, NewMeasurement.PerSnippetValue);
+        }
+      }
+    }
+
    ExitOnFileError(BenchmarkFile, Result.writeYaml(State, BenchmarkFile));
  }
  exegesis::pfm::pfmTerminate();