2 using System.Collections.Generic;
3 using System.Diagnostics;
6 using System.Runtime.InteropServices;
8 using System.Threading.Tasks;
10 using Microsoft.Xunit.Performance.Api;
11 using Microsoft.Xunit.Performance.Api.Profilers.Etw;
15 public abstract class Benchmark
/// <summary>
/// Creates a benchmark with the given display name. Derived classes populate
/// ExePath/WorkingDirPath/CommandLineArguments later (typically in Setup).
/// </summary>
public Benchmark(string name)
{
    // The visible body never assigned the name parameter, leaving Name null for every
    // log message and result label; store it here.
    Name = name;
    EnvironmentVariables = new Dictionary<string, string>();
}
// Display name of this benchmark; also combined with the configuration name to build
// per-iteration test names (see MeasureIterations).
public string Name { get; private set; }
// Path handed to 'dotnet' as the first command-line token for each iteration;
// settable by derived classes.
public string ExePath { get; protected set; }
// Working directory assigned to the benchmark process.
public string WorkingDirPath { get; protected set; }
// Extra arguments appended after ExePath on the dotnet command line.
public string CommandLineArguments { get; protected set; }
// Benchmark-specific environment variables; merged with the configuration's variables
// into each iteration's process environment.
public Dictionary<string, string> EnvironmentVariables { get; private set; }
/// <summary>
/// Runs every configuration of this benchmark for the given test run, logging under an
/// indented "Run {Name} iterations" section, and returns one result per configuration.
/// </summary>
public BenchmarkRunResult[] Run(TestRun run, ITestOutputHelper output)
{
    using (var sectionOutput = new IndentedTestOutputHelper($"Run {Name} iterations", output))
    {
        BenchmarkRunResult[] results = MeasureIterations(run, sectionOutput);
        return results;
    }
}
37 public abstract Task Setup(DotNetInstallation dotnetInstall, string intermediateOutputDir, bool useExistingSetup, ITestOutputHelper output);
/// <summary>
/// Rewrites the given project files so they target the framework/runtime version of
/// <paramref name="dotNetInstall"/>: existing TargetFramework and RuntimeFrameworkVersion
/// elements are commented out and a new leading PropertyGroup with the requested values
/// is inserted, preserving each file's original text encoding.
/// </summary>
protected void RetargetProjects(
    DotNetInstallation dotNetInstall,
    // NOTE(review): a 'rootDir' parameter is used below but its declaration is not visible
    // in this chunk (presumably 'string rootDir' belongs in this parameter list — confirm).
    IEnumerable<string> projectFileRelativePaths)
    if (string.IsNullOrWhiteSpace(rootDir))
        // BUG(review): this passes the (null/whitespace) rootDir VALUE as the paramName
        // argument of ArgumentNullException — the exception message will not name the
        // offending parameter. Should be: throw new ArgumentNullException(nameof(rootDir));
        throw new ArgumentNullException(rootDir);
    if (!Directory.Exists(rootDir))
        throw new DirectoryNotFoundException($"Root directory was not found: {rootDir}");
    foreach (string projectFileRelativePath in projectFileRelativePaths)
        string projectFile = Path.Combine(rootDir, projectFileRelativePath);
        if (!File.Exists(projectFile))
            throw new FileNotFoundException($"Project file was not found: {projectFile}");
        var doc = new XmlDocument();
        // Read the project so we can write it back in the same encoding.
        // NOTE(review): the declaration of docEncoding and the doc.Load(...) call fall in
        // lines not visible in this chunk.
        using (var fs = new FileStream(projectFile, FileMode.Open, FileAccess.Read, FileShare.Read))
        using (var sr = new StreamReader(fs))
            docEncoding = sr.CurrentEncoding;
        XmlElement root = doc.DocumentElement;

        // Comment out all existing TargetFramework and RuntimeFrameworkVersion elements
        // (ReplaceChild swaps each element for an XML comment holding its original text).
        foreach (XmlElement e in root.SelectNodes("PropertyGroup/TargetFramework").OfType<XmlElement>())
            e.ParentNode.ReplaceChild(doc.CreateComment(e.OuterXml), e);
        foreach (XmlElement e in root.SelectNodes("PropertyGroup/RuntimeFrameworkVersion").OfType<XmlElement>())
            e.ParentNode.ReplaceChild(doc.CreateComment(e.OuterXml), e);

        // Add TargetFramework and RuntimeFrameworkVersion elements with the requested values to the top
        XmlElement propertyGroupElement = doc.CreateElement("PropertyGroup");
        root.PrependChild(propertyGroupElement);

        XmlElement targetFrameworkElement = doc.CreateElement("TargetFramework");
        XmlElement runtimeFrameworkVersionElement = doc.CreateElement("RuntimeFrameworkVersion");
        propertyGroupElement.AppendChild(targetFrameworkElement);
        propertyGroupElement.AppendChild(runtimeFrameworkVersionElement);

        targetFrameworkElement.InnerText =
            DotNetSetup.GetTargetFrameworkMonikerForFrameworkVersion(dotNetInstall.FrameworkVersion);
        runtimeFrameworkVersionElement.InnerText = dotNetInstall.FrameworkVersion;

        // Truncate keeps the existing file (and its attributes) but discards old content
        // before the rewritten XML is saved in the original encoding.
        using (var fs = new FileStream(projectFile, FileMode.Truncate, FileAccess.Write, FileShare.Read))
        using (var sw = new StreamWriter(fs, docEncoding))
/// <summary>
/// Metrics shown by default for this benchmark. The base implementation reports only
/// elapsed wall-clock time; derived classes may override to surface more.
/// </summary>
public virtual Metric[] GetDefaultDisplayMetrics()
{
    var defaults = new Metric[1];
    defaults[0] = Metric.ElapsedTimeMilliseconds;
    return defaults;
}
/// <summary>
/// Does this benchmark run properly on a given architecture?
/// The base implementation supports only x86 and x64.
/// </summary>
public virtual bool IsArchitectureSupported(Architecture arch)
{
    switch (arch)
    {
        case Architecture.X86:
        case Architecture.X64:
            return true;
        default:
            return false;
    }
}
/// <summary>
/// Runs this benchmark once per configuration in the test run, in order, and collects
/// one BenchmarkRunResult per configuration.
/// </summary>
BenchmarkRunResult[] MeasureIterations(TestRun run, ITestOutputHelper output)
{
    return run.Configurations
              .Select(config => MeasureIterations(run, config, output))
              .ToArray();
}
/// <summary>
/// Runs all iterations of this benchmark under a single configuration via
/// XunitPerformanceHarness, capturing child stdout/stderr, and returns the collected
/// per-iteration metrics.
/// </summary>
BenchmarkRunResult MeasureIterations(TestRun run, BenchmarkConfiguration config, ITestOutputHelper output)
    // The XunitPerformanceHarness is hardcoded to log to the console. It would be nice if the output was configurable somehow
    // but in lieu of that we can redirect all console output with light hackery.
    using (var redirector = new ConsoleRedirector(output))
        // XunitPerformanceHarness expects to do the raw commandline parsing itself, but I really don't like that its default collection
        // metric requires the use of ETW. Getting an admin console or admin VS instance isn't where most people start, it's
        // a small nuisance, and for these tests it's often not needed/adds non-trivial overhead. I set the default to stopwatch if the
        // perf:collect argument hasn't been specified, but that sadly requires that I pre-parse, interpret, and then re-format all the
        // args to make that change :(
        //
        // In TestRun.ValidateMetricNames() I pre-check if ETW is going to be needed and give an error there rather than doing all the
        // test setup (~1 minute?) and then giving the error after the user has probably wandered away. That also relies on some of this
        // replicated command line parsing.
        string[] args = new string[] { "--perf:collect", string.Join("+", run.MetricNames), "--perf:outputdir", run.OutputDir, "--perf:runid", run.BenchviewRunId };
        using (var harness = new XunitPerformanceHarness(args))
            // Each iteration launches: <dotnet> <ExePath> <CommandLineArguments> in WorkingDirPath.
            ProcessStartInfo startInfo = new ProcessStartInfo(run.DotNetInstallation.DotNetExe, (ExePath + " " + CommandLineArguments).Trim());
            startInfo.WorkingDirectory = WorkingDirPath;
            startInfo.RedirectStandardError = true;
            startInfo.RedirectStandardOutput = true;
            // Merge config vars, then benchmark vars, then pin DOTNET_MULTILEVEL_LOOKUP=0
            // (presumably so the child doesn't resolve frameworks from machine-wide installs — confirm).
            IEnumerable<KeyValuePair<string, string>> extraEnvVars = config.EnvironmentVariables.Concat(EnvironmentVariables).Append(new KeyValuePair<string, string>("DOTNET_MULTILEVEL_LOOKUP", "0"));
            foreach (KeyValuePair<string, string> kv in extraEnvVars)
                startInfo.Environment[kv.Key] = kv.Value;
            output.WriteLine("XUnitPerfHarness doesn't log env vars it uses to run processes. To workaround, logging them here:");
            output.WriteLine($"Environment variables: {string.Join(", ", extraEnvVars.Select(kv => kv.Key + "=" + kv.Value))}");
            output.WriteLine($"Working directory: \"{startInfo.WorkingDirectory}\"");
            output.WriteLine($"Command line: \"{startInfo.FileName}\" {startInfo.Arguments}");

            BenchmarkRunResult result = new BenchmarkRunResult(this, config);
            // Builders shared with the event handlers below; PostIterationDelegate snapshots
            // them for each iteration's metrics.
            StringBuilder stderr = new StringBuilder();
            StringBuilder stdout = new StringBuilder();
            var scenarioConfiguration = new ScenarioTestConfiguration(TimeSpan.FromMinutes(60), startInfo)
                // XUnitPerformanceHarness writes files to disk starting with {runid}-{ScenarioBenchmarkName}-{TestName}
                TestName = (Name + "-" + config.Name).Replace(' ', '_'),
                Scenario = new ScenarioBenchmark("JitBench"),
                Iterations = run.Iterations,
                PreIterationDelegate = scenario =>
                    // Wire up capture of the child's stderr/stdout: buffered for metric
                    // recording and echoed live through the console redirector.
                    scenario.Process.ErrorDataReceived += (object sender, DataReceivedEventArgs errorLine) =>
                        if(!string.IsNullOrEmpty(errorLine.Data))
                            stderr.AppendLine(errorLine.Data);
                            redirector.WriteLine("STDERROR: " + errorLine.Data);
                    scenario.Process.OutputDataReceived += (object sender, DataReceivedEventArgs outputLine) =>
                        stdout.AppendLine(outputLine.Data);
                        redirector.WriteLine(outputLine.Data);
                PostIterationDelegate = scenarioResult =>
                    // Fold each iteration's timings (and ETW data, if any) into the run result.
                    result.IterationResults.Add(RecordIterationMetrics(scenarioResult, stdout.ToString(), stderr.ToString(), redirector));
            harness.RunScenario(scenarioConfiguration, sb => { BenchviewResultExporter.ConvertRunResult(sb, result); });
/// <summary>
/// Builds the IterationResult for one completed scenario iteration: records the child
/// process's wall-clock elapsed time and, when an ETW log file exists for the iteration,
/// folds its measurements in as well.
/// </summary>
protected virtual IterationResult RecordIterationMetrics(ScenarioExecutionResult scenarioIteration, string stdout, string stderr, ITestOutputHelper output)
{
    var result = new IterationResult();

    // Wall-clock duration of the child process, truncated to whole milliseconds.
    TimeSpan elapsed = scenarioIteration.ProcessExitInfo.ExitTime - scenarioIteration.ProcessExitInfo.StartTime;
    result.Measurements.Add(Metric.ElapsedTimeMilliseconds, (int)elapsed.TotalMilliseconds);

    // ETW metrics are only available when the harness actually captured a log file.
    string etwLog = scenarioIteration.EventLogFileName;
    if (!string.IsNullOrWhiteSpace(etwLog) && File.Exists(etwLog))
    {
        AddEtwData(result, scenarioIteration, output);
    }

    return result;
}
/// <summary>
/// Parses the iteration's ETW trace and adds per-process duration, per-process PMC
/// values, and per-module PMC values (for a fixed list of interesting modules) into the
/// iteration's measurements. Parse failures are logged, not rethrown.
/// </summary>
protected static void AddEtwData(
    IterationResult iteration,
    ScenarioExecutionResult scenarioExecutionResult,
    ITestOutputHelper output)
    // Only modules on this list get per-module PMC metrics reported.
    // NOTE(review): several entries of this array fall on lines not visible in this chunk.
    string[] modulesOfInterest = new string[] {
        "Anonymously Hosted DynamicMethods Assembly",
        "Word2VecScenario.dll",
        "System.Private.CoreLib.dll",

    // Get the list of processes of interest.
    // NOTE(review): the 'try' that pairs with the catch below opens on a line not visible here.
    var processes = new SimpleTraceEventParser().GetProfileData(scenarioExecutionResult);

    // Extract the Pmc data for each one of the processes.
    foreach (var process in processes)
        // Guard: only the benchmark process itself is of interest; the guard's body is not
        // visible in this chunk (presumably a 'continue' — confirm).
        if (process.Id != scenarioExecutionResult.ProcessExitInfo.ProcessId)

        iteration.Measurements.Add(new Metric($"PMC/{process.Name}/Duration", "ms"),
            process.LifeSpan.Duration.TotalMilliseconds);

        // Add process metrics values.
        foreach (var pmcData in process.PerformanceMonitorCounterData)
            iteration.Measurements.Add(new Metric($"PMC/{process.Name}/{pmcData.Key.Name}", pmcData.Key.Unit), pmcData.Value);

        foreach (var module in process.Modules)
            var moduleName = Path.GetFileName(module.FullName);
            // Case-insensitive match against the interesting-module list.
            if (modulesOfInterest.Any(m => m.Equals(moduleName, StringComparison.OrdinalIgnoreCase)))
                foreach (var pmcData in module.PerformanceMonitorCounterData)
                    Metric m = new Metric($"PMC/{process.Name}!{moduleName}/{pmcData.Key.Name}", pmcData.Key.Unit);
                    // Sometimes the etw parser gives duplicate module entries which leads to duplicate keys
                    // but I haven't hunted down the reason. For now it is first one wins.
                    if (!iteration.Measurements.ContainsKey(m))
                        iteration.Measurements.Add(m, pmcData.Value);
// Trace parsing problems are reported to the test output and swallowed so a bad ETW log
// doesn't fail the whole benchmark run.
catch (InvalidOperationException e)
    output.WriteLine("Error while processing ETW log: " + scenarioExecutionResult.EventLogFileName);
    output.WriteLine(e.ToString());
273 /// When serializing the result data to benchview this is called to determine if any of the metrics should be reported differently
274 /// than they were collected. We use this to collect several measurements in each iteration, then present those measurements
275 /// to benchview as if each was a distinct test model with its own set of iterations of a single measurement.
277 public virtual bool TryGetBenchviewCustomMetricReporting(Metric originalMetric, out Metric newMetric, out string newScenarioModelName)
279 if (originalMetric.Name.StartsWith("PMC/"))
281 int prefixLength = "PMC/".Length;
282 int secondSlash = originalMetric.Name.IndexOf('/', prefixLength);
283 newScenarioModelName = originalMetric.Name.Substring(prefixLength, secondSlash - prefixLength);
284 string newMetricName = originalMetric.Name.Substring(secondSlash+1);
285 newMetric = new Metric(newMetricName, originalMetric.Unit);
290 newMetric = default(Metric);
291 newScenarioModelName = null;