Fix JitBench (#19584)
[platform/upstream/coreclr.git] / tests / src / performance / Scenario / JitBench / Runner / Benchmark.cs
1 using System;
2 using System.Collections.Generic;
3 using System.Diagnostics;
4 using System.IO;
5 using System.Linq;
6 using System.Runtime.InteropServices;
7 using System.Text;
8 using System.Threading.Tasks;
9 using System.Xml;
10 using Microsoft.Xunit.Performance.Api;
11 using Microsoft.Xunit.Performance.Api.Profilers.Etw;
12
13 namespace JitBench
14 {
15     public abstract class Benchmark
16     {
17         public Benchmark(string name)
18         {
19             Name = name;
20             EnvironmentVariables = new Dictionary<string, string>();
21         }
22
        /// <summary>Display name of the benchmark, fixed at construction.</summary>
        public string Name { get; private set; }
        /// <summary>Path of the benchmark binary handed to dotnet as its first argument; set by derived types (presumably during Setup — confirm).</summary>
        public string ExePath { get; protected set; }
        /// <summary>Working directory used when launching the benchmark process.</summary>
        public string WorkingDirPath { get; protected set; }
        /// <summary>Arguments appended after <see cref="ExePath"/> on the benchmark command line.</summary>
        public string CommandLineArguments { get; protected set; }
        /// <summary>Extra environment variables applied to the benchmark process, in addition to per-configuration ones.</summary>
        public Dictionary<string, string> EnvironmentVariables { get; private set; }
28
29         public BenchmarkRunResult[] Run(TestRun run, ITestOutputHelper output)
30         {
31             using (var runSectionOutput = new IndentedTestOutputHelper($"Run {Name} iterations", output))
32             {
33                 return MeasureIterations(run, runSectionOutput);
34             }   
35         }
36
        /// <summary>
        /// Prepares the benchmark to run against the given dotnet installation. Implementations are
        /// expected to stage intermediate files under <paramref name="intermediateOutputDir"/> and may
        /// reuse previously prepared assets when <paramref name="useExistingSetup"/> is true — confirm
        /// against concrete benchmark implementations.
        /// </summary>
        public abstract Task Setup(DotNetInstallation dotnetInstall, string intermediateOutputDir, bool useExistingSetup, ITestOutputHelper output);
38
39         protected void RetargetProjects(
40             DotNetInstallation dotNetInstall,
41             string rootDir,
42             IEnumerable<string> projectFileRelativePaths)
43         {
44             if (string.IsNullOrWhiteSpace(rootDir))
45             {
46                 throw new ArgumentNullException(rootDir);
47             }
48             if (!Directory.Exists(rootDir))
49             {
50                 throw new DirectoryNotFoundException($"Root directory was not found: {rootDir}");
51             }
52
53             foreach (string projectFileRelativePath in projectFileRelativePaths)
54             {
55                 string projectFile = Path.Combine(rootDir, projectFileRelativePath);
56                 if (!File.Exists(projectFile))
57                 {
58                     throw new FileNotFoundException($"Project file was not found: {projectFile}");
59                 }
60
61                 var doc = new XmlDocument();
62                 Encoding docEncoding;
63                 using (var fs = new FileStream(projectFile, FileMode.Open, FileAccess.Read, FileShare.Read))
64                 using (var sr = new StreamReader(fs))
65                 {
66                     docEncoding = sr.CurrentEncoding;
67                     doc.Load(sr);
68                 }
69                 XmlElement root = doc.DocumentElement;
70
71                 // Comment out all existing TargetFramework and RuntimeFrameworkVersion elements
72                 foreach (XmlElement e in root.SelectNodes("PropertyGroup/TargetFramework").OfType<XmlElement>())
73                 {
74                     e.ParentNode.ReplaceChild(doc.CreateComment(e.OuterXml), e);
75                 }
76                 foreach (XmlElement e in root.SelectNodes("PropertyGroup/RuntimeFrameworkVersion").OfType<XmlElement>())
77                 {
78                     e.ParentNode.ReplaceChild(doc.CreateComment(e.OuterXml), e);
79                 }
80
81                 // Add TargetFramework and RuntimeFrameworkVersion elements with the requested values to the top
82                 {
83                     XmlElement propertyGroupElement = doc.CreateElement("PropertyGroup");
84                     root.PrependChild(propertyGroupElement);
85
86                     XmlElement targetFrameworkElement = doc.CreateElement("TargetFramework");
87                     XmlElement runtimeFrameworkVersionElement = doc.CreateElement("RuntimeFrameworkVersion");
88                     propertyGroupElement.AppendChild(targetFrameworkElement);
89                     propertyGroupElement.AppendChild(runtimeFrameworkVersionElement);
90
91                     targetFrameworkElement.InnerText =
92                         DotNetSetup.GetTargetFrameworkMonikerForFrameworkVersion(dotNetInstall.FrameworkVersion);
93                     runtimeFrameworkVersionElement.InnerText = dotNetInstall.FrameworkVersion;
94                 }
95
96                 using (var fs = new FileStream(projectFile, FileMode.Truncate, FileAccess.Write, FileShare.Read))
97                 using (var sw = new StreamWriter(fs, docEncoding))
98                 {
99                     doc.Save(sw);
100                 }
101             }
102         }
103
104         public virtual Metric[] GetDefaultDisplayMetrics()
105         {
106             return new Metric[] { Metric.ElapsedTimeMilliseconds };
107         }
108
109         /// <summary>
110         /// Does this benchmark run properly on a given architecture?
111         /// </summary>
112         public virtual bool IsArchitectureSupported(Architecture arch)
113         {
114             return (arch == Architecture.X86 || arch == Architecture.X64);
115         }
116
117         BenchmarkRunResult[] MeasureIterations(TestRun run, ITestOutputHelper output)
118         {
119             List<BenchmarkRunResult> results = new List<BenchmarkRunResult>();
120             foreach (BenchmarkConfiguration config in run.Configurations)
121             {
122                 results.Add(MeasureIterations(run, config, output));
123             }
124             return results.ToArray();
125         }
126
        /// <summary>
        /// Runs this benchmark under a single configuration for the run's configured iteration
        /// count, capturing stdout/stderr per iteration, and returns the collected results.
        /// </summary>
        BenchmarkRunResult MeasureIterations(TestRun run, BenchmarkConfiguration config, ITestOutputHelper output)
        {
            // The XunitPerformanceHarness is hardcoded to log to the console. It would be nice if the output was configurable somehow
            // but in lieu of that we can redirect all console output with light hackery.
            using (var redirector = new ConsoleRedirector(output))
            {
                // XunitPerformanceHarness expects to do the raw commandline parsing itself, but I really don't like that its default collection
                // metric requires the use of ETW. Getting an admin console or admin VS instance isn't where most people start, its
                // a small nuisance, and for these tests its often not needed/adds non-trivial overhead. I set the default to stopwatch if the
                // perf:collect argument hasn't been specified, but that sadly requires that I pre-parse, interpret, and then re-format all the 
                // args to make that change :(
                // 
                // In TestRun.ValidateMetricNames() I pre-check if ETW is going to be needed and give an error there rather than doing all the
                // test setup (~1 minute?) and then giving the error after the user has probably wandered away. That also relies on some of this
                // replicated command line parsing.
                string[] args = new string[] { "--perf:collect", string.Join("+", run.MetricNames), "--perf:outputdir", run.OutputDir, "--perf:runid", run.BenchviewRunId };
                using (var harness = new XunitPerformanceHarness(args))
                {
                    // The benchmark runs under the shared dotnet host; ExePath becomes the first argument.
                    ProcessStartInfo startInfo = new ProcessStartInfo(run.DotNetInstallation.DotNetExe, (ExePath + " " + CommandLineArguments).Trim());
                    startInfo.WorkingDirectory = WorkingDirPath;
                    startInfo.RedirectStandardError = true;
                    startInfo.RedirectStandardOutput = true;
                    // Configuration-level variables first, then benchmark-level ones (later entries overwrite
                    // earlier ones in the loop below), plus DOTNET_MULTILEVEL_LOOKUP=0 — presumably to keep the
                    // child from resolving a runtime outside the installation under test; confirm.
                    IEnumerable<KeyValuePair<string, string>> extraEnvVars = config.EnvironmentVariables.Concat(EnvironmentVariables).Append(new KeyValuePair<string, string>("DOTNET_MULTILEVEL_LOOKUP", "0"));
                    foreach (KeyValuePair<string, string> kv in extraEnvVars)
                    {
                        startInfo.Environment[kv.Key] = kv.Value;
                    }
                    output.WriteLine("XUnitPerfHarness doesn't log env vars it uses to run processes. To workaround, logging them here:");
                    output.WriteLine($"Environment variables: {string.Join(", ", extraEnvVars.Select(kv => kv.Key + "=" + kv.Value))}");
                    output.WriteLine($"Working directory: \"{startInfo.WorkingDirectory}\"");
                    output.WriteLine($"Command line: \"{startInfo.FileName}\" {startInfo.Arguments}");

                    BenchmarkRunResult result = new BenchmarkRunResult(this, config);
                    StringBuilder stderr = new StringBuilder();
                    StringBuilder stdout = new StringBuilder();
                    var scenarioConfiguration = new ScenarioTestConfiguration(TimeSpan.FromMinutes(60), startInfo)
                    {
                        //XUnitPerformanceHarness writes files to disk starting with {runid}-{ScenarioBenchmarkName}-{TestName}
                        TestName = (Name + "-" + config.Name).Replace(' ', '_'),
                        Scenario = new ScenarioBenchmark("JitBench"),
                        Iterations = run.Iterations,
                        PreIterationDelegate = scenario =>
                        {
                            // Reset the captured output so each iteration's logs are isolated.
                            stderr.Clear();
                            stdout.Clear();
                            scenario.Process.ErrorDataReceived += (object sender, DataReceivedEventArgs errorLine) =>
                            {
                                if(!string.IsNullOrEmpty(errorLine.Data))
                                {
                                    stderr.AppendLine(errorLine.Data);
                                    redirector.WriteLine("STDERROR: " + errorLine.Data);
                                }
                            };
                            scenario.Process.OutputDataReceived += (object sender, DataReceivedEventArgs outputLine) =>
                            {
                                // NOTE(review): unlike the stderr handler above, this does not filter
                                // null/empty Data (a null Data event signals end-of-stream), so stdout can
                                // pick up trailing blank lines — confirm whether that is intentional.
                                stdout.AppendLine(outputLine.Data);
                                redirector.WriteLine(outputLine.Data);
                            };
                        },
                        PostIterationDelegate = scenarioResult =>
                        {
                            // Fold this iteration's timing/ETW/stdout-derived metrics into the run result.
                            result.IterationResults.Add(RecordIterationMetrics(scenarioResult, stdout.ToString(), stderr.ToString(), redirector));
                        }
                    };
                    harness.RunScenario(scenarioConfiguration, sb => { BenchviewResultExporter.ConvertRunResult(sb, result); });
                    return result;
                }
            }
        }
196
197         protected virtual IterationResult RecordIterationMetrics(ScenarioExecutionResult scenarioIteration, string stdout, string stderr, ITestOutputHelper output)
198         {
199             IterationResult iterationResult = new IterationResult();
200             int elapsedMs = (int)(scenarioIteration.ProcessExitInfo.ExitTime - scenarioIteration.ProcessExitInfo.StartTime).TotalMilliseconds;
201             iterationResult.Measurements.Add(Metric.ElapsedTimeMilliseconds, elapsedMs);
202             if (!string.IsNullOrWhiteSpace(scenarioIteration.EventLogFileName) && File.Exists(scenarioIteration.EventLogFileName))
203             {
204                 AddEtwData(iterationResult, scenarioIteration, output);
205             }
206             return iterationResult;
207         }
208
209         protected static void AddEtwData(
210             IterationResult iteration,
211             ScenarioExecutionResult scenarioExecutionResult,
212             ITestOutputHelper output)
213         {
214             string[] modulesOfInterest = new string[] {
215                         "Anonymously Hosted DynamicMethods Assembly",
216                         "clrjit.dll",
217                         "coreclr.dll",
218                         "dotnet.exe",
219                         "MusicStore.dll",
220                         "AllReady.dll",
221                         "Word2VecScenario.dll",
222                         "ntoskrnl.exe",
223                         "System.Private.CoreLib.dll",
224                         "Unknown",
225                     };
226
227             // Get the list of processes of interest.
228             try
229             {
230                 var processes = new SimpleTraceEventParser().GetProfileData(scenarioExecutionResult);
231
232                 // Extract the Pmc data for each one of the processes.
233                 foreach (var process in processes)
234                 {
235                     if (process.Id != scenarioExecutionResult.ProcessExitInfo.ProcessId)
236                         continue;
237
238                     iteration.Measurements.Add(new Metric($"PMC/{process.Name}/Duration", "ms"),
239                         process.LifeSpan.Duration.TotalMilliseconds);
240
241                     // Add process metrics values.
242                     foreach (var pmcData in process.PerformanceMonitorCounterData)
243                         iteration.Measurements.Add(new Metric($"PMC/{process.Name}/{pmcData.Key.Name}", pmcData.Key.Unit), pmcData.Value);
244
245                     foreach (var module in process.Modules)
246                     {
247                         var moduleName = Path.GetFileName(module.FullName);
248                         if (modulesOfInterest.Any(m => m.Equals(moduleName, StringComparison.OrdinalIgnoreCase)))
249                         {
250                             foreach (var pmcData in module.PerformanceMonitorCounterData)
251                             {
252                                 Metric m = new Metric($"PMC/{process.Name}!{moduleName}/{pmcData.Key.Name}", pmcData.Key.Unit);
253                                 // Sometimes the etw parser gives duplicate module entries which leads to duplicate keys
254                                 // but I haven't hunted down the reason. For now it is first one wins.
255                                 if (!iteration.Measurements.ContainsKey(m))
256                                 {
257                                     iteration.Measurements.Add(m, pmcData.Value);
258                                 }
259                             }
260                                 
261                         }
262                     }
263                 }
264             }
265             catch (InvalidOperationException e)
266             {
267                 output.WriteLine("Error while processing ETW log: " + scenarioExecutionResult.EventLogFileName);
268                 output.WriteLine(e.ToString());
269             }
270         }
271
272         /// <summary>
273         /// When serializing the result data to benchview this is called to determine if any of the metrics should be reported differently
274         /// than they were collected. We use this to collect several measurements in each iteration, then present those measurements
275         /// to benchview as if each was a distinct test model with its own set of iterations of a single measurement.
276         /// </summary>
277         public virtual bool TryGetBenchviewCustomMetricReporting(Metric originalMetric, out Metric newMetric, out string newScenarioModelName)
278         {   
279             if (originalMetric.Name.StartsWith("PMC/"))
280             {
281                 int prefixLength = "PMC/".Length;
282                 int secondSlash = originalMetric.Name.IndexOf('/', prefixLength);
283                 newScenarioModelName = originalMetric.Name.Substring(prefixLength, secondSlash - prefixLength);
284                 string newMetricName = originalMetric.Name.Substring(secondSlash+1);
285                 newMetric = new Metric(newMetricName, originalMetric.Unit);
286                 return true;
287             }
288             else
289             {
290                 newMetric = default(Metric);
291                 newScenarioModelName = null;
292                 return false;
293             }
294         }
295     }
296 }