with:
repository: "microsoft/vscode-github-triage-actions"
path: ./actions
- ref: cd16cd2aad6ba2da74bb6c6f7293adddd579a90e # locker action commit sha
+ ref: 858022dfd0ed0511dd13456cb2fa1517253c6378 # locker action commit sha
- name: Install Actions
run: npm install --production --prefix ./actions
- name: Run Locker
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "TestExtension", "src\tests\TestExtension\TestExtension.csproj", "{C6EB3C21-FDFF-4CF0-BE3A-3D1A3924408E}"
EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.FileFormats", "src\Microsoft.FileFormats\Microsoft.FileFormats.csproj", "{830A70D3-E604-467A-9846-6C5DF5BD3976}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.SymbolStore", "src\Microsoft.SymbolStore\Microsoft.SymbolStore.csproj", "{438A539E-6AF2-4402-BBA0-E2AAC71A28A1}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "dotnet-symbol", "src\Tools\dotnet-symbol\dotnet-symbol.csproj", "{B6C33C85-08A7-47D9-BEA8-36164BB3653B}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "TestHelpers", "src\tests\TestHelpers\TestHelpers.csproj", "{C32F2858-6B5F-4967-ABC4-852B6399C4AE}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.FileFormats.UnitTests", "src\tests\Microsoft.FileFormats.UnitTests\Microsoft.FileFormats.UnitTests.csproj", "{44F93947-8FD4-4946-8AE5-EF6D25970CC7}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.SymbolStore.UnitTests", "src\tests\Microsoft.SymbolStore.UnitTests\Microsoft.SymbolStore.UnitTests.csproj", "{C2422836-BA25-4751-9060-7C7890085869}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.SymbolManifestGenerator", "src\Microsoft.SymbolManifestGenerator\Microsoft.SymbolManifestGenerator.csproj", "{28B55114-88C0-44B6-BBD8-50C14ED59EE2}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Checked|Any CPU = Checked|Any CPU
{C6EB3C21-FDFF-4CF0-BE3A-3D1A3924408E}.RelWithDebInfo|x64.Build.0 = Release|Any CPU
{C6EB3C21-FDFF-4CF0-BE3A-3D1A3924408E}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU
{C6EB3C21-FDFF-4CF0-BE3A-3D1A3924408E}.RelWithDebInfo|x86.Build.0 = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Checked|Any CPU.ActiveCfg = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Checked|Any CPU.Build.0 = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Checked|ARM.ActiveCfg = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Checked|ARM.Build.0 = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Checked|ARM64.ActiveCfg = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Checked|ARM64.Build.0 = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Checked|x64.ActiveCfg = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Checked|x64.Build.0 = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Checked|x86.ActiveCfg = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Checked|x86.Build.0 = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Debug|ARM.ActiveCfg = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Debug|ARM.Build.0 = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Debug|ARM64.ActiveCfg = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Debug|ARM64.Build.0 = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Debug|x64.Build.0 = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Debug|x86.Build.0 = Debug|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Release|Any CPU.Build.0 = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Release|ARM.ActiveCfg = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Release|ARM.Build.0 = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Release|ARM64.ActiveCfg = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Release|ARM64.Build.0 = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Release|x64.ActiveCfg = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Release|x64.Build.0 = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Release|x86.ActiveCfg = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.Release|x86.Build.0 = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.RelWithDebInfo|ARM.ActiveCfg = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.RelWithDebInfo|ARM.Build.0 = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.RelWithDebInfo|ARM64.ActiveCfg = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.RelWithDebInfo|ARM64.Build.0 = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.RelWithDebInfo|x64.Build.0 = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU
+ {830A70D3-E604-467A-9846-6C5DF5BD3976}.RelWithDebInfo|x86.Build.0 = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Checked|Any CPU.ActiveCfg = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Checked|Any CPU.Build.0 = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Checked|ARM.ActiveCfg = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Checked|ARM.Build.0 = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Checked|ARM64.ActiveCfg = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Checked|ARM64.Build.0 = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Checked|x64.ActiveCfg = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Checked|x64.Build.0 = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Checked|x86.ActiveCfg = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Checked|x86.Build.0 = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Debug|ARM.ActiveCfg = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Debug|ARM.Build.0 = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Debug|ARM64.ActiveCfg = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Debug|ARM64.Build.0 = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Debug|x64.Build.0 = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Debug|x86.Build.0 = Debug|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Release|Any CPU.Build.0 = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Release|ARM.ActiveCfg = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Release|ARM.Build.0 = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Release|ARM64.ActiveCfg = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Release|ARM64.Build.0 = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Release|x64.ActiveCfg = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Release|x64.Build.0 = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Release|x86.ActiveCfg = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.Release|x86.Build.0 = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.RelWithDebInfo|ARM.ActiveCfg = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.RelWithDebInfo|ARM.Build.0 = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.RelWithDebInfo|ARM64.ActiveCfg = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.RelWithDebInfo|ARM64.Build.0 = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.RelWithDebInfo|x64.Build.0 = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1}.RelWithDebInfo|x86.Build.0 = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Checked|Any CPU.ActiveCfg = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Checked|Any CPU.Build.0 = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Checked|ARM.ActiveCfg = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Checked|ARM.Build.0 = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Checked|ARM64.ActiveCfg = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Checked|ARM64.Build.0 = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Checked|x64.ActiveCfg = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Checked|x64.Build.0 = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Checked|x86.ActiveCfg = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Checked|x86.Build.0 = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Debug|ARM.ActiveCfg = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Debug|ARM.Build.0 = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Debug|ARM64.ActiveCfg = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Debug|ARM64.Build.0 = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Debug|x64.Build.0 = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Debug|x86.Build.0 = Debug|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Release|Any CPU.Build.0 = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Release|ARM.ActiveCfg = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Release|ARM.Build.0 = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Release|ARM64.ActiveCfg = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Release|ARM64.Build.0 = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Release|x64.ActiveCfg = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Release|x64.Build.0 = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Release|x86.ActiveCfg = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.Release|x86.Build.0 = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.RelWithDebInfo|ARM.ActiveCfg = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.RelWithDebInfo|ARM.Build.0 = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.RelWithDebInfo|ARM64.ActiveCfg = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.RelWithDebInfo|ARM64.Build.0 = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.RelWithDebInfo|x64.Build.0 = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B}.RelWithDebInfo|x86.Build.0 = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Checked|Any CPU.ActiveCfg = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Checked|Any CPU.Build.0 = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Checked|ARM.ActiveCfg = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Checked|ARM.Build.0 = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Checked|ARM64.ActiveCfg = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Checked|ARM64.Build.0 = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Checked|x64.ActiveCfg = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Checked|x64.Build.0 = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Checked|x86.ActiveCfg = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Checked|x86.Build.0 = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Debug|ARM.ActiveCfg = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Debug|ARM.Build.0 = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Debug|ARM64.ActiveCfg = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Debug|ARM64.Build.0 = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Debug|x64.Build.0 = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Debug|x86.Build.0 = Debug|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Release|Any CPU.Build.0 = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Release|ARM.ActiveCfg = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Release|ARM.Build.0 = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Release|ARM64.ActiveCfg = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Release|ARM64.Build.0 = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Release|x64.ActiveCfg = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Release|x64.Build.0 = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Release|x86.ActiveCfg = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.Release|x86.Build.0 = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.RelWithDebInfo|ARM.ActiveCfg = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.RelWithDebInfo|ARM.Build.0 = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.RelWithDebInfo|ARM64.ActiveCfg = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.RelWithDebInfo|ARM64.Build.0 = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.RelWithDebInfo|x64.Build.0 = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE}.RelWithDebInfo|x86.Build.0 = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Checked|Any CPU.ActiveCfg = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Checked|Any CPU.Build.0 = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Checked|ARM.ActiveCfg = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Checked|ARM.Build.0 = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Checked|ARM64.ActiveCfg = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Checked|ARM64.Build.0 = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Checked|x64.ActiveCfg = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Checked|x64.Build.0 = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Checked|x86.ActiveCfg = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Checked|x86.Build.0 = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Debug|ARM.ActiveCfg = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Debug|ARM.Build.0 = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Debug|ARM64.ActiveCfg = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Debug|ARM64.Build.0 = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Debug|x64.Build.0 = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Debug|x86.Build.0 = Debug|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Release|Any CPU.Build.0 = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Release|ARM.ActiveCfg = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Release|ARM.Build.0 = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Release|ARM64.ActiveCfg = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Release|ARM64.Build.0 = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Release|x64.ActiveCfg = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Release|x64.Build.0 = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Release|x86.ActiveCfg = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.Release|x86.Build.0 = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.RelWithDebInfo|ARM.ActiveCfg = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.RelWithDebInfo|ARM.Build.0 = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.RelWithDebInfo|ARM64.ActiveCfg = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.RelWithDebInfo|ARM64.Build.0 = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.RelWithDebInfo|x64.Build.0 = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7}.RelWithDebInfo|x86.Build.0 = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Checked|Any CPU.ActiveCfg = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Checked|Any CPU.Build.0 = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Checked|ARM.ActiveCfg = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Checked|ARM.Build.0 = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Checked|ARM64.ActiveCfg = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Checked|ARM64.Build.0 = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Checked|x64.ActiveCfg = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Checked|x64.Build.0 = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Checked|x86.ActiveCfg = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Checked|x86.Build.0 = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Debug|ARM.ActiveCfg = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Debug|ARM.Build.0 = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Debug|ARM64.ActiveCfg = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Debug|ARM64.Build.0 = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Debug|x64.Build.0 = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Debug|x86.Build.0 = Debug|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Release|Any CPU.Build.0 = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Release|ARM.ActiveCfg = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Release|ARM.Build.0 = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Release|ARM64.ActiveCfg = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Release|ARM64.Build.0 = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Release|x64.ActiveCfg = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Release|x64.Build.0 = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Release|x86.ActiveCfg = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.Release|x86.Build.0 = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.RelWithDebInfo|ARM.ActiveCfg = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.RelWithDebInfo|ARM.Build.0 = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.RelWithDebInfo|ARM64.ActiveCfg = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.RelWithDebInfo|ARM64.Build.0 = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.RelWithDebInfo|x64.Build.0 = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU
+ {C2422836-BA25-4751-9060-7C7890085869}.RelWithDebInfo|x86.Build.0 = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Checked|Any CPU.ActiveCfg = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Checked|Any CPU.Build.0 = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Checked|ARM.ActiveCfg = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Checked|ARM.Build.0 = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Checked|ARM64.ActiveCfg = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Checked|ARM64.Build.0 = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Checked|x64.ActiveCfg = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Checked|x64.Build.0 = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Checked|x86.ActiveCfg = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Checked|x86.Build.0 = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Debug|ARM.ActiveCfg = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Debug|ARM.Build.0 = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Debug|ARM64.ActiveCfg = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Debug|ARM64.Build.0 = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Debug|x64.Build.0 = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Debug|x86.Build.0 = Debug|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Release|Any CPU.Build.0 = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Release|ARM.ActiveCfg = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Release|ARM.Build.0 = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Release|ARM64.ActiveCfg = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Release|ARM64.Build.0 = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Release|x64.ActiveCfg = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Release|x64.Build.0 = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Release|x86.ActiveCfg = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.Release|x86.Build.0 = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.RelWithDebInfo|ARM.ActiveCfg = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.RelWithDebInfo|ARM.Build.0 = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.RelWithDebInfo|ARM64.ActiveCfg = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.RelWithDebInfo|ARM64.Build.0 = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.RelWithDebInfo|x64.Build.0 = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2}.RelWithDebInfo|x86.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
{E8F133F8-4D20-475D-9D16-2BA236DAB65F} = {03479E19-3F18-49A6-910A-F5041E27E7C0}
{1043FA82-37CC-4809-80DC-C1EB06A55133} = {19FAB78C-3351-4911-8F0C-8C6056401740}
{C6EB3C21-FDFF-4CF0-BE3A-3D1A3924408E} = {03479E19-3F18-49A6-910A-F5041E27E7C0}
+ {830A70D3-E604-467A-9846-6C5DF5BD3976} = {19FAB78C-3351-4911-8F0C-8C6056401740}
+ {438A539E-6AF2-4402-BBA0-E2AAC71A28A1} = {19FAB78C-3351-4911-8F0C-8C6056401740}
+ {B6C33C85-08A7-47D9-BEA8-36164BB3653B} = {B62728C8-1267-4043-B46F-5537BBAEC692}
+ {C32F2858-6B5F-4967-ABC4-852B6399C4AE} = {03479E19-3F18-49A6-910A-F5041E27E7C0}
+ {44F93947-8FD4-4946-8AE5-EF6D25970CC7} = {03479E19-3F18-49A6-910A-F5041E27E7C0}
+ {C2422836-BA25-4751-9060-7C7890085869} = {03479E19-3F18-49A6-910A-F5041E27E7C0}
+ {28B55114-88C0-44B6-BBD8-50C14ED59EE2} = {19FAB78C-3351-4911-8F0C-8C6056401740}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {46465737-C938-44FC-BE1A-4CE139EBB5E0}
extends:
template: /eng/pipelines/pipeline-resources.yml
parameters:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- isOfficialBuild: false
- ${{ else }}:
- isOfficialBuild: true
stages:
- stage: build
displayName: Build and Test Diagnostics
osGroup: Linux
osSuffix: -musl
nativeBuildContainer: linux_musl_x64
+ crossBuild: true
buildOnly: true
buildConfigs:
- configuration: Release
- template: /eng/pipelines/build.yml
parameters:
jobTemplate: ${{ variables.jobTemplate }}
- name: Alpine3_13
+ name: Ubuntu_22_04
+ osGroup: Linux
+ container: test_ubuntu_22_04
+ dependsOn: Linux
+ testOnly: true
+ buildConfigs:
+ - configuration: Release
+ architecture: x64
+ - ${{ if in(variables['Build.Reason'], 'PullRequest') }}:
+ - configuration: Debug
+ architecture: x64
+
+ - template: /eng/pipelines/build.yml
+ parameters:
+ jobTemplate: ${{ variables.jobTemplate }}
+ name: Alpine3_19
osGroup: Linux
osSuffix: -musl
container: test_linux_musl_x64
displayName: 'Publish Bundled Tools'
condition: succeeded()
- output: pipelineArtifact
- artifact: Logs_Packaging_Signing
+ artifact: Logs_Packaging_Signing_Attempt$(System.JobAttempt)
path: '$(Build.SourcesDirectory)/artifacts/log'
displayName: 'Publish Signing and Packaging Logs'
condition: always()
+ continueOnError: true
+ sbomEnabled: false # we don't need SBOM for logs
steps:
- task: DownloadPipelineArtifact@2
displayName: 'Download release builds'
StopTracing = 0x01, // stop a given session
CollectTracing = 0x02, // create/start a given session
CollectTracing2 = 0x03, // create/start a given session with/without rundown
+ CollectTracing3 = 0x04, // create/start a given session with/without collecting stacks
+ CollectTracing4 = 0x05, // create/start a given session with a specific rundown keyword
}
```
See: [EventPipe Commands](#EventPipe-Commands)
StopTracing = 0x01, // stop a given session
CollectTracing = 0x02, // create/start a given session
CollectTracing2 = 0x03, // create/start a given session with/without rundown
+ CollectTracing3 = 0x04, // create/start a given session with/without collecting stacks
+ CollectTracing4 = 0x05, // create/start a given session with a specific rundown keyword
}
```
EventPipe Payloads are encoded with the following rules:
Header: `{ Magic; 28; 0xFF00; 0x0000; }`
-`CollectTracing2` returns:
+`CollectTracing3` returns:
* `ulong sessionId`: the ID for the stream session starting on the current connection
##### Details:
```
Followed by an Optional Continuation of a `nettrace` format stream of events.
+### `CollectTracing4`
+
+Command Code: `0x0205`
+
+The `CollectTracing4` command is an extension of the `CollectTracing3` command - its behavior is the same, except that the `requestRundown` field is replaced by the `rundownKeyword` field to allow customizing the set of rundown events to be fired.
+
+A rundown keyword of `0x80020139` is equivalent to `CollectTracing3` with `requestRundown=true`, and a rundown keyword of `0` is equivalent to `requestRundown=false`.
+
+> Note: available for .NET 9.0 and later.
+
+#### Inputs:
+
+Header: `{ Magic; Size; 0x0205; 0x0000 }`
+
+* `uint circularBufferMB`: The size of the circular buffer used for buffering event data while streaming
+* `uint format`: 0 for the legacy NetPerf format and 1 for the NetTrace format
+* `ulong rundownKeyword`: Indicates the keyword for the rundown provider
+* `array<provider_config> providers`: The providers to turn on for the streaming session
+
+A `provider_config` is composed of the following data:
+* `ulong keywords`: The keywords to turn on with this provider
+* `uint logLevel`: The level of information to turn on
+* `string provider_name`: The name of the provider
+* `string filter_data` (optional): Filter information
+
+> See ETW documentation for a more detailed explanation of Keywords, Filters, and Log Level.
+#### Returns (as an IPC Message Payload):
+
+Header: `{ Magic; 28; 0xFF00; 0x0000; }`
+
+`CollectTracing4` returns:
+* `ulong sessionId`: the ID for the stream session starting on the current connection
+
+##### Details:
+
+Input:
+```
+Payload
+{
+ uint circularBufferMB,
+ uint format,
+ ulong rundownKeyword,
+ array<provider_config> providers
+}
+
+provider_config
+{
+ ulong keywords,
+ uint logLevel,
+ string provider_name,
+ string filter_data (optional)
+}
+```
+
+Returns:
+```c
+Payload
+{
+ ulong sessionId
+}
+```
+Followed by an Optional Continuation of a `nettrace` format stream of events.
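+
+For illustration, here is a minimal sketch (in Python, with hypothetical helper names) of serializing this payload. It assumes the encoding rules referenced earlier in this document: little-endian fixed-size fields, arrays prefixed with a `uint` element count, and strings encoded as length-prefixed (character count including the terminating null) UTF-16.
+
+```python
+import struct
+
+def encode_string(value: str) -> bytes:
+    # uint char count (including the null terminator), then UTF-16LE chars.
+    chars = value + '\0'
+    return struct.pack('<I', len(chars)) + chars.encode('utf-16-le')
+
+def collect_tracing4_payload(circular_buffer_mb: int, fmt: int,
+                             rundown_keyword: int, providers) -> bytes:
+    # Fields in payload order: circularBufferMB, format, rundownKeyword.
+    payload = struct.pack('<IIQ', circular_buffer_mb, fmt, rundown_keyword)
+    payload += struct.pack('<I', len(providers))  # array element count
+    for keywords, log_level, name, filter_data in providers:
+        payload += struct.pack('<QI', keywords, log_level)
+        payload += encode_string(name) + encode_string(filter_data)
+    return payload
+
+# Illustrative session: NetTrace format, rundown keyword mirroring
+# CollectTracing3 with requestRundown=true, one provider.
+body = collect_tracing4_payload(256, 1, 0x80020139,
+    [(0x4c14fccbd, 4, 'Microsoft-Windows-DotNETRuntime', '')])
+```
+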
### `StopTracing`
--- /dev/null
+# MyGet symbol server extensions #
+
+This is a feature request for MyGet to create an implementation of the [Zip Package Based Symbol Server](Package_Based_Symbol_Server.md). As far as I can tell MyGet already exposes the [Simple Symbol Query Protocol](Simple_Symbol_Query_Protocol.md), albeit never specified with that name and marketed only as being SymSrv-compatible.
+
+## Endpoints ##
+
+MyGet would expose the same per-feed symbol server endpoints it does today, for example:
+
+ https://www.myget.org/F/dev-feed/symbols
+ https://www.myget.org/F/dev-feed/auth/11111111-1111-1111-1111-11111111111/symbols
+
+If possible, it would also be nice to define an aggregate feed that can serve up any file from a set of feeds. We could then configure a larger organizational aggregate feed for customers to use. This probably needs some further discussion once Maarten is back from vacation.
+
+## Packages ##
+
+The service operates over all the packages that are active on that feed at a given time. Active packages are the packages that have been uploaded and not yet deleted, either directly by the developer or implicitly by the configurable MyGet retention policies.
+
+## Other clientKey sources ##
+
+This feature doesn't preclude MyGet from continuing to satisfy requests using clientKeys that were automatically derived from package contents or any other source, but those keys should never take precedence over a key mapping provided explicitly by the client in symbol\_index.json.
+
+## Access Privileges ##
+
+By default, the symbol service is expected to be exactly as accessible as reading the underlying feed. If the underlying feed requires authentication, so too should the symbol service endpoint. We aren't requesting MyGet add any additional configurability, but it's fine if they do.
+
+## Package Management ##
+
+We believe the existing mechanisms MyGet uses to upload and manage packages on a feed are sufficient to implicitly manage the content on the symbol service. No additional requests here.
+
+## Performance ##
+
+We don't yet know what loads to expect. Although the load will probably be far lower to start, here is an initial guess at a load this service might need to scale to:
+
+- 100,000 packages per-feed
+- 100 million aggregate clientKeys per-feed
+- 1000 requests/sec (burst)
+- 1 GB/s (burst)
+- 1 million requests/day (sustained load)
+- 1 TB/day (sustained load)
+- Average response time: < 1 sec, 99% response time < 5 sec (measured from the time the request arrives at the MyGet server to the time file data begins streaming back)
+
+My hope is that this is still well within the scalability range of other aspects of the existing MyGet service and thus doesn't require any significant investment in new infrastructure or more complex service logic.
\ No newline at end of file
--- /dev/null
+# Zip Package Based Symbol Server #
+
+A zip package based symbol server is a network service that implements the [Simple Symbol Query Protocol](Simple_Symbol_Query_Protocol.md) (SSQP) using a set of zip compressed files with specific contents to define the files that are available for download and the clientKey/filenames that address them. This specification defines the format of the zip packages. Although this format is intended to be fully compatible with NuGet package and NuGet symbol package specifications, the zip packages are not required to qualify under either standard.
+
+## The zip package format ##
+
+Each symbol package is a compressed container of files in the zip format. At the root of the container there must be one file named 'symbol\_index.json'. There may be an arbitrary number of other files in the container, either at the root level or in arbitrarily nested sub-containers. The symbol\_index.json file is a JSON array where each element identifies a clientKey and a blobPath, the corresponding file in the zip that should be returned by an SSQP request for that clientKey. The blobPath is a filename, preceded by zero or more container names using '/' as the separator:
+
+```json
+[
+ {
+ "clientKey" : "12387532",
+ "blobPath" : "debug_info.txt"
+ },
+ {
+ "clientKey" : "MyProgram.exe/09safnf82asddasdqwd998vds/MyProgram.exe",
+ "blobPath" : "MyProgram.exe"
+ },
+ {
+ "clientKey" : "12-09",
+ "blobPath" : "Content/localized/en-us/data.xml"
+ },
+ {
+ "clientKey" : "23456",
+ "blobPath" : "Content/localized/en-us/data.xml"
+ }
+]
+```
+
+## Expected service behavior ##
+
+In order to implement the [Simple Symbol Query Protocol](Simple_Symbol_Query_Protocol.md), the service must identify a map entry in some package's symbol\_index.json that has a matching clientKey and then return the file pointed to by blobPath. If more than one entry in the same package has the same clientKey value, that is a malformed package and the service may handle the error in an implementation-specific way. If more than one package defines an entry with the same clientKey, the service may choose one of the entries arbitrarily using implementation-specific behavior. If the clientKey isn't present in any package, the service may return a 404, or it may fall back to other implementation-specific techniques to satisfy the request. The service must be prepared to handle having N different clientKeys all refer to the same blob.
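+
+For illustration, a minimal sketch of this lookup (in Python; names are hypothetical, and handling of malformed or conflicting packages is left implementation-specific):
+
+```python
+import json
+import zipfile
+
+def resolve(package_path: str, client_key: str) -> bytes | None:
+    """Return the blob mapped to client_key by a package's symbol_index.json."""
+    with zipfile.ZipFile(package_path) as package:
+        index = json.loads(package.read('symbol_index.json'))
+        for entry in index:
+            # clientKey comparisons are case-insensitive per SSQP.
+            if entry['clientKey'].lower() == client_key.lower():
+                return package.read(entry['blobPath'])
+    return None  # not in this package; the service may fall back or 404
+```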
+
+
+## Combining with other sources of clientKeys ##
+
+It is possible to run an SSQP service that uses more than one data source to determine the total set of clientKey/filename requests it is able to respond to. For example most existing NuGet symbol service implementations compute their own mappings for files in specific portions of a NuGet symbol package if the files are one of a few well-known formats. This specification explicitly allows for these other data sources to be integrated but implementers should document what happens in the event two disparate sources of mapping information request different blobs to be returned for the same clientKey.
+
+## Usage notes ##
+
+SSQP and the package based symbol server are best suited for controlled settings in which all publishers agree on the same conventions for publishing content. For example a set of developers on a particular project or the employees of a small company.
+
+If there is disagreement (or outright malicious actors) these services do not intrinsically provide any way to determine who deserves to be more trusted. Publishers could easily submit packages with conflicting indexing information which will give undefined results to the SSQP clients. Running this service for a large group, such as worldwide unrestricted publishing access, is therefore not recommended without adding additional arbitration procedures.
\ No newline at end of file
--- /dev/null
+# Table of Contents #
+
+[Simple Symbol Query Protocol (SSQP)](Simple_Symbol_Query_Protocol.md) - Describes the protocol used by clients to download files from a symbol server
+
+[Package Based Symbol Server](Package_Based_Symbol_Server.md) - Describes a particular subset of SSQP servers that use specially formatted .zip or NuGet packages to represent the files that will be made available.
+
+[MyGet symbol server feature request](Myget_symbol_server_extensions.md) - A feature request to the MyGet team to implement a Package Based Symbol Server
+
+[SSQP Key Conventions](SSQP_Key_Conventions.md) - A description of the standard key formats intended for use with SSQP.
+
+[CLR Private SSQP Key Conventions](SSQP_CLR_Private_Key_Conventions.md) - A description of non-standard key formats produced solely by the CLR team
\ No newline at end of file
--- /dev/null
+# SSQP CLR Private Key conventions #
+
+These conventions are private extensions to the normal [SSQP conventions](SSQP_Key_Conventions.md). They fulfill niche scenarios specific to the CLR product and are not expected to be used within any general purpose index generating tool.
+
+## Basic rules ##
+
+The private conventions use the same basic rules for bytes, bytes sequences, integers, strings, etc as described in the standard conventions.
+
+## Key formats ##
+
+
+### PE-filesize-timestamp-coreclr
+
+This key indexes an sos\*.dll or mscordaccore\*.dll file that should be used to debug a given coreclr.dll. The lookup key is computed similarly to PE-timestamp-filesize, except the timestamp and filesize values are taken from coreclr.dll rather than from the file being indexed.
+
+Example:
+
+**File names:** `mscordaccore.dll, sos.dll or SOS.NETCore.dll`
+
+**CoreCLR’s COFF header Timestamp field:** `0x542d5742`
+
+**CoreCLR’s COFF header SizeOfImage field:** `0x32000`
+
+**Lookup keys:**
+
+ mscordaccore.dll/542d574200032000/mscordaccore.dll
+ sos.dll/542d574200032000/sos.dll
+ SOS.NETCore.dll/542d574200032000/SOS.NETCore.dll
+
+
+### ELF-buildid-coreclr
+
+This applies to any file named libmscordaccore.so or libsos.so that should be used to debug a given libcoreclr.so. The key is computed similarly to ELF-buildid except the note bytes are retrieved from the libcoreclr.so file and prefixed with 'elf-buildid-coreclr-':
+
+`<file_name>/elf-buildid-coreclr-<note_byte_sequence>/<file_name>`
+
+Example:
+
+**File names:** `libmscordaccore.so, libsos.so or SOS.NETCore.dll`
+
+**libcoreclr.so’s build note bytes:** `0x18, 0x0a, 0x37, 0x3d, 0x6a, 0xfb, 0xab, 0xf0, 0xeb, 0x1f, 0x09, 0xbe, 0x1b, 0xc4, 0x5b, 0xd7, 0x96, 0xa7, 0x10, 0x85`
+
+**Lookup keys:**
+
+ libmscordaccore.so/elf-buildid-coreclr-180a373d6afbabf0eb1f09be1bc45bd796a71085/libmscordaccore.so
+ libsos.so/elf-buildid-coreclr-180a373d6afbabf0eb1f09be1bc45bd796a71085/libsos.so
+ sos.netcore.dll/elf-buildid-coreclr-180a373d6afbabf0eb1f09be1bc45bd796a71085/sos.netcore.dll
+
+
+### Mach-uuid-coreclr
+
+This applies to any file named libmscordaccore.dylib or libsos.dylib that should be used to debug a given libcoreclr.dylib. The key is computed similarly to Mach-uuid except the uuid is retrieved from the libcoreclr.dylib file and prefixed with 'mach-uuid-coreclr-':
+
+`<file_name>/mach-uuid-coreclr-<uuid_bytes>/<file_name>`
+
+Example:
+
+**File names:** `libmscordaccore.dylib, libsos.dylib or SOS.NETCore.dll`
+
+**libcoreclr.dylib’s uuid bytes:** `0x49, 0x7B, 0x72, 0xF6, 0x39, 0x0A, 0x44, 0xFC, 0x87, 0x8E, 0x5A, 0x2D, 0x63, 0xB6, 0xCC, 0x4B`
+
+**Lookup keys:**
+
+ libmscordaccore.dylib/mach-uuid-coreclr-497b72f6390a44fc878e5a2d63b6cc4b/libmscordaccore.dylib
+ libsos.dylib/mach-uuid-coreclr-497b72f6390a44fc878e5a2d63b6cc4b/libsos.dylib
+ sos.netcore.dll/mach-uuid-coreclr-497b72f6390a44fc878e5a2d63b6cc4b/sos.netcore.dll
+
--- /dev/null
+# SSQP Key conventions #
+
+When using [SSQP](Simple_Symbol_Query_Protocol.md) it is critical that content publishers and content consumers agree on what keys should correspond to which files. Although any publisher-consumer pair is free to create private agreements, using a standard key format offers the widest compatibility.
+
+
+## Key formatting basic rules
+Unless otherwise specified:
+
+- Bytes: Convert to characters by splitting the byte into its most significant 4 bits and least significant 4 bits, each of which has a value 0-15. Convert each of those chunks to the corresponding lower-case hexadecimal character, then concatenate the two characters, putting the most significant chunk first. For example 0 => '00', 1 => '01', 45 => '2d', 185 => 'b9'
+- Byte sequences: Convert to characters by converting each byte as above and then concatenating the characters. For example 2,45,4 => '022d04'
+- Multi-byte integers: Convert to characters by first converting the integer to a big-endian byte sequence, next converting the sequence as above, and finally trimming all leading '0' characters. For example 3,559,453,162 => 'd428f1ea', 114 => '72'
+- Strings: Convert all the characters to lower-case
+- Guid: The guid consists of a 4 byte integer, two 2 byte integers, and a sequence of 8 bytes. It is formatted by converting each portion to hex characters without trimming leading '0' characters on the integers, then concatenating the results. For example { 0x097B72F6, 0x390A, 0x04FC, { 0x87, 0x8E, 0x5A, 0x2D, 0x63, 0xB6, 0xCC, 0x4B } } => '097b72f6390a04fc878e5a2d63b6cc4b'
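+
+These rules can be captured in a short Python sketch (the helper names are illustrative, not part of the convention):
+
+```python
+def format_bytes(data: bytes) -> str:
+    # Each byte becomes two lower-case hex characters, most significant nibble first.
+    return data.hex()  # bytes([2, 45, 4]) -> '022d04'
+
+def format_integer(value: int) -> str:
+    # Big-endian byte sequence, hex-encoded, with leading '0' characters trimmed.
+    return format(value, 'x')  # 3559453162 -> 'd428f1ea', 114 -> '72'
+
+def format_guid(data1: int, data2: int, data3: int, data4: bytes) -> str:
+    # Fixed-width pieces: the integers keep their leading zeroes here.
+    return f'{data1:08x}{data2:04x}{data3:04x}{format_bytes(data4)}'
+
+assert format_guid(0x097B72F6, 0x390A, 0x04FC,
+                   bytes([0x87, 0x8E, 0x5A, 0x2D, 0x63, 0xB6, 0xCC, 0x4B])) \
+    == '097b72f6390a04fc878e5a2d63b6cc4b'
+```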
+
+## Key formats
+
+
+### PE-timestamp-filesize
+This key references Windows Portable Executable format files, which commonly have .dll or .exe suffixes. The key is computed by extracting the Timestamp (4 byte integer) and SizeOfImage (4 byte integer) fields from the COFF header in the PE image. The key is formatted:
+
+`<filename>/<Timestamp><SizeOfImage>/<filename>`
+
+Note that the timestamp is always printed as eight digits (with leading zeroes as needed) using upper-case for ‘A’ to ‘F’ (important if your symbol server is case-sensitive), whereas the image size is printed using as few digits as needed, in lower-case.
+
+Example:
+
+**File name:** `Foo.exe`
+
+**COFF header Timestamp field:** `0x542d574e`
+
+**COFF header SizeOfImage field:** `0xc2000`
+
+**Lookup key:** `foo.exe/542D574Ec2000/foo.exe`
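+
+A minimal sketch of this key computation (Python; it assumes the two COFF header fields have already been parsed out of the image):
+
+```python
+def pe_key(filename: str, timestamp: int, size_of_image: int) -> str:
+    name = filename.lower()
+    # Timestamp: exactly eight upper-case digits; SizeOfImage: trimmed, lower-case.
+    return f'{name}/{timestamp:08X}{size_of_image:x}/{name}'
+
+assert pe_key('Foo.exe', 0x542d574e, 0xc2000) == 'foo.exe/542D574Ec2000/foo.exe'
+```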
+
+
+### PDB-Signature-Age
+
+This applies to the Microsoft C++ Symbol Format, commonly called PDB, which uses files with the .pdb file extension. The key is computed by extracting the Signature (guid) and Age (4 byte integer) values from the guid stream within the MSF container. The final key is formatted:
+
+`<filename>/<Signature><Age>/<filename>`
+
+Example:
+
+**File name:** `Foo.pdb`
+
+**Signature field:** `{ 0x497B72F6, 0x390A, 0x44FC, { 0x87, 0x8E, 0x5A, 0x2D, 0x63, 0xB6, 0xCC, 0x4B } }`
+
+**Age field:** `0x1`
+
+**Lookup key**: `foo.pdb/497b72f6390a44fc878e5a2d63b6cc4b1/foo.pdb`
+
+
+### Portable-Pdb-Signature
+
+This applies to Microsoft .Net portable PDB format files, commonly using the suffix .pdb. The Portable PDB format uses the same key format as Windows PDBs, except that 0xFFFFFFFF (UInt32.MaxValue) is used for the age. In other words, the key is computed by extracting the Signature (guid) from debug metadata header and combining it with 'FFFFFFFF'. The final key is formatted:
+
+`<filename>/<guid>FFFFFFFF/<filename>`
+
+Example:
+
+**File name:** `Foo.pdb`
+
+**Signature field:** `{ 0x497B72F6, 0x390A, 0x44FC, { 0x87, 0x8E, 0x5A, 0x2D, 0x63, 0xB6, 0xCC, 0x4B } }`
+
+**Lookup key:** `foo.pdb/497b72f6390a44fc878e5a2d63b6cc4bFFFFFFFF/foo.pdb`
+
+
+### ELF-buildid
+
+The ELF format files indexed with the ELF-buildid convention are expected to be the exact image that is loaded in a process or core dump; the module is not required to be stripped of its symbols (although it usually is), and it commonly uses the .so suffix or no suffix. The key is computed by reading the 20 byte sequence of the ELF Note section that is named “GNU” and that has note type GNU Build Id (3). If the byte sequence is smaller than 20 bytes, bytes of value 0x00 should be appended until the byte sequence is 20 bytes long. The final key is formatted:
+
+`<file_name>/elf-buildid-<note_byte_sequence>/<file_name>`
+
+Example:
+
+**File name:** `foo.so`
+
+**Build note bytes:** `0x18, 0x0a, 0x37, 0x3d, 0x6a, 0xfb, 0xab, 0xf0, 0xeb, 0x1f, 0x09, 0xbe, 0x1b, 0xc4, 0x5b, 0xd7, 0x96, 0xa7, 0x10, 0x85`
+
+**Lookup key:** `foo.so/elf-buildid-180a373d6afbabf0eb1f09be1bc45bd796a71085/foo.so`
+
+
+### ELF-buildid-sym
+
+The ELF format files indexed with the ELF-buildid-sym convention are the result of the stripping process and contain only the symbols from the ELF-buildid indexed module. They commonly end in ‘.debug’, ‘.so.dbg’ or ‘.dbg’. The key is computed by reading the 20 byte sequence of the ELF Note section that is named “GNU” and that has note type GNU Build Id (3). If the byte sequence is smaller than 20 bytes, bytes of value 0x00 should be appended until the byte sequence is 20 bytes long. The file name is not used in the index because there are cases where all we have is the build id. The final key is formatted:
+
+`_.debug/elf-buildid-sym-<note_byte_sequence>/_.debug`
+
+Example:
+
+**File name:** `foo.so.dbg`
+
+**Build note bytes:** `0x18, 0x0a, 0x37, 0x3d, 0x6a, 0xfb, 0xab, 0xf0, 0xeb, 0x1f, 0x09, 0xbe, 0x1b, 0xc4, 0x5b, 0xd7, 0x96, 0xa7, 0x10, 0x85`
+
+**Lookup key:** `_.debug/elf-buildid-sym-180a373d6afbabf0eb1f09be1bc45bd796a71085/_.debug`
+
+Example:
+
+**File name:** `bar.so.dbg`
+
+**Build note bytes:** `0x18, 0x0a, 0x37, 0x3d, 0x6a, 0xfb, 0xab, 0xf0, 0xeb, 0x1f, 0x09, 0xbe, 0x1b, 0xc4, 0x5b, 0xd7`
+
+**Lookup key:** `_.debug/elf-buildid-sym-180a373d6afbabf0eb1f09be1bc45bd700000000/_.debug`
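+
+A short sketch covering both ELF key shapes, including the zero-padding rule (Python; `build_id` is assumed to already hold the GNU build id note bytes):
+
+```python
+def elf_keys(filename: str, build_id: bytes) -> tuple[str, str]:
+    padded = build_id.ljust(20, b'\x00').hex()  # pad short ids to 20 bytes
+    name = filename.lower()
+    return (f'{name}/elf-buildid-{padded}/{name}',        # stripped module key
+            f'_.debug/elf-buildid-sym-{padded}/_.debug')  # symbol file key
+
+short_id = bytes.fromhex('180a373d6afbabf0eb1f09be1bc45bd7')  # only 16 bytes
+assert elf_keys('bar.so', short_id)[1] == \
+    '_.debug/elf-buildid-sym-180a373d6afbabf0eb1f09be1bc45bd700000000/_.debug'
+```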
+
+
+### Mach-uuid
+This applies to any MachO format files that have been stripped of debugging information, commonly ending in '.dylib'. The key is computed by reading the uuid byte sequence of the MachO LC_UUID load command. The final key is formatted:
+
+`<file_name>/mach-uuid-<uuid_bytes>/<file_name>`
+
+Example:
+
+**File name:** `foo.dylib`
+
+**Uuid bytes:** `0x49, 0x7B, 0x72, 0xF6, 0x39, 0x0A, 0x44, 0xFC, 0x87, 0x8E, 0x5A, 0x2D, 0x63, 0xB6, 0xCC, 0x4B`
+
+**Lookup key:** `foo.dylib/mach-uuid-497b72f6390a44fc878e5a2d63b6cc4b/foo.dylib`
+
+
+### Mach-uuid-sym
+
+This applies to any MachO format files that have not been stripped of debugging information, commonly ending in '.dylib.dwarf'. The key is computed by reading the uuid byte sequence of the MachO LC_UUID load command. The final key is formatted:
+
+`_.dwarf/mach-uuid-sym-<uuid_bytes>/_.dwarf`
+
+Example:
+
+**File name:** `foo.dylib.dwarf`
+
+**Uuid bytes:** `0x49, 0x7B, 0x72, 0xF6, 0x39, 0x0A, 0x44, 0xFC, 0x87, 0x8E, 0x5A, 0x2D, 0x63, 0xB6, 0xCC, 0x4B`
+
+**Lookup key:** `_.dwarf/mach-uuid-sym-497b72f6390a44fc878e5a2d63b6cc4b/_.dwarf`
+
+
+### SHA1
+
+This applies to any file, but is commonly used for sources. The key is computed by calculating the SHA1 hash of the file, then formatting the 20 byte hash sequence prepended with 'sha1-'. The final key is formatted:
+
+`<filename>/sha1-<hash_byte_sequence>/<filename>`
+
+Example:
+
+**File name:** `Foo.cs`
+
+**Sha1 hash bytes:** `0x49, 0x7B, 0x72, 0xF6, 0x39, 0x0A, 0x44, 0xFC, 0x87, 0x8E, 0x5A, 0x2D, 0x63, 0xB6, 0xCC, 0x4B, 0x0C, 0x2D, 0x99, 0x84`
+
+**Lookup key:** `foo.cs/sha1-497b72f6390a44fc878e5a2d63b6cc4b0c2d9984/foo.cs`
+
+### R2R PerfMap v1
+
+This applies to v1 PerfMap files produced by CrossGen2, commonly having the extension `.ni.r2rmap`. The key is formed from the signature, the file name, and the version in the following manner:
+
+`<file_name>/r2rmap-v<version>-<signature>/<file_name>`
+
+Example:
+
+**File name:** `System.Private.CoreLib.ni.r2rmap`
+
+**Signature at pseudo-rva 0xFFFFFFFF:** `f5fddf60efb0bee79ef02a19c3decba9`
+
+**Version at pseudo-rva 0xFFFFFFFE:** `1`
+
+**Lookup key:** `system.private.corelib.ni.r2rmap/r2rmap-v1-f5fddf60efb0bee79ef02a19c3decba9/system.private.corelib.ni.r2rmap`
--- /dev/null
+# Simple Symbol Query Protocol (SSQP) #
+
+Frequently when diagnosing computer programs there is a need to retrieve additional information about the program beyond what was required for a computer to run it. This is accomplished by having a network service, the 'symbol server', which provides the additional information on demand. Diagnostic tools such as debuggers or profilers act as symbol server clients, submitting requests for the additional information they need.
+
+The protocol solely consists of a mechanism for the client to provide a key, the 'clientKey', identifying the file it wants. The server then sends back that file. The publisher that created the content and the client that downloads it may understand the semantics of the key and the resulting file, but from the perspective of the protocol and the server these items are merely character sequences and binary blobs.
+
+## Request ##
+All requests are [HTTP/1.1](https://tools.ietf.org/html/rfc2616 "HTTP/1.1") messages using the 'GET' verb. The client is configured with a valid URI for the service endpoint and a clientKey it wants to retrieve. The client should request the URI <service\_endpoint\>/<encodedClientKey\>.
+
+For example:
+
+**service endpoint URI:** http://www.contuso.com/symbol/server
+
+**clientKey:** debug\_info.txt/12345abcdefg/debug\_info.txt
+
+ GET http://www.contuso.com/symbol/server/debug\_info.txt/12345abcdefg/debug\_info.txt
+
+### Encoding and case sensitivity ###
+The clientKey needs to be URL encoded as it may contain characters outside the [URI unreserved character set](https://tools.ietf.org/html/rfc3986#Section-2.3). Any '/' character should not be escaped, but all other characters are URL encoded as normal. clientKey is not case-sensitive. Clients are encouraged to normalize the encoding to lower-case in the request URI, but the server should perform any clientKey comparisons in a case-insensitive manner regardless.
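+
+A minimal sketch of the request construction under these rules (Python, using the standard library's `quote`, which leaves '/' unescaped by default):
+
+```python
+from urllib.parse import quote
+
+def request_url(endpoint: str, client_key: str) -> str:
+    # Normalize to lower-case and URL-encode everything except '/'.
+    encoded = quote(client_key.lower(), safe='/')
+    return f"{endpoint.rstrip('/')}/{encoded}"
+
+print(request_url('http://www.contuso.com/symbol/server',
+                  'debug_info.txt/12345abcdefg/debug_info.txt'))
+# http://www.contuso.com/symbol/server/debug_info.txt/12345abcdefg/debug_info.txt
+```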
+
+## Response ##
+
+The response is a standard [HTTP/1.1](https://tools.ietf.org/html/rfc2616 "HTTP/1.1") response including the content of the requested file. Content-Type should be application/octet-stream.
+
+An HTTP 302 redirection will be honored by the client.
+
+## Relation to the SymSrv protocol ##
+
+This protocol has been deliberately designed so that when used with the [recommended key conventions](SSQP_Key_Conventions.md) for windows file types it is also compatible with windows SymSrv clients.
+
+## Security Warnings ##
+### ClientKeys ###
+Be careful about assuming that client keys can be interpreted as relative file system paths or other meaningful storage paths. Although all the key conventions may appear safe to use this way, an attacker might create keys which have special meaning to the filesystem in an attempt to read or write to sensitive areas on the client or server. If you do use the key this way, carefully validate it first.
+
+### Trusting downloaded files ###
+Clients consuming the diagnostic files downloaded via SSQP often expect that there is a certain relationship between the key and the data that is downloaded. For example, a client may expect that requesting key 'sha1-123878123871238712378519847' guarantees that the data which comes back has the given hash. However, the server is not required to do any verification of this. A client that wants to be certain must either:
+
+ - verify that invariants hold after the data has been downloaded
+ - trust the author of the mapping data, every intermediary that handled the data at rest or in transit, and know that none of the trusted parties merge in any clientKey mappings from untrusted parties
+
+There are internet hosted symbol server implementations that DO merge mappings from untrusted and potentially anonymous 3rd parties. In this case it is trivial for an attacker to pose as such a 3rd party and add mappings that remap legitimate clientKeys to arbitrary attacker chosen files. Clients need to assume that ANY data returned by such a service could have been tampered with, regardless of whether the particular key being requested was originally derived from a trusted source.
+
+For tools that allow the end-user to specify the SSQP endpoint to connect to, consider the risk that the end-user may change configuration from a service that hosts only trusted content to a service that hosts untrusted content without realizing the security implications of this choice. Some guides on the web from reputable and independent sources explicitly tell developers to make such configuration changes without any mention of the risks.
+
+Clients are strongly recommended to only work with trusted services, harden against using malicious downloaded files, or implement a scheme to independently verify the integrity and authenticity of downloaded files before using them.
\ No newline at end of file
<Dependencies>
<ProductDependencies>
- <Dependency Name="Microsoft.SymbolStore" Version="1.0.517501">
- <Uri>https://github.com/dotnet/symstore</Uri>
- <Sha>550601c12a227c87ded32316345934101a8a2422</Sha>
- </Dependency>
- <Dependency Name="Microsoft.Diagnostics.Runtime" Version="3.1.520301">
+ <Dependency Name="Microsoft.Diagnostics.Runtime" Version="3.1.525101">
<Uri>https://github.com/microsoft/clrmd</Uri>
- <Sha>b63a9ce8a0fe4bc3dea1b941b611bd1ac2720020</Sha>
+ <Sha>d5923fbec06fc227cde5e5a2d7ea58c02802e971</Sha>
</Dependency>
- <Dependency Name="Microsoft.Diagnostics.Runtime.Utilities" Version="3.1.520301">
+ <Dependency Name="Microsoft.Diagnostics.Runtime.Utilities" Version="3.1.525101">
<Uri>https://github.com/microsoft/clrmd</Uri>
- <Sha>b63a9ce8a0fe4bc3dea1b941b611bd1ac2720020</Sha>
+ <Sha>d5923fbec06fc227cde5e5a2d7ea58c02802e971</Sha>
</Dependency>
</ProductDependencies>
<ToolsetDependencies>
- <Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="9.0.0-beta.24207.1">
+ <Dependency Name="Microsoft.DotNet.Arcade.Sdk" Version="9.0.0-beta.24281.1">
<Uri>https://github.com/dotnet/arcade</Uri>
- <Sha>9e3b333509ec2990dd20fbf01f50fd42d7f23d56</Sha>
+ <Sha>e6b3f32f9855dccbe2447471c8f729b66f17d242</Sha>
</Dependency>
<!-- Intermediate is necessary for source build. -->
- <Dependency Name="Microsoft.SourceBuild.Intermediate.arcade" Version="9.0.0-beta.24207.1">
+ <Dependency Name="Microsoft.SourceBuild.Intermediate.arcade" Version="9.0.0-beta.24281.1">
<Uri>https://github.com/dotnet/arcade</Uri>
- <Sha>9e3b333509ec2990dd20fbf01f50fd42d7f23d56</Sha>
+ <Sha>e6b3f32f9855dccbe2447471c8f729b66f17d242</Sha>
<SourceBuild RepoName="arcade" ManagedOnly="true" />
</Dependency>
- <Dependency Name="Microsoft.DotNet.CodeAnalysis" Version="9.0.0-beta.24207.1">
+ <Dependency Name="Microsoft.DotNet.CodeAnalysis" Version="9.0.0-beta.24281.1">
<Uri>https://github.com/dotnet/arcade</Uri>
- <Sha>9e3b333509ec2990dd20fbf01f50fd42d7f23d56</Sha>
+ <Sha>e6b3f32f9855dccbe2447471c8f729b66f17d242</Sha>
</Dependency>
<Dependency Name="Microsoft.DotNet.RemoteExecutor" Version="7.0.0-beta.22316.2" Pinned="true">
<Uri>https://github.com/dotnet/arcade</Uri>
<Sha>ccfe6da198c5f05534863bbb1bff66e830e0c6ab</Sha>
</Dependency>
- <Dependency Name="Microsoft.Dotnet.Sdk.Internal" Version="9.0.100-preview.3.24161.2">
+ <Dependency Name="Microsoft.Dotnet.Sdk.Internal" Version="9.0.100-preview.5.24253.16">
<Uri>https://github.com/dotnet/installer</Uri>
- <Sha>e911f5c82cc02aea96e227596e16c830d54cf03a</Sha>
+ <Sha>fa261b952d702c6bd604728fcbdb58ac071a22b1</Sha>
</Dependency>
- <Dependency Name="Microsoft.AspNetCore.App.Ref.Internal" Version="9.0.0-preview.4.24208.6">
+ <Dependency Name="Microsoft.AspNetCore.App.Ref.Internal" Version="9.0.0-preview.6.24306.5">
<Uri>https://github.com/dotnet/aspnetcore</Uri>
- <Sha>79ef5e329b1e31c3775a1977798253cc8f7da6cc</Sha>
+ <Sha>10517269f40d53eb22cce6b4d520ed27ed1e1b9f</Sha>
</Dependency>
- <Dependency Name="Microsoft.AspNetCore.App.Ref" Version="9.0.0-preview.4.24208.6">
+ <Dependency Name="Microsoft.AspNetCore.App.Ref" Version="9.0.0-preview.6.24306.5">
<Uri>https://github.com/dotnet/aspnetcore</Uri>
- <Sha>79ef5e329b1e31c3775a1977798253cc8f7da6cc</Sha>
+ <Sha>10517269f40d53eb22cce6b4d520ed27ed1e1b9f</Sha>
</Dependency>
- <Dependency Name="Microsoft.NETCore.App.Runtime.win-x64" Version="9.0.0-preview.4.24208.10">
+ <Dependency Name="Microsoft.NETCore.App.Runtime.win-x64" Version="9.0.0-preview.6.24306.8">
<Uri>https://github.com/dotnet/runtime</Uri>
- <Sha>404b286b23093cd93a985791934756f64a33483e</Sha>
+ <Sha>f6a7ebbb81540401e6b5520afa3ba87c2bd6bcfe</Sha>
</Dependency>
- <Dependency Name="VS.Redist.Common.NetCore.SharedFramework.x64.9.0" Version="9.0.0-preview.4.24208.10">
+ <Dependency Name="VS.Redist.Common.NetCore.SharedFramework.x64.9.0" Version="9.0.0-preview.6.24306.8">
<Uri>https://github.com/dotnet/runtime</Uri>
- <Sha>404b286b23093cd93a985791934756f64a33483e</Sha>
+ <Sha>f6a7ebbb81540401e6b5520afa3ba87c2bd6bcfe</Sha>
</Dependency>
<!-- Intermediate is necessary for source build. -->
- <Dependency Name="Microsoft.SourceBuild.Intermediate.source-build-reference-packages" Version="9.0.0-alpha.1.24162.2">
+ <Dependency Name="Microsoft.SourceBuild.Intermediate.source-build-reference-packages" Version="9.0.0-alpha.1.24304.1">
<Uri>https://github.com/dotnet/source-build-reference-packages</Uri>
- <Sha>c0b5d69a1a1513528c77fffff708c7502d57c35c</Sha>
+ <Sha>9ae78a4e6412926d19ba97cfed159bf9de70b538</Sha>
<SourceBuild RepoName="source-build-reference-packages" ManagedOnly="true" />
</Dependency>
- <Dependency Name="Microsoft.CodeAnalysis" Version="4.6.0-1.23073.4">
+ <Dependency Name="Microsoft.CodeAnalysis" Version="4.11.0-2.24271.11">
<Uri>https://github.com/dotnet/roslyn</Uri>
- <Sha>6acaa7b7c0efea8ea292ca26888c0346fbf8b0c1</Sha>
+ <Sha>40e6b96cad11400acb5b8999057ac8ba748df940</Sha>
</Dependency>
- <Dependency Name="Microsoft.CodeAnalysis.CSharp" Version="4.6.0-1.23073.4">
+ <Dependency Name="Microsoft.CodeAnalysis.CSharp" Version="4.11.0-2.24271.11">
<Uri>https://github.com/dotnet/roslyn</Uri>
- <Sha>6acaa7b7c0efea8ea292ca26888c0346fbf8b0c1</Sha>
+ <Sha>40e6b96cad11400acb5b8999057ac8ba748df940</Sha>
</Dependency>
- <Dependency Name="Microsoft.CodeAnalysis.Analyzers" Version="3.11.0-beta1.23420.2">
+ <Dependency Name="Microsoft.CodeAnalysis.Analyzers" Version="3.11.0-beta1.24225.1">
<Uri>https://github.com/dotnet/roslyn-analyzers</Uri>
- <Sha>76d99c5f3e11f0600fae074270c0d89042c360f0</Sha>
+ <Sha>8dccccec1ce3bd2fb532ec77d7e092ab9d684db7</Sha>
</Dependency>
- <Dependency Name="Microsoft.CodeAnalysis.NetAnalyzers" Version="8.0.0-preview.23420.2">
+ <Dependency Name="Microsoft.CodeAnalysis.NetAnalyzers" Version="9.0.0-preview.24225.1">
<Uri>https://github.com/dotnet/roslyn-analyzers</Uri>
- <Sha>76d99c5f3e11f0600fae074270c0d89042c360f0</Sha>
+ <Sha>8dccccec1ce3bd2fb532ec77d7e092ab9d684db7</Sha>
</Dependency>
</ToolsetDependencies>
</Dependencies>
<SkipPackagePublishingVersionChecks>true</SkipPackagePublishingVersionChecks>
</PropertyGroup>
<PropertyGroup>
- <!-- Latest symstore version updated by darc -->
- <MicrosoftSymbolStoreVersion>1.0.517501</MicrosoftSymbolStoreVersion>
<!-- Latest shared runtime version updated by darc -->
- <VSRedistCommonNetCoreSharedFrameworkx6490Version>9.0.0-preview.4.24208.10</VSRedistCommonNetCoreSharedFrameworkx6490Version>
- <MicrosoftNETCoreAppRuntimewinx64Version>9.0.0-preview.4.24208.10</MicrosoftNETCoreAppRuntimewinx64Version>
+ <VSRedistCommonNetCoreSharedFrameworkx6490Version>9.0.0-preview.6.24306.8</VSRedistCommonNetCoreSharedFrameworkx6490Version>
+ <MicrosoftNETCoreAppRuntimewinx64Version>9.0.0-preview.6.24306.8</MicrosoftNETCoreAppRuntimewinx64Version>
<!-- Latest shared aspnetcore version updated by darc -->
- <MicrosoftAspNetCoreAppRefInternalVersion>9.0.0-preview.4.24208.6</MicrosoftAspNetCoreAppRefInternalVersion>
- <MicrosoftAspNetCoreAppRefVersion>9.0.0-preview.4.24208.6</MicrosoftAspNetCoreAppRefVersion>
+ <MicrosoftAspNetCoreAppRefInternalVersion>9.0.0-preview.6.24306.5</MicrosoftAspNetCoreAppRefInternalVersion>
+ <MicrosoftAspNetCoreAppRefVersion>9.0.0-preview.6.24306.5</MicrosoftAspNetCoreAppRefVersion>
<!-- dotnet/installer: Testing version of the SDK. Needed for the signed & entitled host. -->
- <MicrosoftDotnetSdkInternalVersion>9.0.100-preview.3.24161.2</MicrosoftDotnetSdkInternalVersion>
- </PropertyGroup>
- <PropertyGroup>
- <!-- Runtime versions to test -->
- <MicrosoftNETCoreApp60Version>6.0.24</MicrosoftNETCoreApp60Version>
- <MicrosoftAspNetCoreApp60Version>$(MicrosoftNETCoreApp60Version)</MicrosoftAspNetCoreApp60Version>
- <MicrosoftNETCoreApp70Version>7.0.16</MicrosoftNETCoreApp70Version>
- <MicrosoftAspNetCoreApp70Version>$(MicrosoftNETCoreApp70Version)</MicrosoftAspNetCoreApp70Version>
- <!-- The SDK runtime version used to build single-file apps (currently hardcoded) -->
- <SingleFileRuntime60Version>$(MicrosoftNETCoreApp60Version)</SingleFileRuntime60Version>
- <SingleFileRuntime70Version>7.0.16</SingleFileRuntime70Version>
- <SingleFileRuntimeLatestVersion>8.0.2</SingleFileRuntimeLatestVersion>
+ <MicrosoftDotnetSdkInternalVersion>9.0.100-preview.5.24253.16</MicrosoftDotnetSdkInternalVersion>
</PropertyGroup>
<PropertyGroup>
<!-- Opt-in/out repo features -->
<UsingToolXliff>false</UsingToolXliff>
+ <!-- Uncomment this line to use the custom version of roslyn as needed. -->
+ <!-- <UsingToolMicrosoftNetCompilers Condition="'$(DotNetBuildSourceOnly)' != 'true'">true</UsingToolMicrosoftNetCompilers> -->
<!-- CoreFX -->
<SystemReflectionMetadataVersion>5.0.0</SystemReflectionMetadataVersion>
<SystemCollectionsImmutableVersion>6.0.0</SystemCollectionsImmutableVersion>
<!-- Other libs -->
<MicrosoftBclAsyncInterfacesVersion>6.0.0</MicrosoftBclAsyncInterfacesVersion>
- <MicrosoftDiagnosticsRuntimeVersion>3.1.520301</MicrosoftDiagnosticsRuntimeVersion>
- <MicrosoftDiaSymReaderNativeVersion>16.11.27-beta1.23180.1</MicrosoftDiaSymReaderNativeVersion>
+ <MicrosoftDiagnosticsRuntimeVersion>3.1.525101</MicrosoftDiagnosticsRuntimeVersion>
+ <MicrosoftDiaSymReaderNativeVersion>17.10.0-beta1.24272.1</MicrosoftDiaSymReaderNativeVersion>
<MicrosoftDiagnosticsTracingTraceEventVersion>3.0.7</MicrosoftDiagnosticsTracingTraceEventVersion>
<MicrosoftExtensionsLoggingVersion>6.0.0</MicrosoftExtensionsLoggingVersion>
<MicrosoftExtensionsLoggingAbstractionsVersion>6.0.4</MicrosoftExtensionsLoggingAbstractionsVersion>
<SystemTextEncodingsWebVersion>6.0.0</SystemTextEncodingsWebVersion>
<SystemTextJsonVersion>6.0.8</SystemTextJsonVersion>
<XUnitAbstractionsVersion>2.0.3</XUnitAbstractionsVersion>
- <MicrosoftDotNetCodeAnalysisVersion>9.0.0-beta.24207.1</MicrosoftDotNetCodeAnalysisVersion>
+ <MicrosoftDotNetCodeAnalysisVersion>9.0.0-beta.24281.1</MicrosoftDotNetCodeAnalysisVersion>
<StyleCopAnalyzersVersion>1.2.0-beta.406</StyleCopAnalyzersVersion>
<MicrosoftDotNetRemoteExecutorVersion>7.0.0-beta.22316.2</MicrosoftDotNetRemoteExecutorVersion>
<cdbsosversion>10.0.18362</cdbsosversion>
<NewtonSoftJsonVersion>13.0.1</NewtonSoftJsonVersion>
- <MicrosoftSourceBuildIntermediatesourcebuildreferencepackagesPackageVersion>9.0.0-alpha.1.24162.2</MicrosoftSourceBuildIntermediatesourcebuildreferencepackagesPackageVersion>
+ <MicrosoftSourceBuildIntermediatesourcebuildreferencepackagesPackageVersion>9.0.0-alpha.1.24304.1</MicrosoftSourceBuildIntermediatesourcebuildreferencepackagesPackageVersion>
<!-- Roslyn and analyzers -->
+ <!-- dotnet/roslyn dependencies -->
+ <!--
+ These versions should not be used by any project that contributes to the design-time experience in VS, such as an analyzer, code-fix, or generator assembly.
+ Any tools that contribute to the design-time experience should use the MicrosoftCodeAnalysisVersion_LatestVS property above to ensure
+ they do not break the local dev experience.
+ -->
+ <MicrosoftCodeAnalysisCSharpVersion>4.11.0-2.24271.11</MicrosoftCodeAnalysisCSharpVersion>
+ <MicrosoftCodeAnalysisVersion>4.11.0-2.24271.11</MicrosoftCodeAnalysisVersion>
+ <MicrosoftNetCompilersToolsetVersion>4.11.0-2.24271.11</MicrosoftNetCompilersToolsetVersion>
<!-- Compatibility with VS 16.11/.NET SDK 5.0.4xx -->
<MicrosoftCodeAnalysisVersion_3_11>3.11.0</MicrosoftCodeAnalysisVersion_3_11>
<!-- Compatibility with VS 17.0/.NET SDK 6.0.1xx -->
It should never go ahead of the Roslyn version included in the SDK version in dotnet/arcade's global.json to avoid causing breaks in product construction.
-->
<MicrosoftCodeAnalysisVersion_4_4>4.4.0</MicrosoftCodeAnalysisVersion_4_4>
- <MicrosoftCodeAnalysisVersion_LatestVS>4.4.0</MicrosoftCodeAnalysisVersion_LatestVS>
+ <MicrosoftCodeAnalysisVersion_LatestVS>4.8.0</MicrosoftCodeAnalysisVersion_LatestVS>
<MicrosoftCodeAnalysisVersion_LatestVS Condition="'$(DotNetBuildSourceOnly)' == 'true'">$(MicrosoftCodeAnalysisVersion)</MicrosoftCodeAnalysisVersion_LatestVS>
<MicrosoftCodeAnalysisAnalyzersVersion>3.11.0-beta1.23420.2</MicrosoftCodeAnalysisAnalyzersVersion>
<MicrosoftCodeAnalysisNetAnalyzersVersion>8.0.0-preview.23420.2</MicrosoftCodeAnalysisNetAnalyzersVersion>
These packages affect the design-time experience in VS, so we update them at the same cadence as the MicrosoftCodeAnalysisVersion_LatestVS version.
-->
<MicrosoftCodeAnalysisCSharpCodeStyleVersion>$(MicrosoftCodeAnalysisVersion_LatestVS)</MicrosoftCodeAnalysisCSharpCodeStyleVersion>
- <!-- dotnet/roslyn dependencies -->
- <!--
- These versions should not be used by any project that contributes to the design-time experience in VS, such as an analyzer, code-fix, or generator assembly.
- Any tools that contribute to the design-time experience should use the MicrosoftCodeAnalysisVersion_LatestVS property above to ensure
- they do not break the local dev experience.
- -->
- <MicrosoftCodeAnalysisCSharpVersion>4.8.0-2.23422.14</MicrosoftCodeAnalysisCSharpVersion>
- <MicrosoftCodeAnalysisVersion>4.8.0-2.23422.14</MicrosoftCodeAnalysisVersion>
- <MicrosoftNetCompilersToolsetVersion>4.8.0-2.23422.14</MicrosoftNetCompilersToolsetVersion>
</PropertyGroup>
- <!--
+ <!--
SOS test runtime versions
Internal service release testing:
$(DotnetRuntimeDownloadVersion) - the service release package version i.e. 2.1.17, 3.1.3-servicing.20128.1 or "default"
$(RuntimeSourceFeed) - the service release internal blob storage link
$(RuntimeSourceFeedKey) - the service release blob feed token
-
+
-->
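  <!--
    Illustrative (hypothetical values): for internal service release testing these properties
    could be supplied on the command line, e.g.
      build.cmd -test /p:DotnetRuntimeDownloadVersion=3.1.3-servicing.20128.1 /p:RuntimeSourceFeed=<internal blob feed url> /p:RuntimeSourceFeedKey=<token>
  -->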
<PropertyGroup>
- <MicrosoftNETCoreApp80Version>8.0.2</MicrosoftNETCoreApp80Version>
- <MicrosoftNETCoreApp70Version>7.0.16</MicrosoftNETCoreApp70Version>
- <MicrosoftNETCoreApp60Version>6.0.27</MicrosoftNETCoreApp60Version>
+ <MicrosoftNETCoreApp80Version>8.0.5</MicrosoftNETCoreApp80Version>
+ <MicrosoftNETCoreApp70Version>7.0.19</MicrosoftNETCoreApp70Version>
+ <MicrosoftNETCoreApp60Version>6.0.30</MicrosoftNETCoreApp60Version>
</PropertyGroup>
<PropertyGroup>
<DotnetRuntimeVersion Condition="'$(DotnetRuntimeVersion)' == ''">default</DotnetRuntimeVersion>
</PropertyGroup>
<ItemGroup Condition="!$(InternalReleaseTesting)">
<RuntimeTestVersions Include="Latest">
- <!--
<RuntimeDownload>$(VSRedistCommonNetCoreSharedFrameworkx6490Version)</RuntimeDownload>
<Runtime>$(MicrosoftNETCoreAppRuntimewinx64Version)</Runtime>
<AspNetDownload>$(MicrosoftAspNetCoreAppRefInternalVersion)</AspNetDownload>
<AspNet>$(MicrosoftAspNetCoreAppRefVersion)</AspNet>
- -->
- <RuntimeDownload>9.0.0-preview.3.24160.3</RuntimeDownload>
- <Runtime>9.0.0-preview.3.24160.3</Runtime>
- <AspNetDownload>9.0.0-preview.3.24158.2</AspNetDownload>
- <AspNet>9.0.0-preview.3.24158.2</AspNet>
<TargetFramework>net9.0</TargetFramework>
</RuntimeTestVersions>
<RuntimeTestVersions Include="Servicing1">
-# This file is a temporary workaround for internal builds to be able to restore from private AzDO feeds.
-# This file should be removed as part of this issue: https://github.com/dotnet/arcade/issues/4080
+# This script adds internal feeds required to build commits that depend on internal package sources. For instance,
+# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. It also re-enables
+# disabled internal Maestro (darc-int*) feeds.
+#
+# Optionally, this script also adds a credential entry for each of the internal feeds if one is supplied. The credential
+# is added via the standard environment variable VSS_NUGET_EXTERNAL_FEED_ENDPOINTS. See
+# https://github.com/microsoft/artifacts-credprovider/tree/v1.1.1?tab=readme-ov-file#environment-variables for more details.
#
-# What the script does is iterate over all package sources in the pointed NuGet.config and add a credential entry
-# under <packageSourceCredentials> for each Maestro managed private feed. Two additional credential
-# entries are also added for the two private static internal feeds: dotnet3-internal and dotnet3-internal-transport.
-#
-# This script needs to be called in every job that will restore packages and which the base repo has
-# private AzDO feeds in the NuGet.config.
-#
-# See example YAML call for this script below. Note the use of the variable `$(dn-bot-dnceng-artifact-feeds-rw)`
-# from the AzureDevOps-Artifact-Feeds-Pats variable group.
-#
-# Any disabledPackageSources entries which start with "darc-int" will be re-enabled as part of this script executing
+# See example call for this script below.
#
# - task: PowerShell@2
-# displayName: Setup Private Feeds Credentials
+# displayName: Setup Internal Feeds
# condition: eq(variables['Agent.OS'], 'Windows_NT')
# inputs:
# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1
-# arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token
-# env:
-# Token: $(dn-bot-dnceng-artifact-feeds-rw)
+# arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config
+# - task: NuGetAuthenticate@1
+#
+# Note that the NuGetAuthenticate task should be called after SetupNugetSources.
+# This ensures that:
+# - Appropriate credentials are set for the added internal feeds (if not supplied to the script)
+# - The credential provider is installed
+#
+# This logic is also abstracted into enable-internal-sources.yml.
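+#
+# For reference, the credential payload this script writes to VSS_NUGET_EXTERNAL_FEED_ENDPOINTS
+# has roughly this shape (illustrative endpoint; the PAT comes from -Password):
+#
+#   { "endpointCredentials": [
+#       { "endpoint": "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet8-internal/nuget/v3/index.json",
+#         "password": "<PAT>" } ] }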
[CmdletBinding()]
param (
[Parameter(Mandatory = $true)][string]$ConfigFile,
- [Parameter(Mandatory = $true)][string]$Password
+ [string]$Password
)
$ErrorActionPreference = "Stop"
. $PSScriptRoot\tools.ps1
+$feedEndpoints = $null
+
+# If a credential is provided, ensure that we don't overwrite the current set of
+# credentials that may have been provided by a previous call to the credential provider.
+if ($Password -and $null -ne $env:VSS_NUGET_EXTERNAL_FEED_ENDPOINTS) {
+ $feedEndpoints = $env:VSS_NUGET_EXTERNAL_FEED_ENDPOINTS | ConvertFrom-Json
+} elseif ($Password) {
+ $feedEndpoints = @{ endpointCredentials = @() }
+}
+
# Add source entry to PackageSources
-function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $pwd) {
+function AddPackageSource($sources, $SourceName, $SourceEndPoint, $pwd) {
$packageSource = $sources.SelectSingleNode("add[@key='$SourceName']")
- if ($packageSource -eq $null)
+ if ($null -eq $packageSource)
{
+ Write-Host "`tAdding package source" $SourceName
$packageSource = $doc.CreateElement("add")
$packageSource.SetAttribute("key", $SourceName)
$packageSource.SetAttribute("value", $SourceEndPoint)
else {
Write-Host "Package source $SourceName already present."
}
- AddCredential -Creds $creds -Source $SourceName -Username $Username -pwd $pwd
-}
-
-# Add a credential node for the specified source
-function AddCredential($creds, $source, $username, $pwd) {
- # Looks for credential configuration for the given SourceName. Create it if none is found.
- $sourceElement = $creds.SelectSingleNode($Source)
- if ($sourceElement -eq $null)
- {
- $sourceElement = $doc.CreateElement($Source)
- $creds.AppendChild($sourceElement) | Out-Null
- }
- # Add the <Username> node to the credential if none is found.
- $usernameElement = $sourceElement.SelectSingleNode("add[@key='Username']")
- if ($usernameElement -eq $null)
- {
- $usernameElement = $doc.CreateElement("add")
- $usernameElement.SetAttribute("key", "Username")
- $sourceElement.AppendChild($usernameElement) | Out-Null
+ if ($pwd) {
+ $feedEndpoints.endpointCredentials = AddCredential -endpointCredentials $feedEndpoints.endpointCredentials -source $SourceEndPoint -pwd $pwd
}
- $usernameElement.SetAttribute("value", $Username)
+}
- # Add the <ClearTextPassword> to the credential if none is found.
- # Add it as a clear text because there is no support for encrypted ones in non-windows .Net SDKs.
- # -> https://github.com/NuGet/Home/issues/5526
- $passwordElement = $sourceElement.SelectSingleNode("add[@key='ClearTextPassword']")
- if ($passwordElement -eq $null)
- {
- $passwordElement = $doc.CreateElement("add")
- $passwordElement.SetAttribute("key", "ClearTextPassword")
- $sourceElement.AppendChild($passwordElement) | Out-Null
+# Add a new feed endpoint credential
+function AddCredential([array]$endpointCredentials, $source, $pwd) {
+ $endpointCredentials += @{
+ endpoint = $source;
+ password = $pwd
}
-
- $passwordElement.SetAttribute("value", $pwd)
+ return $endpointCredentials
}
-function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $pwd) {
- $maestroPrivateSources = $Sources.SelectNodes("add[contains(@key,'darc-int')]")
+function InsertMaestroInternalFeedCredentials($Sources, $pwd) {
+ $maestroInternalSources = $Sources.SelectNodes("add[contains(@key,'darc-int')]")
- Write-Host "Inserting credentials for $($maestroPrivateSources.Count) Maestro's private feeds."
-
- ForEach ($PackageSource in $maestroPrivateSources) {
- Write-Host "`tInserting credential for Maestro's feed:" $PackageSource.Key
- AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -pwd $pwd
+ ForEach ($PackageSource in $maestroInternalSources) {
+ Write-Host "`tAdding credential for Maestro's feed:" $PackageSource.Key
+ $feedEndpoints.endpointCredentials = AddCredential -endpointCredentials $feedEndpoints.endpointCredentials -source $PackageSource.value -pwd $pwd
}
}
-function EnablePrivatePackageSources($DisabledPackageSources) {
- $maestroPrivateSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]")
- ForEach ($DisabledPackageSource in $maestroPrivateSources) {
- Write-Host "`tEnsuring private source '$($DisabledPackageSource.key)' is enabled by deleting it from disabledPackageSource"
+function EnableInternalPackageSources($DisabledPackageSources) {
+ $maestroInternalSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]")
+ ForEach ($DisabledPackageSource in $maestroInternalSources) {
+ Write-Host "`tEnsuring internal source '$($DisabledPackageSource.key)' is enabled by deleting it from disabledPackageSource"
# Due to https://github.com/NuGet/Home/issues/10291, we must actually remove the disabled entries
$DisabledPackageSources.RemoveChild($DisabledPackageSource)
}
ExitWithExitCode 1
}
-if (!$Password) {
- Write-PipelineTelemetryError -Category 'Build' -Message 'Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. Please supply a valid PAT'
- ExitWithExitCode 1
-}
-
# Load NuGet.config
$doc = New-Object System.Xml.XmlDocument
$filename = (Get-Item $ConfigFile).FullName
# Get reference to <PackageSources> or create one if none exist already
$sources = $doc.DocumentElement.SelectSingleNode("packageSources")
-if ($sources -eq $null) {
+if ($null -eq $sources) {
$sources = $doc.CreateElement("packageSources")
$doc.DocumentElement.AppendChild($sources) | Out-Null
}
-# Looks for a <PackageSourceCredentials> node. Create it if none is found.
-$creds = $doc.DocumentElement.SelectSingleNode("packageSourceCredentials")
-if ($creds -eq $null) {
- $creds = $doc.CreateElement("packageSourceCredentials")
- $doc.DocumentElement.AppendChild($creds) | Out-Null
-}
-
# Check for disabledPackageSources; we'll enable any darc-int ones we find there
$disabledSources = $doc.DocumentElement.SelectSingleNode("disabledPackageSources")
-if ($disabledSources -ne $null) {
+if ($null -ne $disabledSources) {
Write-Host "Checking for any darc-int disabled package sources in the disabledPackageSources node"
- EnablePrivatePackageSources -DisabledPackageSources $disabledSources
+ EnableInternalPackageSources -DisabledPackageSources $disabledSources
}
-$userName = "dn-bot"
-
-# Insert credential nodes for Maestro's private feeds
-InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -pwd $Password
+if ($Password) {
+ InsertMaestroInternalFeedCredentials -Sources $sources -pwd $Password
+}
# 3.1 uses a different feed url format so it's handled differently here
$dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']")
-if ($dotnet31Source -ne $null) {
- AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password
- AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password
+if ($null -ne $dotnet31Source) {
+ AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v3/index.json" -pwd $Password
+ AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v3/index.json" -pwd $Password
}
$dotnetVersions = @('5','6','7','8')
foreach ($dotnetVersion in $dotnetVersions) {
$feedPrefix = "dotnet" + $dotnetVersion;
$dotnetSource = $sources.SelectSingleNode("add[@key='$feedPrefix']")
- if ($dotnetSource -ne $null) {
- AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password
- AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password
+ if ($dotnetSource) {
+        AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v3/index.json" -pwd $Password
+ AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v3/index.json" -pwd $Password
}
}
-$doc.Save($filename)
\ No newline at end of file
+$doc.Save($filename)
+
+# If any credentials were added or altered, update the VSS_NUGET_EXTERNAL_FEED_ENDPOINTS environment variable
+if ($null -ne $feedEndpoints) {
+ # ci is set to true so vso logging commands will be used.
+ $ci = $true
+ Write-PipelineSetVariable -Name 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' -Value $($feedEndpoints | ConvertTo-Json) -IsMultiJobVariable $false
+ Write-PipelineSetVariable -Name 'NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED' -Value "False" -IsMultiJobVariable $false
+}
\ No newline at end of file
#!/usr/bin/env bash
-# This file is a temporary workaround for internal builds to be able to restore from private AzDO feeds.
-# This file should be removed as part of this issue: https://github.com/dotnet/arcade/issues/4080
+# This script adds internal feeds required to build commits that depend on internal package sources. For instance,
+# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. It also re-enables
+# disabled internal Maestro (darc-int*) feeds.
+#
+# Optionally, this script also adds a credential entry for each of the internal feeds if one is supplied.
#
-# What the script does is iterate over all package sources in the pointed NuGet.config and add a credential entry
-# under <packageSourceCredentials> for each Maestro's managed private feed. Two additional credential
-# entries are also added for the two private static internal feeds: dotnet3-internal and dotnet3-internal-transport.
-#
-# This script needs to be called in every job that will restore packages and which the base repo has
-# private AzDO feeds in the NuGet.config.
-#
-# See example YAML call for this script below. Note the use of the variable `$(dn-bot-dnceng-artifact-feeds-rw)`
-# from the AzureDevOps-Artifact-Feeds-Pats variable group.
-#
-# Any disabledPackageSources entries which start with "darc-int" will be re-enabled as part of this script executing.
+# See example call for this script below.
#
# - task: Bash@3
-# displayName: Setup Private Feeds Credentials
+# displayName: Setup Internal Feeds
# inputs:
# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
-# arguments: $(Build.SourcesDirectory)/NuGet.config $Token
+# arguments: $(Build.SourcesDirectory)/NuGet.config
# condition: ne(variables['Agent.OS'], 'Windows_NT')
-# env:
-# Token: $(dn-bot-dnceng-artifact-feeds-rw)
+# - task: NuGetAuthenticate@1
+#
+# Note that the NuGetAuthenticate task should be called after SetupNugetSources.
+# This ensures that:
+# - Appropriate credentials are set for the added internal feeds (if not supplied to the script)
+# - The credential provider is installed.
+#
+# This logic is also abstracted into enable-internal-sources.yml.
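+#
+# For reference, when a credential token is supplied, the entry injected into the
+# NuGet.config's <packageSourceCredentials> node looks roughly like this
+# (illustrative feed name):
+#
+#   <darc-int-dotnet-runtime-12345678>
+#     <add key="Username" value="dn-bot" />
+#     <add key="ClearTextPassword" value="<token>" />
+#   </darc-int-dotnet-runtime-12345678>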
ConfigFile=$1
CredToken=$2
ExitWithExitCode 1
fi
-if [ -z "$CredToken" ]; then
- Write-PipelineTelemetryError -category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. Please supply a valid PAT"
- ExitWithExitCode 1
-fi
-
if [[ `uname -s` == "Darwin" ]]; then
NL=$'\\\n'
TB=''
PackageSources+=$(grep -oh '"darc-int-[^"]*"' $ConfigFile | tr -d '"')
IFS=$PrevIFS
-for FeedName in ${PackageSources[@]} ; do
- # Check if there is no existing credential for this FeedName
- grep -i "<$FeedName>" $ConfigFile
- if [ "$?" != "0" ]; then
- echo "Adding credentials for $FeedName."
+if [ "$CredToken" ]; then
+ for FeedName in ${PackageSources[@]} ; do
+ # Check if there is no existing credential for this FeedName
+ grep -i "<$FeedName>" $ConfigFile
+ if [ "$?" != "0" ]; then
+ echo "Adding credentials for $FeedName."
- PackageSourceCredentialsNodeFooter="</packageSourceCredentials>"
- NewCredential="${TB}${TB}<$FeedName>${NL}<add key=\"Username\" value=\"dn-bot\" />${NL}<add key=\"ClearTextPassword\" value=\"$CredToken\" />${NL}</$FeedName>"
+ PackageSourceCredentialsNodeFooter="</packageSourceCredentials>"
+ NewCredential="${TB}${TB}<$FeedName>${NL}<add key=\"Username\" value=\"dn-bot\" />${NL}<add key=\"ClearTextPassword\" value=\"$CredToken\" />${NL}</$FeedName>"
- sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile
- fi
-done
+ sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile
+ fi
+ done
+fi
# Re-enable any entries in disabledPackageSources where the feed name contains darc-int
grep -i "<disabledPackageSources>" $ConfigFile
[switch] $pack,
[switch] $publish,
[switch] $clean,
- [switch] $verticalBuild,
[switch][Alias('pb')]$productBuild,
[switch][Alias('bl')]$binaryLog,
[switch][Alias('nobl')]$excludeCIBinarylog,
Write-Host " -sign Sign build outputs"
Write-Host " -publish Publish artifacts (e.g. symbols)"
Write-Host " -clean Clean the solution"
- Write-Host " -verticalBuild Run in 'vertical build' infra mode."
Write-Host " -productBuild Build the solution in the way it will be built in the full .NET product (VMR) build (short: -pb)"
Write-Host ""
/p:Deploy=$deploy `
/p:Test=$test `
/p:Pack=$pack `
- /p:DotNetBuildRepo=$($productBuild -or $verticalBuild) `
+ /p:DotNetBuildRepo=$productBuild `
/p:IntegrationTest=$integrationTest `
/p:PerformanceTest=$performanceTest `
/p:Sign=$sign `
restore=false
build=false
source_build=false
-vertical_build=false
product_build=false
rebuild=false
test=false
restore=true
pack=true
;;
- -verticalbuild|-vb)
- build=true
- vertical_build=true
- product_build=true
- restore=true
- pack=true
- ;;
-test|-t)
test=true
;;
--- /dev/null
+parameters:
+# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ cancelTimeoutInMinutes: ''
+ condition: ''
+ container: ''
+ continueOnError: false
+ dependsOn: ''
+ displayName: ''
+ pool: ''
+ steps: []
+ strategy: ''
+ timeoutInMinutes: ''
+ variables: []
+ workspace: ''
+ templateContext: {}
+
+# Job base template specific parameters
+ # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
+ # publishing defaults
+ artifacts: ''
+ enableMicrobuild: false
+ enablePublishBuildArtifacts: false
+ enablePublishBuildAssets: false
+ enablePublishTestResults: false
+ enablePublishUsingPipelines: false
+ enableBuildRetry: false
+ disableComponentGovernance: ''
+ componentGovernanceIgnoreDirectories: ''
+ mergeTestResults: false
+ testRunTitle: ''
+ testResultsFormat: ''
+ name: ''
+ preSteps: []
+ artifactPublishSteps: []
+ runAsPublic: false
+
+# Sbom related params
+ enableSbom: true
+ PackageVersion: 9.0.0
+ BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+
+# 1es specific parameters
+ is1ESPipeline: ''
+
+jobs:
+- job: ${{ parameters.name }}
+
+ ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
+ cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
+
+ ${{ if ne(parameters.condition, '') }}:
+ condition: ${{ parameters.condition }}
+
+ ${{ if ne(parameters.container, '') }}:
+ container: ${{ parameters.container }}
+
+ ${{ if ne(parameters.continueOnError, '') }}:
+ continueOnError: ${{ parameters.continueOnError }}
+
+ ${{ if ne(parameters.dependsOn, '') }}:
+ dependsOn: ${{ parameters.dependsOn }}
+
+ ${{ if ne(parameters.displayName, '') }}:
+ displayName: ${{ parameters.displayName }}
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+
+ ${{ if ne(parameters.strategy, '') }}:
+ strategy: ${{ parameters.strategy }}
+
+ ${{ if ne(parameters.timeoutInMinutes, '') }}:
+ timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+
+ ${{ if ne(parameters.templateContext, '') }}:
+ templateContext: ${{ parameters.templateContext }}
+
+ variables:
+ - ${{ if ne(parameters.enableTelemetry, 'false') }}:
+ - name: DOTNET_CLI_TELEMETRY_PROFILE
+ value: '$(Build.Repository.Uri)'
+ - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
+ - name: EnableRichCodeNavigation
+ value: 'true'
+ # Retry signature validation up to three times, waiting 2 seconds between attempts.
+ # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
+ - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
+ value: 3,2000
+ - ${{ each variable in parameters.variables }}:
+ # handle name-value variable syntax
+ # example:
+ # - name: [key]
+ # value: [value]
+ - ${{ if ne(variable.name, '') }}:
+ - name: ${{ variable.name }}
+ value: ${{ variable.value }}
+
+ # handle variable groups
+ - ${{ if ne(variable.group, '') }}:
+ - group: ${{ variable.group }}
+
+ # handle template variable syntax
+ # example:
+ # - template: path/to/template.yml
+ # parameters:
+ # [key]: [value]
+ - ${{ if ne(variable.template, '') }}:
+ - template: ${{ variable.template }}
+ ${{ if ne(variable.parameters, '') }}:
+ parameters: ${{ variable.parameters }}
+
+ # handle key-value variable syntax.
+ # example:
+ # - [key]: [value]
+ - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
+ - ${{ each pair in variable }}:
+ - name: ${{ pair.key }}
+ value: ${{ pair.value }}
+
+ # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
+ - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: DotNet-HelixApi-Access
+
+ ${{ if ne(parameters.workspace, '') }}:
+ workspace: ${{ parameters.workspace }}
+
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if ne(parameters.preSteps, '') }}:
+ - ${{ each preStep in parameters.preSteps }}:
+ - ${{ preStep }}
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - task: MicroBuildSigningPlugin@4
+ displayName: Install MicroBuild plugin
+ inputs:
+ signType: $(_SignType)
+ zipSources: false
+ feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
+ env:
+ TeamName: $(_TeamName)
+ MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)'
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
+ - task: NuGetAuthenticate@1
+
+ - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
+ - task: DownloadPipelineArtifact@2
+ inputs:
+ buildType: current
+ artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
+ targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
+ itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
+
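+  # Note: 'artifacts.download' may be 'true' (taking the coalesced defaults in the
+  # task inputs above) or an object, e.g. (illustrative values):
+  #   artifacts:
+  #     download:
+  #       name: Artifacts_Windows_NT_Debug
+  #       path: artifacts
+  #       pattern: '**'
+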
+ - ${{ each step in parameters.steps }}:
+ - ${{ step }}
+
+ - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
+ - task: RichCodeNavIndexer@0
+ displayName: RichCodeNav Upload
+ inputs:
+ languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
+ environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }}
+ richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
+ uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
+ continueOnError: true
+
+ - template: /eng/common/core-templates/steps/component-governance.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ ${{ if eq(parameters.disableComponentGovernance, '') }}:
+ ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
+ disableComponentGovernance: false
+ ${{ else }}:
+ disableComponentGovernance: true
+ ${{ else }}:
+ disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
+ componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
+
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: MicroBuildCleanup@1
+ displayName: Execute Microbuild cleanup tasks
+ condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ env:
+ TeamName: $(_TeamName)
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
+ - template: /eng/common/core-templates/steps/generate-sbom.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+        PackageVersion: ${{ parameters.packageVersion }}
+        BuildDropPath: ${{ parameters.buildDropPath }}
+ IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
+ publishArtifacts: false
+
+ # Publish test results
+ - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
+ - task: PublishTestResults@2
+ displayName: Publish XUnit Test Results
+ inputs:
+ testResultsFormat: 'xUnit'
+ testResultsFiles: '*.xml'
+ searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
+ mergeTestResults: ${{ parameters.mergeTestResults }}
+ continueOnError: true
+ condition: always()
+ - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
+ - task: PublishTestResults@2
+ displayName: Publish TRX Test Results
+ inputs:
+ testResultsFormat: 'VSTest'
+ testResultsFiles: '*.trx'
+ searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
+ mergeTestResults: ${{ parameters.mergeTestResults }}
+ continueOnError: true
+ condition: always()
+
+ # gather artifacts
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - task: CopyFiles@2
+ displayName: Gather binaries for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/bin'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
+ - task: CopyFiles@2
+ displayName: Gather packages for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/packages'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
+ - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - task: CopyFiles@2
+ displayName: Gather logs for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/log'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log'
+ continueOnError: true
+ condition: always()
+
+ - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
+ - task: CopyFiles@2
+ displayName: Gather logs for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/log/$(_BuildConfig)'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
+ continueOnError: true
+ condition: always()
+ - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
+ - task: CopyFiles@2
+ displayName: Gather buildconfiguration for build retry
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/eng/common/BuildConfiguration'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/eng/common/BuildConfiguration'
+ continueOnError: true
+ condition: always()
+ - ${{ each step in parameters.artifactPublishSteps }}:
+ - ${{ step }}
--- /dev/null
+parameters:
+ # Optional: dependencies of the job
+ dependsOn: ''
+
+ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
+ pool: ''
+
+ CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
+ GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
+
+ SourcesDirectory: $(Build.SourcesDirectory)
+ CreatePr: true
+ AutoCompletePr: false
+ ReusePr: true
+ UseLfLineEndings: true
+ UseCheckedInLocProjectJson: false
+ SkipLocProjectJsonGeneration: false
+ LanguageSet: VS_Main_Languages
+ LclSource: lclFilesInRepo
+ LclPackageId: ''
+ RepoType: gitHub
+ GitHubOrg: dotnet
+ MirrorRepo: ''
+ MirrorBranch: main
+ condition: ''
+ JobNameSuffix: ''
+ is1ESPipeline: ''
+jobs:
+- job: OneLocBuild${{ parameters.JobNameSuffix }}
+
+ dependsOn: ${{ parameters.dependsOn }}
+
+ displayName: OneLocBuild${{ parameters.JobNameSuffix }}
+
+ variables:
+ - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
+ - name: _GenerateLocProjectArguments
+ value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
+ -LanguageSet "${{ parameters.LanguageSet }}"
+ -CreateNeutralXlfs
+ - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
+ - name: _GenerateLocProjectArguments
+ value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+ ${{ if eq(parameters.pool, '') }}:
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022
+ os: windows
+
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
+ - task: Powershell@2
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
+ arguments: $(_GenerateLocProjectArguments)
+ displayName: Generate LocProject.json
+ condition: ${{ parameters.condition }}
+
+ - task: OneLocBuild@2
+ displayName: OneLocBuild
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ inputs:
+ locProj: eng/Localize/LocProject.json
+ outDir: $(Build.ArtifactStagingDirectory)
+ lclSource: ${{ parameters.LclSource }}
+ lclPackageId: ${{ parameters.LclPackageId }}
+ isCreatePrSelected: ${{ parameters.CreatePr }}
+ isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
+ ${{ if eq(parameters.CreatePr, true) }}:
+ isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
+ ${{ if eq(parameters.RepoType, 'gitHub') }}:
+ isShouldReusePrSelected: ${{ parameters.ReusePr }}
+ packageSourceAuth: patAuth
+ patVariable: ${{ parameters.CeapexPat }}
+ ${{ if eq(parameters.RepoType, 'gitHub') }}:
+ repoType: ${{ parameters.RepoType }}
+ gitHubPatVariable: "${{ parameters.GithubPat }}"
+ ${{ if ne(parameters.MirrorRepo, '') }}:
+ isMirrorRepoSelected: true
+ gitHubOrganization: ${{ parameters.GitHubOrg }}
+ mirrorRepo: ${{ parameters.MirrorRepo }}
+ mirrorBranch: ${{ parameters.MirrorBranch }}
+ condition: ${{ parameters.condition }}
+
+ - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish Localization Files
+ pathToPublish: '$(Build.ArtifactStagingDirectory)/loc'
+ publishLocation: Container
+ artifactName: Loc
+ condition: ${{ parameters.condition }}
+
+ - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish LocProject.json
+ pathToPublish: '$(Build.SourcesDirectory)/eng/Localize/'
+ publishLocation: Container
+ artifactName: Loc
+ condition: ${{ parameters.condition }}
\ No newline at end of file
--- /dev/null
+parameters:
+ configuration: 'Debug'
+
+ # Optional: condition for the job to run
+ condition: ''
+
+ # Optional: 'true' if future jobs should run even if this job fails
+ continueOnError: false
+
+ # Optional: dependencies of the job
+ dependsOn: ''
+
+ # Optional: Include PublishBuildArtifacts task
+ enablePublishBuildArtifacts: false
+
+ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
+ pool: {}
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+ # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
+ publishUsingPipelines: false
+
+ # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
+ publishAssetsImmediately: false
+
+ artifactsPublishingAdditionalParameters: ''
+
+  signingValidationAdditionalParameters: ''
+
+  symbolPublishingAdditionalParameters: ''
+
+  # BAR build id and target channel ids, used when publishAssetsImmediately is true
+  BARBuildId: ''
+  PromoteToChannelIds: ''
+
+ is1ESPipeline: ''
+
+jobs:
+- job: Asset_Registry_Publish
+
+ dependsOn: ${{ parameters.dependsOn }}
+ timeoutInMinutes: 150
+
+ ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+ displayName: Publish Assets
+ ${{ else }}:
+ displayName: Publish to Build Asset Registry
+
+ variables:
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: Publish-Build-Assets
+ - group: AzureDevOps-Artifact-Feeds-Pats
+ - name: runCodesignValidationInjection
+ value: false
+ # unconditional - needed for logs publishing (redactor tool version)
+ - template: /eng/common/core-templates/post-build/common-variables.yml
+
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
+ name: NetCore1ESPool-Publishing-Internal
+ image: windows.vs2019.amd64
+ os: windows
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - checkout: self
+ fetchDepth: 3
+ clean: true
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download artifact
+ inputs:
+ artifactName: AssetManifests
+ downloadPath: '$(Build.StagingDirectory)/Download'
+ checkDownloadedFiles: true
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - task: NuGetAuthenticate@1
+
+ - task: PowerShell@2
+ displayName: Publish Build Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
+ /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
+ /p:BuildAssetRegistryToken=$(MaestroAccessToken)
+ /p:MaestroApiEndpoint=https://maestro.dot.net
+ /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
+ /p:OfficialBuildId=$(Build.BuildNumber)
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - task: powershell@2
+ displayName: Create ReleaseConfigs Artifact
+ inputs:
+ targetType: inline
+ script: |
+ New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force
+ $filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt"
+ Add-Content -Path $filePath -Value $(BARBuildId)
+ Add-Content -Path $filePath -Value "$(DefaultChannels)"
+ Add-Content -Path $filePath -Value $(IsStableBuild)
+
+ - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish ReleaseConfigs Artifact
+ pathToPublish: '$(Build.StagingDirectory)/ReleaseConfigs'
+ publishLocation: Container
+ artifactName: ReleaseConfigs
+
+ - task: powershell@2
+ displayName: Check if SymbolPublishingExclusionsFile.txt exists
+ inputs:
+ targetType: inline
+ script: |
+ $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
+ if(Test-Path -Path $symbolExclusionfile)
+ {
+ Write-Host "SymbolExclusionFile exists"
+ Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
+ }
+        else {
+          Write-Host "SymbolExclusionFile does not exist"
+          Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
+        }
+
+ - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish SymbolPublishingExclusionsFile Artifact
+ condition: eq(variables['SymbolExclusionFile'], 'true')
+ pathToPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
+ publishLocation: Container
+ artifactName: ReleaseConfigs
+
+ - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ - task: PowerShell@2
+ displayName: Publish Using Darc
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
+ arguments: -BuildId $(BARBuildId)
+ -PublishingInfraVersion 3
+ -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
+ -MaestroToken '$(MaestroApiAccessToken)'
+ -WaitPublishingFinish true
+ -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
+ -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+
+ - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
+ - template: /eng/common/core-templates/steps/publish-logs.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ JobLabel: 'Publish_Artifacts_Logs'
--- /dev/null
+parameters:
+ # This template adds arcade-powered source-build to CI. The template produces a server job with a
+ # default ID 'Source_Build_Complete' to put in a dependency list if necessary.
+
+ # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed.
+ jobNamePrefix: 'Source_Build'
+
+ # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
+ # managed-only repositories. This is an object with these properties:
+ #
+ # name: ''
+ # The name of the job. This is included in the job ID.
+ # targetRID: ''
+ # The name of the target RID to use, instead of the one auto-detected by Arcade.
+ # nonPortable: false
+ # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
+ # linux-x64), and compiling against distro-provided packages rather than portable ones.
+ # skipPublishValidation: false
+ # Disables publishing validation. By default, a check is performed to ensure no packages are
+ # published by source-build.
+ # container: ''
+ # A container to use. Runs in docker.
+ # pool: {}
+ # A pool to use. Runs directly on an agent.
+ # buildScript: ''
+ # Specifies the build script to invoke to perform the build in the repo. The default
+ # './build.sh' should work for typical Arcade repositories, but this is customizable for
+ # difficult situations.
+ # jobProperties: {}
+ # A list of job properties to inject at the top level, for potential extensibility beyond
+ # container and pool.
+ platform: {}
+
+ is1ESPipeline: ''
+
+ # If set to true and running on a non-public project,
+ # Internal nuget and blob storage locations will be enabled.
+ # This is not enabled by default because many repositories do not need internal sources
+ # and do not need to have the required service connections approved in the pipeline.
+ enableInternalSources: false
+
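+# Example invocation (illustrative; in practice the wrapper templates under
+# eng/common/templates*/job/source-build.yml pass through to this file, and the
+# platform name/container values shown here are hypothetical):
+#
+# jobs:
+# - template: /eng/common/core-templates/job/source-build.yml
+#   parameters:
+#     is1ESPipeline: false
+#     platform:
+#       name: 'Managed'
+#       container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'
+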
+jobs:
+- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
+ displayName: Source-Build (${{ parameters.platform.name }})
+
+ ${{ each property in parameters.platform.jobProperties }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ ${{ if ne(parameters.platform.container, '') }}:
+ container: ${{ parameters.platform.container }}
+
+ ${{ if eq(parameters.platform.pool, '') }}:
+ # The default VM host AzDO pool. This should be capable of running Docker containers: almost all
+ # source-build builds run in Docker, including the default managed platform.
+ # /eng/common/core-templates/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
+ ${{ if eq(parameters.is1ESPipeline, 'true') }}:
+ pool:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
+ demands: ImageOverride -equals build.ubuntu.2004.amd64
+ ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
+ image: 1es-mariner-2
+ os: linux
+ ${{ else }}:
+ pool:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
+ demands: ImageOverride -equals Build.Ubuntu.2204.Amd64.Open
+ ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
+ demands: ImageOverride -equals Build.Ubuntu.2204.Amd64
+ ${{ if ne(parameters.platform.pool, '') }}:
+ pool: ${{ parameters.platform.pool }}
+
+ workspace:
+ clean: all
+
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if eq(parameters.enableInternalSources, true) }}:
+ - template: /eng/common/core-templates/steps/enable-internal-sources.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ - template: /eng/common/core-templates/steps/enable-internal-runtimes.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ - template: /eng/common/core-templates/steps/source-build.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ platform: ${{ parameters.platform }}
--- /dev/null
+parameters:
+ runAsPublic: false
+ sourceIndexUploadPackageVersion: 2.0.0-20240522.1
+ sourceIndexProcessBinlogPackageVersion: 1.0.1-20240522.1
+ sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
+ sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
+ preSteps: []
+ binlogPath: artifacts/log/Debug/Build.binlog
+ condition: ''
+ dependsOn: ''
+ pool: ''
+ is1ESPipeline: ''
+
+jobs:
+- job: SourceIndexStage1
+ dependsOn: ${{ parameters.dependsOn }}
+ condition: ${{ parameters.condition }}
+ variables:
+ - name: SourceIndexUploadPackageVersion
+ value: ${{ parameters.sourceIndexUploadPackageVersion }}
+ - name: SourceIndexProcessBinlogPackageVersion
+ value: ${{ parameters.sourceIndexProcessBinlogPackageVersion }}
+ - name: SourceIndexPackageSource
+ value: ${{ parameters.sourceIndexPackageSource }}
+ - name: BinlogPath
+ value: ${{ parameters.binlogPath }}
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+ ${{ if eq(parameters.pool, '') }}:
+ pool:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: $(DncEngPublicBuildPool)
+ image: windows.vs2022.amd64.open
+ ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ name: $(DncEngInternalBuildPool)
+ image: windows.vs2022.amd64
+
+ steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ each preStep in parameters.preSteps }}:
+ - ${{ preStep }}
+
+ - task: UseDotNet@2
+ displayName: Use .NET 8 SDK
+ inputs:
+ packageType: sdk
+ version: 8.0.x
+ installationPath: $(Agent.TempDirectory)/dotnet
+ workingDirectory: $(Agent.TempDirectory)
+
+ - script: |
+ $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(sourceIndexProcessBinlogPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
+ $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(sourceIndexUploadPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
+ displayName: Download Tools
+ # Set the working directory to the temp directory so 'dotnet' doesn't pick up the repo's global.json and try to use the repo's SDK.
+ workingDirectory: $(Agent.TempDirectory)
+
+ - script: ${{ parameters.sourceIndexBuildCommand }}
+ displayName: Build Repository
+
+ - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
+ displayName: Process Binlog into indexable sln
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: AzureCLI@2
+ displayName: Get stage 1 auth token
+ inputs:
+ azureSubscription: 'SourceDotNet Stage1 Publish'
+ addSpnToEnvironment: true
+ scriptType: 'ps'
+ scriptLocation: 'inlineScript'
+ inlineScript: |
+ echo "##vso[task.setvariable variable=ARM_CLIENT_ID]$env:servicePrincipalId"
+ echo "##vso[task.setvariable variable=ARM_ID_TOKEN]$env:idToken"
+ echo "##vso[task.setvariable variable=ARM_TENANT_ID]$env:tenantId"
+
+ - script: |
+ echo "Client ID: $(ARM_CLIENT_ID)"
+ echo "ID Token: $(ARM_ID_TOKEN)"
+ echo "Tenant ID: $(ARM_TENANT_ID)"
+ az login --service-principal -u $(ARM_CLIENT_ID) --tenant $(ARM_TENANT_ID) --allow-no-subscriptions --federated-token $(ARM_ID_TOKEN)
+ displayName: "Login to Azure"
+
+ - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1
+ displayName: Upload stage1 artifacts to source index
--- /dev/null
+parameters:
+ # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+ continueOnError: false
+ # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ jobs: []
+ # Optional: if specified, restore and use this version of Guardian instead of the default.
+ overrideGuardianVersion: ''
+ is1ESPipeline: ''
+
+jobs:
+- template: /eng/common/core-templates/jobs/jobs.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ enableMicrobuild: false
+ enablePublishBuildArtifacts: false
+ enablePublishTestResults: false
+ enablePublishBuildAssets: false
+ enablePublishUsingPipelines: false
+ enableTelemetry: true
+
+ variables:
+ - group: Publish-Build-Assets
+ # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
+ # sync with the packages.config file.
+ - name: DefaultGuardianVersion
+ value: 0.109.0
+ - name: GuardianPackagesConfigFile
+ value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
+ - name: GuardianVersion
+ value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
+
+ jobs: ${{ parameters.jobs }}
+
--- /dev/null
+parameters:
+ # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+ continueOnError: false
+
+ # Optional: Include PublishBuildArtifacts task
+ enablePublishBuildArtifacts: false
+
+ # Optional: Enable publishing using release pipelines
+ enablePublishUsingPipelines: false
+
+ # Optional: Enable running the source-build jobs to build repo from source
+ enableSourceBuild: false
+
+ # Optional: Parameters for source-build template.
+ # See /eng/common/core-templates/jobs/source-build.yml for options
+ sourceBuildParameters: []
+
+ graphFileGeneration:
+ # Optional: Enable generating the graph files at the end of the build
+ enabled: false
+ # Optional: Include toolset dependencies in the generated graph files
+ includeToolset: false
+
+ # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ jobs: []
+
+ # Optional: Override automatically derived dependsOn value for "publish build assets" job
+ publishBuildAssetsDependsOn: ''
+
+ # Optional: Publish the assets as soon as the publish-to-BAR stage is complete, rather than doing so in a separate stage.
+ publishAssetsImmediately: false
+
+ # Optional: If publishAssetsImmediately is set and additional parameters are needed, this can be used to send them along (they are normally sent to post-build.yml)
+ artifactsPublishingAdditionalParameters: ''
+ signingValidationAdditionalParameters: ''
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+ enableSourceIndex: false
+ sourceIndexParams: {}
+
+ artifacts: {}
+ is1ESPipeline: ''
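+# A hedged sketch of the 'artifacts' shape this template inspects (only publish.manifests
+# is read here; the full object is forwarded to job.yml):
+#
+# artifacts:
+#   publish:
+#     manifests: true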
+
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
+jobs:
+- ${{ each job in parameters.jobs }}:
+ - ${{ if eq(parameters.is1ESPipeline, 'true') }}:
+ - template: /eng/common/templates-official/job/job.yml
+ parameters:
+ # pass along parameters
+ ${{ each parameter in parameters }}:
+ ${{ if ne(parameter.key, 'jobs') }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+ # pass along job properties
+ ${{ each property in job }}:
+ ${{ if ne(property.key, 'job') }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ name: ${{ job.job }}
+
+ - ${{ else }}:
+ - template: /eng/common/templates/job/job.yml
+ parameters:
+ # pass along parameters
+ ${{ each parameter in parameters }}:
+ ${{ if ne(parameter.key, 'jobs') }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+ # pass along job properties
+ ${{ each property in job }}:
+ ${{ if ne(property.key, 'job') }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ name: ${{ job.job }}
+
+- ${{ if eq(parameters.enableSourceBuild, true) }}:
+ - template: /eng/common/core-templates/jobs/source-build.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ allCompletedJobId: Source_Build_Complete
+ ${{ each parameter in parameters.sourceBuildParameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
+ - template: ../job/source-index-stage1.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ runAsPublic: ${{ parameters.runAsPublic }}
+ ${{ each parameter in parameters.sourceIndexParams }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
+ - template: ../job/publish-build-assets.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ continueOnError: ${{ parameters.continueOnError }}
+ dependsOn:
+ - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.jobs }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.enableSourceBuild, true) }}:
+ - Source_Build_Complete
+
+ runAsPublic: ${{ parameters.runAsPublic }}
+ publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
+ publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
+ enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
--- /dev/null
+parameters:
+ # This template adds arcade-powered source-build to CI. A job is created for each platform, as
+ # well as an optional server job that completes when all platform jobs complete.
+
+ # The name of the "join" job for all source-build platforms. If set to empty string, the job is
+ # not included. Existing repo pipelines can use this job depend on all source-build jobs
+ # completing without maintaining a separate list of every single job ID: just depend on this one
+ # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
+ allCompletedJobId: ''
+
+ # See /eng/common/core-templates/job/source-build.yml
+ jobNamePrefix: 'Source_Build'
+
+ # This is the default platform provided by Arcade, intended for use by a managed-only repo.
+ defaultManagedPlatform:
+ name: 'Managed'
+ container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream9'
+
+ # Defines the platforms on which to run build jobs. One job is created for each platform, and the
+ # object in this array is sent to the job template as 'platform'. If no platforms are specified,
+ # one job runs on 'defaultManagedPlatform'.
+ platforms: []
+
+ is1ESPipeline: ''
+
+ # If set to true and running on a non-public project,
+ # internal NuGet and blob storage locations will be enabled.
+ # This is not enabled by default because many repositories do not need internal sources
+ # and do not need to have the required service connections approved in the pipeline.
+ enableInternalSources: false
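+# A hedged usage sketch (platform name and container are illustrative):
+#
+# jobs:
+# - template: /eng/common/templates/jobs/source-build.yml
+#   parameters:
+#     allCompletedJobId: Source_Build_Complete
+#     platforms:
+#     - name: 'Managed'
+#       container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream9'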
+
+jobs:
+
+- ${{ if ne(parameters.allCompletedJobId, '') }}:
+ - job: ${{ parameters.allCompletedJobId }}
+ displayName: Source-Build Complete
+ pool: server
+ dependsOn:
+ - ${{ each platform in parameters.platforms }}:
+ - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
+ - ${{ if eq(length(parameters.platforms), 0) }}:
+ - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
+
+- ${{ each platform in parameters.platforms }}:
+ - template: /eng/common/core-templates/job/source-build.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ jobNamePrefix: ${{ parameters.jobNamePrefix }}
+ platform: ${{ platform }}
+ enableInternalSources: ${{ parameters.enableInternalSources }}
+
+- ${{ if eq(length(parameters.platforms), 0) }}:
+ - template: /eng/common/core-templates/job/source-build.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ jobNamePrefix: ${{ parameters.jobNamePrefix }}
+ platform: ${{ parameters.defaultManagedPlatform }}
+ enableInternalSources: ${{ parameters.enableInternalSources }}
--- /dev/null
+variables:
+ - group: Publish-Build-Assets
+
+ # Whether the build is internal or not
+ - name: IsInternalBuild
+ value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
+
+ # Default Maestro++ API Endpoint and API Version
+ - name: MaestroApiEndPoint
+ value: "https://maestro.dot.net"
+ - name: MaestroApiAccessToken
+ value: $(MaestroAccessToken)
+ - name: MaestroApiVersion
+ value: "2020-02-20"
+
+ - name: SourceLinkCLIVersion
+ value: 3.0.0
+ - name: SymbolToolVersion
+ value: 1.0.1
+ - name: BinlogToolVersion
+ value: 1.0.11
+
+ - name: runCodesignValidationInjection
+ value: false
--- /dev/null
+parameters:
+ # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
+ # Publishing V1 is no longer supported
+ # Publishing V2 is no longer supported
+ # Publishing V3 is the default
+ - name: publishingInfraVersion
+ displayName: Which version of publishing should be used to promote the build definition?
+ type: number
+ default: 3
+ values:
+ - 3
+
+ - name: BARBuildId
+ displayName: BAR Build Id
+ type: number
+ default: 0
+
+ - name: PromoteToChannelIds
+ displayName: Channel to promote BARBuildId to
+ type: string
+ default: ''
+
+ - name: enableSourceLinkValidation
+ displayName: Enable SourceLink validation
+ type: boolean
+ default: false
+
+ - name: enableSigningValidation
+ displayName: Enable signing validation
+ type: boolean
+ default: true
+
+ - name: enableSymbolValidation
+ displayName: Enable symbol validation
+ type: boolean
+ default: false
+
+ - name: enableNugetValidation
+ displayName: Enable NuGet validation
+ type: boolean
+ default: true
+
+ - name: publishInstallersAndChecksums
+ displayName: Publish installers and checksums
+ type: boolean
+ default: true
+
+ - name: SDLValidationParameters
+ type: object
+ default:
+ enable: false
+ publishGdn: false
+ continueOnError: false
+ params: ''
+ artifactNames: ''
+ downloadArtifacts: true
+
+ # These parameters let the user customize the call to sdk-task.ps1 for publishing
+ # symbols & general artifacts as well as for signing validation
+ - name: symbolPublishingAdditionalParameters
+ displayName: Symbol publishing additional parameters
+ type: string
+ default: ''
+
+ - name: artifactsPublishingAdditionalParameters
+ displayName: Artifact publishing additional parameters
+ type: string
+ default: ''
+
+ - name: signingValidationAdditionalParameters
+ displayName: Signing validation additional parameters
+ type: string
+ default: ''
+
+ # Which stages should finish execution before post-build stages start
+ - name: validateDependsOn
+ type: object
+ default:
+ - build
+
+ - name: publishDependsOn
+ type: object
+ default:
+ - Validate
+
+ # Optional: Call asset publishing rather than running in a separate stage
+ - name: publishAssetsImmediately
+ type: boolean
+ default: false
+
+ - name: is1ESPipeline
+ type: boolean
+ default: false
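+# A hedged sketch of a repo consuming these stages (the stage name in validateDependsOn
+# is illustrative):
+#
+# stages:
+# - template: /eng/common/templates/post-build/post-build.yml
+#   parameters:
+#     enableSourceLinkValidation: true
+#     validateDependsOn:
+#     - Build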
+
+stages:
+- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
+ - stage: Validate
+ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Validate Build Assets
+ variables:
+ - template: /eng/common/core-templates/post-build/common-variables.yml
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ jobs:
+ - job:
+ displayName: NuGet Validation
+ condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ ${{ if eq(parameters.is1ESPipeline, true) }}:
+ name: $(DncEngInternalBuildPool)
+ image: windows.vs2022.amd64
+ os: windows
+ ${{ else }}:
+ name: $(DncEngInternalBuildPool)
+ demands: ImageOverride -equals windows.vs2022.amd64
+
+ steps:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+ checkDownloadedFiles: true
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
+ arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
+ -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
+
+ - job:
+ displayName: Signing Validation
+ condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ ${{ if eq(parameters.is1ESPipeline, true) }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022
+ os: windows
+ ${{ else }}:
+ name: $(DncEngInternalBuildPool)
+ demands: ImageOverride -equals windows.vs2022.amd64
+ steps:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+ checkDownloadedFiles: true
+ itemPattern: |
+ **
+ !**/Microsoft.SourceBuild.Intermediate.*.nupkg
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed.
+ # Since sdk-task.ps1 tries to restore packages, we need to do this authentication here;
+ # otherwise it'll complain about accessing a private feed.
+ - task: NuGetAuthenticate@1
+ displayName: 'Authenticate to AzDO Feeds'
+
+ # Signing validation will optionally work with the build manifest file, which is downloaded from
+ # Azure DevOps above.
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task SigningValidation -restore -msbuildEngine vs
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
+ /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
+ ${{ parameters.signingValidationAdditionalParameters }}
+
+ - template: /eng/common/core-templates/steps/publish-logs.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ StageLabel: 'Validation'
+ JobLabel: 'Signing'
+ BinlogToolVersion: $(BinlogToolVersion)
+
+ - job:
+ displayName: SourceLink Validation
+ condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ ${{ if eq(parameters.is1ESPipeline, true) }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022
+ os: windows
+ ${{ else }}:
+ name: $(DncEngInternalBuildPool)
+ demands: ImageOverride -equals windows.vs2022.amd64
+ steps:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: BlobArtifacts
+ checkDownloadedFiles: true
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
+ arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
+ -ExtractPath $(Agent.BuildDirectory)/Extract/
+ -GHRepoName $(Build.Repository.Name)
+ -GHCommit $(Build.SourceVersion)
+ -SourcelinkCliVersion $(SourceLinkCLIVersion)
+ continueOnError: true
+
+- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
+ - stage: publish_using_darc
+ ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
+ dependsOn: ${{ parameters.publishDependsOn }}
+ ${{ else }}:
+ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Publish using Darc
+ variables:
+ - template: /eng/common/core-templates/post-build/common-variables.yml
+ - template: /eng/common/core-templates/variables/pool-providers.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ jobs:
+ - job:
+ displayName: Publish Using Darc
+ timeoutInMinutes: 120
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ ${{ if eq(parameters.is1ESPipeline, true) }}:
+ name: NetCore1ESPool-Publishing-Internal
+ image: windows.vs2019.amd64
+ os: windows
+ ${{ else }}:
+ name: NetCore1ESPool-Publishing-Internal
+ demands: ImageOverride -equals windows.vs2019.amd64
+ steps:
+ - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+
+ - task: NuGetAuthenticate@1
+
+ - task: PowerShell@2
+ displayName: Publish Using Darc
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
+ arguments: -BuildId $(BARBuildId)
+ -PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
+ -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
+ -MaestroToken '$(MaestroApiAccessToken)'
+ -WaitPublishingFinish true
+ -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
+ -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
--- /dev/null
+parameters:
+ BARBuildId: ''
+ PromoteToChannelIds: ''
+ is1ESPipeline: ''
+
+steps:
+ - ${{ if eq(parameters.is1ESPipeline, '') }}:
+ - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error
+
+ - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Release Configs
+ inputs:
+ buildType: current
+ artifactName: ReleaseConfigs
+ checkDownloadedFiles: true
+
+ - task: PowerShell@2
+ name: setReleaseVars
+ displayName: Set Release Configs Vars
+ inputs:
+ targetType: inline
+ pwsh: true
+ script: |
+ try {
+ if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
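+ # ReleaseConfigs.txt is produced earlier in the build; line 1 holds the BAR build id,
+ # line 2 the default channel id(s), and line 3 the stable-build flag, read by index below.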
+ $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
+
+ $BarId = $Content | Select -Index 0
+ $Channels = $Content | Select -Index 1
+ $IsStableBuild = $Content | Select -Index 2
+
+ $AzureDevOpsProject = $Env:System_TeamProject
+ $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
+ $AzureDevOpsBuildId = $Env:Build_BuildId
+ }
+ else {
+ $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}"
+
+ $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
+ $apiHeaders.Add('Accept', 'application/json')
+ $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}")
+
+ $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+
+ $BarId = $Env:BARBuildId
+ $Channels = $Env:PromoteToMaestroChannels -split ","
+ $Channels = $Channels -join "]["
+ $Channels = "[$Channels]"
+
+ $IsStableBuild = $buildInfo.stable
+ $AzureDevOpsProject = $buildInfo.azureDevOpsProject
+ $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
+ $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
+ }
+
+ Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
+ Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
+ Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
+
+ Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
+ Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
+ Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
+ }
+ catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ exit 1
+ }
+ env:
+ MAESTRO_API_TOKEN: $(MaestroApiAccessToken)
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
--- /dev/null
+parameters:
+ ChannelId: 0
+
+steps:
+- task: PowerShell@2
+ displayName: Triggering subscriptions
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1
+ arguments: -SourceRepo $(Build.Repository.Uri)
+ -ChannelId ${{ parameters.ChannelId }}
+ -MaestroApiAccessToken $(MaestroAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
--- /dev/null
+parameters:
+ ChannelId: 0
+
+steps:
+- task: PowerShell@2
+ displayName: Add Build to Channel
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
+ arguments: -BuildId $(BARBuildId)
+ -ChannelId ${{ parameters.ChannelId }}
+ -MaestroApiAccessToken $(MaestroApiAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
--- /dev/null
+parameters:
+ disableComponentGovernance: false
+ componentGovernanceIgnoreDirectories: ''
+ is1ESPipeline: false
+
+steps:
+- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
+ - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
+ displayName: Set skipComponentGovernanceDetection variable
+- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
+ - task: ComponentGovernanceComponentDetection@0
+ continueOnError: true
+ inputs:
+ ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
\ No newline at end of file
--- /dev/null
+# Obtains internal runtime download credentials and, by default, populates the
+# 'dotnetbuilds-internal-container-read-token-base64' variable with the base64-encoded SAS token
+
+parameters:
+- name: federatedServiceConnection
+ type: string
+ default: 'dotnetbuilds-internal-read'
+- name: outputVariableName
+ type: string
+ default: 'dotnetbuilds-internal-container-read-token-base64'
+- name: expiryInHours
+ type: number
+ default: 1
+- name: base64Encode
+ type: boolean
+ default: true
+- name: is1ESPipeline
+ type: boolean
+ default: false
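+# A hedged sketch of consuming the resulting secret variable, mirroring the source-build
+# steps template later in this patch:
+#
+# /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64)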
+
+steps:
+- ${{ if ne(variables['System.TeamProject'], 'public') }}:
+ - template: /eng/common/core-templates/steps/get-delegation-sas.yml
+ parameters:
+ federatedServiceConnection: ${{ parameters.federatedServiceConnection }}
+ outputVariableName: ${{ parameters.outputVariableName }}
+ expiryInHours: ${{ parameters.expiryInHours }}
+ base64Encode: ${{ parameters.base64Encode }}
+ storageAccount: dotnetbuilds
+ container: internal
+ permissions: rl
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
\ No newline at end of file
--- /dev/null
+parameters:
+# This is the Azure federated service connection that we log into to get an access token.
+- name: nugetFederatedServiceConnection
+ type: string
+ default: 'dnceng-artifacts-feeds-read'
+- name: is1ESPipeline
+ type: boolean
+ default: false
+
+steps:
+- ${{ if ne(variables['System.TeamProject'], 'public') }}:
+ # If running on dnceng (internal project), just use the default behavior for NuGetAuthenticate.
+ # If running on DevDiv, NuGetAuthenticate is not really an option. It's scoped to a single feed, and we have many feeds that
+ # may be added. Instead, we'll use the traditional approach (add cred to nuget.config), but use an account token.
+ - ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ - task: PowerShell@2
+ displayName: Setup Internal Feeds
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1
+ arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config
+ - task: NuGetAuthenticate@1
+ - ${{ else }}:
+ - template: /eng/common/templates/steps/get-federated-access-token.yml
+ parameters:
+ federatedServiceConnection: ${{ parameters.nugetFederatedServiceConnection }}
+ outputVariableName: 'dnceng-artifacts-feeds-read-access-token'
+ - task: PowerShell@2
+ displayName: Setup Internal Feeds
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1
+ arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $(dnceng-artifacts-feeds-read-access-token)
+ # This is required in certain scenarios to install the ADO credential provider.
+ # It is installed by default in some msbuild invocations (e.g. VS msbuild), but needs to be installed for others
+ # (e.g. dotnet msbuild).
+ - task: NuGetAuthenticate@1
--- /dev/null
+# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
+# PackageName - The name of the package this SBOM represents.
+# PackageVersion - The version of the package this SBOM represents.
+# ManifestDirPath - The path of the directory where the generated manifest files will be placed.
+# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
+
+parameters:
+ PackageVersion: 9.0.0
+ BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+ PackageName: '.NET'
+ ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
+ IgnoreDirectories: ''
+ sbomContinueOnError: true
+ is1ESPipeline: false
+ # disable publishArtifacts if some other step is publishing the artifacts (like job.yml).
+ publishArtifacts: true
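+# A hedged usage sketch (the ignored directory is illustrative):
+#
+# - template: /eng/common/templates/steps/generate-sbom.yml
+#   parameters:
+#     PackageVersion: 9.0.0
+#     IgnoreDirectories: '$(Build.SourcesDirectory)/artifacts/obj'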
+
+steps:
+- task: PowerShell@2
+ displayName: Prep for SBOM generation (non-Linux)
+ condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
+ inputs:
+ filePath: ./eng/common/generate-sbom-prep.ps1
+ arguments: ${{parameters.manifestDirPath}}
+
+# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
+- script: |
+ chmod +x ./eng/common/generate-sbom-prep.sh
+ ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
+ displayName: Prep for SBOM generation (Linux)
+ condition: eq(variables['Agent.Os'], 'Linux')
+ continueOnError: ${{ parameters.sbomContinueOnError }}
+
+- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
+ displayName: 'Generate SBOM manifest'
+ continueOnError: ${{ parameters.sbomContinueOnError }}
+ inputs:
+ PackageName: ${{ parameters.packageName }}
+ BuildDropPath: ${{ parameters.buildDropPath }}
+ PackageVersion: ${{ parameters.packageVersion }}
+ ManifestDirPath: ${{ parameters.manifestDirPath }}
+ ${{ if ne(parameters.IgnoreDirectories, '') }}:
+ AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
+
+- ${{ if eq(parameters.publishArtifacts, 'true') }}:
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish SBOM manifest
+ continueOnError: ${{parameters.sbomContinueOnError}}
+ targetPath: '${{ parameters.manifestDirPath }}'
+ artifactName: $(ARTIFACT_NAME)
+
--- /dev/null
+parameters:
+- name: federatedServiceConnection
+ type: string
+- name: outputVariableName
+ type: string
+- name: expiryInHours
+ type: number
+ default: 1
+- name: base64Encode
+ type: boolean
+ default: false
+- name: storageAccount
+ type: string
+- name: container
+ type: string
+- name: permissions
+ type: string
+ default: 'rl'
+- name: is1ESPipeline
+ type: boolean
+ default: false
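+# A hedged sketch of consuming the generated SAS in a later step (account, container,
+# and variable name are illustrative and must match the parameters passed above):
+#
+# - script: az storage blob list --account-name dotnetbuilds --container-name internal --sas-token "$(dotnetbuilds-internal-container-read-token-base64)"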
+
+steps:
+- task: AzureCLI@2
+ displayName: 'Generate delegation SAS Token for ${{ parameters.storageAccount }}/${{ parameters.container }}'
+ inputs:
+ azureSubscription: ${{ parameters.federatedServiceConnection }}
+ scriptType: 'pscore'
+ scriptLocation: 'inlineScript'
+ inlineScript: |
+ # Calculate the expiration of the SAS token and convert to UTC
+ $expiry = (Get-Date).AddHours(${{ parameters.expiryInHours }}).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")
+
+ $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv
+
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "Failed to generate SAS token."
+ exit 1
+ }
+
+ if ('${{ parameters.base64Encode }}' -eq 'true') {
+ $sas = [Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($sas))
+ }
+
+ Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value"
+ Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true]$sas"
--- /dev/null
+parameters:
+- name: federatedServiceConnection
+ type: string
+- name: outputVariableName
+ type: string
+# Resource to get a token for. Common values include:
+# - '499b84ac-1321-427f-aa17-267ca6975798' for Azure DevOps
+# - 'https://storage.azure.com/' for storage
+# Defaults to Azure DevOps
+- name: resource
+ type: string
+ default: '499b84ac-1321-427f-aa17-267ca6975798'
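+# A hedged sketch of consuming the resulting token, mirroring enable-internal-sources.yml
+# elsewhere in this patch (the variable name must match outputVariableName):
+#
+# - task: PowerShell@2
+#   inputs:
+#     filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1
+#     arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $(dnceng-artifacts-feeds-read-access-token)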
+
+steps:
+- task: AzureCLI@2
+ displayName: 'Getting federated access token for feeds'
+ inputs:
+ azureSubscription: ${{ parameters.federatedServiceConnection }}
+ scriptType: 'pscore'
+ scriptLocation: 'inlineScript'
+ inlineScript: |
+ $accessToken = az account get-access-token --query accessToken --resource ${{ parameters.resource }} --output tsv
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "Failed to get access token for resource '${{ parameters.resource }}'"
+ exit 1
+ }
+ Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value"
+ Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true]$accessToken"
\ No newline at end of file
--- /dev/null
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: false
+- name: args
+ type: object
+ default: {}
+steps:
+- ${{ if ne(parameters.is1ESPipeline, true) }}:
+ - template: /eng/common/templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ ${{ each parameter in parameters.args }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+- ${{ else }}:
+ - template: /eng/common/templates-official/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ ${{ each parameter in parameters.args }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
--- /dev/null
+parameters:
+ StageLabel: ''
+ JobLabel: ''
+ CustomSensitiveDataList: ''
+ # A default, in case the value from eng/common/core-templates/post-build/common-variables.yml is not passed
+ BinlogToolVersion: '1.0.11'
+ is1ESPipeline: false
+
+steps:
+- task: PowerShell@2
+ displayName: Prepare Binlogs to Upload
+ inputs:
+ targetType: inline
+ script: |
+ New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ continueOnError: true
+ condition: always()
+
+- task: PowerShell@2
+ displayName: Redact Logs
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1
+ # For now this needs an explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml
+ # Sensitive data can also be added to '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt':
+ # if the file exists, sensitive data for redaction will be sourced from it
+ # (single entry per line; lines starting with '# ' are considered comments and skipped)
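+ # An illustrative redaction file:
+ #   # entries below are redacted from all binlogs
+ #   some-extra-secret-value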
+ arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs'
+ -BinlogToolVersion ${{parameters.BinlogToolVersion}}
+ -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
+ '$(publishing-dnceng-devdiv-code-r-build-re)'
+ '$(MaestroAccessToken)'
+ '$(dn-bot-all-orgs-artifact-feeds-rw)'
+ '$(akams-client-id)'
+ '$(akams-client-secret)'
+ '$(microsoft-symbol-server-pat)'
+ '$(symweb-symbol-server-pat)'
+ '$(dn-bot-all-orgs-build-rw-code-rw)'
+ ${{parameters.CustomSensitiveDataList}}
+ continueOnError: true
+ condition: always()
+
+- task: CopyFiles@2
+ displayName: Gather post build logs
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/PostBuildLogs'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
+
+- template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish Logs
+ pathToPublish: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
+ publishLocation: Container
+ artifactName: PostBuildLogs
+ continueOnError: true
+ condition: always()
--- /dev/null
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: false
+
+- name: args
+ type: object
+ default: {}
+
+steps:
+- ${{ if ne(parameters.is1ESPipeline, true) }}:
+ - template: /eng/common/templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+- ${{ else }}:
+ - template: /eng/common/templates-official/steps/publish-pipeline-artifacts.yml
+ parameters:
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
--- /dev/null
+parameters:
+ # Optional: Azure DevOps PAT with build execute permissions for the build's organization;
+ # only needed if the build that should be retained ran in a different organization than
+ # the one the pipeline executing this template runs in
+ Token: ''
+ # Optional BuildId to retain, defaults to the current running build
+ BuildId: ''
+ # Azure DevOps organization URI for the build, in the https://dev.azure.com/<organization> format.
+ # Defaults to the organization the current pipeline is running on
+ AzdoOrgUri: '$(System.CollectionUri)'
+ # Azure DevOps project for the build. Defaults to the project the current pipeline is running on
+ AzdoProject: '$(System.TeamProject)'
+
+steps:
+ - task: PowerShell@2
+ inputs:
+ targetType: 'filePath'
+ filePath: eng/common/retain-build.ps1
+ pwsh: true
+ arguments: >
+ -AzdoOrgUri ${{parameters.AzdoOrgUri}}
+ -AzdoProject ${{parameters.AzdoProject}}
+ -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
+ -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
+ displayName: Enable permanent build retention
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ BUILD_ID: $(Build.BuildId)
\ No newline at end of file
--- /dev/null
+# Please remember to update the documentation if you make changes to these parameters!
+parameters:
+ HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
+ HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
+ HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
+ HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
+ HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
+ HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY
+ HelixProjectArguments: '' # optional -- arguments passed to the build command
+ HelixConfiguration: '' # optional -- additional property attached to a job
+ HelixPreCommands: '' # optional -- commands to run before Helix work item execution
+ HelixPostCommands: '' # optional -- commands to run after Helix work item execution
+ WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
+ WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
+ WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
+ CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
+ XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
+ XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
+ XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
+ XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
+ XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
+ IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
+ DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
+ DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
+ WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
+ IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
+ HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net)
+ Creator: '' # optional -- if the build is external, use this to specify who is sending the job
+ DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
+ condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
+ continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
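+# A hedged sketch of the xUnit flow (queues, project path, and versions are illustrative;
+# XUnitProjects requires the companion parameters shown, per the notes above):
+#
+# steps:
+# - template: /eng/common/templates/steps/send-to-helix.yml
+#   parameters:
+#     HelixTargetQueues: 'Windows.10.Amd64.Open;Ubuntu.2204.Amd64.Open'
+#     XUnitProjects: '$(Build.SourcesDirectory)/src/MyTests/MyTests.csproj'
+#     XUnitPublishTargetFramework: 'net8.0'
+#     XUnitRuntimeTargetFramework: 'net8.0'
+#     XUnitRunnerVersion: '2.4.2'
+#     IncludeDotNetCli: true
+#     DotNetCliPackageType: 'sdk'
+#     DotNetCliVersion: '8.0.100'
+#     Creator: 'dotnet-bot'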
+
+steps:
+ - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
+ displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixConfiguration: ${{ parameters.HelixConfiguration }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ WorkItemCommand: ${{ parameters.WorkItemCommand }}
+ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ XUnitProjects: ${{ parameters.XUnitProjects }}
+ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
+ displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixConfiguration: ${{ parameters.HelixConfiguration }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ WorkItemCommand: ${{ parameters.WorkItemCommand }}
+ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ XUnitProjects: ${{ parameters.XUnitProjects }}
+ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
--- /dev/null
+parameters:
+ # This template adds arcade-powered source-build to CI.
+
+ # This is a 'steps' template, and is intended for advanced scenarios where the existing build
+ # infra has a careful build methodology that must be followed. For example, a repo
+ # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
+ # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
+ # GitHub. Using this steps template leaves room for that infra to be included.
+
+ # Defines the platform on which to run the steps. See 'eng/common/core-templates/job/source-build.yml'
+ # for details. The entire object is described in the 'job' template for simplicity, even though
+ # the usage of the properties on this object is split between the 'job' and 'steps' templates.
+ platform: {}
+ is1ESPipeline: false
+
+steps:
+# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
+- script: |
+ set -x
+ df -h
+
+ # If file changes are detected, set CopyWipIntoInnerSourceBuildRepo to copy the WIP changes into the inner source build repo.
+ internalRestoreArgs=
+ if ! git diff --quiet; then
+ internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
+ # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
+ # This only works if there is a username/email configured, which won't be the case in most CI runs.
+ git config --get user.email
+ if [ $? -ne 0 ]; then
+ git config user.email dn-bot@microsoft.com
+ git config user.name dn-bot
+ fi
+ fi
+
+ # If building on the internal project, the internal storage variable may be available (usually only if needed)
+ # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
+ # in the default public locations.
+ internalRuntimeDownloadArgs=
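+ # The '$''(' split keeps the right-hand side literal: if AzDO expanded the macro on the
+ # left, the two sides differ and the secret is known to be available.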
+ if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
+ internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
+ fi
+
+ buildConfig=Release
+ # Check if AzDO substitutes in a build config from a variable, and use it if so.
+ if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
+ buildConfig='$(_BuildConfig)'
+ fi
+
+ officialBuildArgs=
+ if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
+ officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
+ fi
+
+ targetRidArgs=
+ if [ '${{ parameters.platform.targetRID }}' != '' ]; then
+ targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
+ fi
+
+ runtimeOsArgs=
+ if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
+ runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
+ fi
+
+ baseOsArgs=
+ if [ '${{ parameters.platform.baseOS }}' != '' ]; then
+ baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
+ fi
+
+ publishArgs=
+ if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
+ publishArgs='--publish'
+ fi
+
+ assetManifestFileName=SourceBuild_RidSpecific.xml
+ if [ '${{ parameters.platform.name }}' != '' ]; then
+ assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
+ fi
+
+ ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
+ --configuration $buildConfig \
+ --restore --build --pack $publishArgs -bl \
+ $officialBuildArgs \
+ $internalRuntimeDownloadArgs \
+ $internalRestoreArgs \
+ $targetRidArgs \
+ $runtimeOsArgs \
+ $baseOsArgs \
+ /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
+ /p:ArcadeBuildFromSource=true \
+ /p:DotNetBuildSourceOnly=true \
+ /p:DotNetBuildRepo=true \
+ /p:AssetManifestFileName=$assetManifestFileName
+ displayName: Build
+
+# Upload build logs for diagnosis.
+- task: CopyFiles@2
+ displayName: Prepare BuildLogs staging directory
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)'
+ Contents: |
+ **/*.log
+ **/*.binlog
+ artifacts/sb/prebuilt-report/**
+ TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
+ CleanTargetFolder: true
+ continueOnError: true
+ condition: succeededOrFailed()
+
+- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ displayName: Publish BuildLogs
+ targetPath: '$(Build.StagingDirectory)/BuildLogs'
+ artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
+ continueOnError: true
+ condition: succeededOrFailed()
+ sbomEnabled: false # we don't need SBOM for logs
+
+# Manually inject component detection so that we can ignore the source build upstream cache, which contains
+# a nupkg cache of input packages (a local feed).
+# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
+# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
+- task: ComponentGovernanceComponentDetection@0
+ displayName: Component Detection (Exclude upstream cache)
+ inputs:
+ ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache'
--- /dev/null
+parameters:
+ is1ESPipeline: false
+
+variables:
+ - ${{ if eq(parameters.is1ESPipeline, 'true') }}:
+ - template: /eng/common/templates-official/variables/pool-providers.yml
+ - ${{ else }}:
+ - template: /eng/common/templates/variables/pool-providers.yml
\ No newline at end of file
--- /dev/null
+deb http://raspbian.raspberrypi.org/raspbian/ bookworm main contrib non-free rpi
+deb-src http://raspbian.raspberrypi.org/raspbian/ bookworm main contrib non-free rpi
case $lowerI in
-\?|-h|--help)
usage
- exit 1
;;
arm)
__BuildArch=arm
__UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
;;
lldb*)
- version="${lowerI/lldb/}"
- parts=(${version//./ })
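+ # strip the alphabetic prefix (and any '-' or '=') so only the numeric version remains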
+ version="$(echo "$lowerI" | tr -d '[:alpha:]-=')"
+ majorVersion="${version%%.*}"
+
+ [ -z "${version##*.*}" ] && minorVersion="${version#*.}"
+ if [ -z "$minorVersion" ]; then
+ minorVersion=0
+ fi
# for versions > 6.0, lldb has dropped the minor version
- if [[ "${parts[0]}" -gt 6 ]]; then
- version="${parts[0]}"
+ if [ "$majorVersion" -le 6 ]; then
+ version="$majorVersion.$minorVersion"
+ else
+ version="$majorVersion"
fi
__LLDB_Package="liblldb-${version}-dev"
unset __LLDB_Package
;;
llvm*)
- version="${lowerI/llvm/}"
- parts=(${version//./ })
- __LLVM_MajorVersion="${parts[0]}"
- __LLVM_MinorVersion="${parts[1]}"
-
- # for versions > 6.0, llvm has dropped the minor version
- if [[ -z "$__LLVM_MinorVersion" && "$__LLVM_MajorVersion" -le 6 ]]; then
- __LLVM_MinorVersion=0;
+ version="$(echo "$lowerI" | tr -d '[:alpha:]-=')"
+ __LLVM_MajorVersion="${version%%.*}"
+
+ [ -z "${version##*.*}" ] && __LLVM_MinorVersion="${version#*.}"
+ if [ -z "$__LLVM_MinorVersion" ]; then
+ __LLVM_MinorVersion=0
+ fi
+
+ # for versions > 6.0, llvm has dropped the minor version
+ if [ "$__LLVM_MajorVersion" -gt 6 ]; then
+ __LLVM_MinorVersion=
fi
+
;;
xenial) # Ubuntu 16.04
if [[ "$__CodeName" != "jessie" ]]; then
bullseye) # Debian 11
__CodeName=bullseye
+ if [[ -z "$__UbuntuRepo" ]]; then
+ __UbuntuRepo="http://ftp.debian.org/debian/"
+ fi
+ ;;
+ bookworm) # Debian 12
+ __CodeName=bookworm
+
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
alpine*)
__CodeName=alpine
__UbuntuRepo=
- version="${lowerI/alpine/}"
- if [[ "$version" == "edge" ]]; then
+ if [[ "$lowerI" == "alpineedge" ]]; then
__AlpineVersion=edge
else
- parts=(${version//./ })
- __AlpineMajorVersion="${parts[0]}"
- __AlpineMinoVersion="${parts[1]}"
- __AlpineVersion="$__AlpineMajorVersion.$__AlpineMinoVersion"
+ version="$(echo "$lowerI" | tr -d '[:alpha:]-=')"
+ __AlpineMajorVersion="${version%%.*}"
+ __AlpineMinorVersion="${version#*.}"
+ __AlpineVersion="$__AlpineMajorVersion.$__AlpineMinorVersion"
fi
;;
freebsd13)
mkdir -p "$__RootfsDir"
__RootfsDir="$( cd "$__RootfsDir" && pwd )"
+__hasWget=
+ensureDownloadTool()
+{
+ if command -v wget &> /dev/null; then
+ __hasWget=1
+ elif command -v curl &> /dev/null; then
+ __hasWget=0
+ else
+ >&2 echo "ERROR: either wget or curl is required by this script."
+ exit 1
+ fi
+}
+
if [[ "$__CodeName" == "alpine" ]]; then
__ApkToolsVersion=2.12.11
__ApkToolsDir="$(mktemp -d)"
__ApkKeysDir="$(mktemp -d)"
-
arch="$(uname -m)"
- wget "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static" -P "$__ApkToolsDir"
+
+ ensureDownloadTool
+
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -P "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static"
+ else
+ curl -SLO --create-dirs --output-dir "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static"
+ fi
if [[ "$arch" == "x86_64" ]]; then
__ApkToolsSHA512SUM="53e57b49230da07ef44ee0765b9592580308c407a8d4da7125550957bb72cb59638e04f8892a18b584451c8d841d1c7cb0f0ab680cc323a3015776affaa3be33"
elif [[ "$arch" == "aarch64" ]]; then
__ApkToolsSHA512SUM="9e2b37ecb2b56c05dad23d379be84fd494c14bd730b620d0d576bda760588e1f2f59a7fcb2f2080577e0085f23a0ca8eadd993b4e61c2ab29549fdb71969afd0"
else
- echo "WARNING: add missing hash for your host architecture. To find the value, use: `find /tmp -name apk.static -exec sha512sum {} \;`"
+ echo "WARNING: add missing hash for your host architecture. To find the value, use: 'find /tmp -name apk.static -exec sha512sum {} \;'"
fi
echo "$__ApkToolsSHA512SUM $__ApkToolsDir/apk.static" | sha512sum -c
chmod +x "$__ApkToolsDir/apk.static"
fi
# initialize DB
+ # shellcheck disable=SC2086
"$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" --initdb add
if [[ "$__AlpineLlvmLibsLookup" == 1 ]]; then
+ # shellcheck disable=SC2086
__AlpinePackages+=" $("$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
fi
# install all packages in one go
+ # shellcheck disable=SC2086
"$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
elif [[ "$__CodeName" == "freebsd" ]]; then
mkdir -p "$__RootfsDir"/usr/local/etc
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
- wget -O - "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
+
+ ensureDownloadTool
+
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
+ else
+ curl -SL "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
+ fi
echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf
echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf
mkdir -p "$__RootfsDir"/tmp
# get and build package manager
- wget -O - "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
+ else
+ curl -SL "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
+ fi
cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
# needed for install to succeed
mkdir -p "$__RootfsDir"/host/etc
rm -rf "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
# install packages we need.
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update
+ # shellcheck disable=SC2086
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages
elif [[ "$__CodeName" == "illumos" ]]; then
mkdir "$__RootfsDir/tmp"
pushd "$__RootfsDir/tmp"
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
+
+ ensureDownloadTool
+
echo "Downloading sysroot."
- wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
+ else
+ curl -SL https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
+ fi
echo "Building binutils. Please wait.."
- wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf -
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf -
+ else
+ curl -SL https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf -
+ fi
mkdir build-binutils && cd build-binutils
../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir"
make -j "$JOBS" && make install && cd ..
echo "Building gcc. Please wait.."
- wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf -
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O- https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf -
+ else
+ curl -SL https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf -
+ fi
CFLAGS="-fPIC"
CXXFLAGS="-fPIC"
CXXFLAGS_FOR_TARGET="-fPIC"
fi
BaseUrl="$BaseUrl/packages/SmartOS/trunk/${__illumosArch}/All"
echo "Downloading manifest"
- wget "$BaseUrl"
+ if [[ "$__hasWget" == 1 ]]; then
+ wget "$BaseUrl"
+ else
+ curl -SLO "$BaseUrl"
+ fi
echo "Downloading dependencies."
read -ra array <<<"$__IllumosPackages"
for package in "${array[@]}"; do
# find last occurrence of package in listing and extract its name
package="$(sed -En '/.*href="('"$package"'-[0-9].*).tgz".*/h;$!d;g;s//\1/p' All)"
echo "Resolved name '$package'"
- wget "$BaseUrl"/"$package".tgz
+ if [[ "$__hasWget" == 1 ]]; then
+ wget "$BaseUrl"/"$package".tgz
+ else
+ curl -SLO "$BaseUrl"/"$package".tgz
+ fi
ar -x "$package".tgz
tar --skip-old-files -xzf "$package".tmp.tg* -C "$__RootfsDir" 2>/dev/null
done
rm -rf "$__RootfsDir"/{tmp,+*}
mkdir -p "$__RootfsDir"/usr/include/net
mkdir -p "$__RootfsDir"/usr/include/netpacket
- wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
- wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
- wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
- wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
+ wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
+ wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
+ wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
+ else
+ curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
+ curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
+ curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
+ curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
+ fi
elif [[ "$__CodeName" == "haiku" ]]; then
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
mkdir "$__RootfsDir/tmp/download"
+ ensureDownloadTool
+
echo "Downloading Haiku package tool"
- git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 $__RootfsDir/tmp/script
- wget -O "$__RootfsDir/tmp/download/hosttools.zip" $($__RootfsDir/tmp/script/fetch.sh --hosttools)
+ git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 "$__RootfsDir/tmp/script"
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)"
+ else
+ curl -SLo "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)"
+ fi
+
unzip -o "$__RootfsDir/tmp/download/hosttools.zip" -d "$__RootfsDir/tmp/bin"
DepotBaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg"
echo "Downloading $package..."
# API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60
# The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598
- hpkgDownloadUrl="$(wget -qO- --post-data='{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \
- --header='Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
- wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl"
+ if [[ "$__hasWget" == 1 ]]; then
+ hpkgDownloadUrl="$(wget -qO- --post-data '{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \
+ --header 'Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
+ wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl"
+ else
+ hpkgDownloadUrl="$(curl -sSL -XPOST --data '{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \
+ --header 'Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
+ curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$hpkgDownloadUrl"
+ fi
done
for package in haiku haiku_devel; do
echo "Downloading $package..."
- hpkgVersion="$(wget -qO- $HpkgBaseUrl | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
- wget -P "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
+ if [[ "$__hasWget" == 1 ]]; then
+ hpkgVersion="$(wget -qO- "$HpkgBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
+ wget -P "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
+ else
+ hpkgVersion="$(curl -sSL "$HpkgBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
+ curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
+ fi
done
# Set up the sysroot
# Download buildtools
echo "Downloading Haiku buildtools"
- wget -O "$__RootfsDir/tmp/download/buildtools.zip" $($__RootfsDir/tmp/script/fetch.sh --buildtools --arch=$__HaikuArch)
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -O "$__RootfsDir/tmp/download/buildtools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --buildtools --arch=$__HaikuArch)"
+ else
+ curl -SLo "$__RootfsDir/tmp/download/buildtools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --buildtools --arch=$__HaikuArch)"
+ fi
unzip -o "$__RootfsDir/tmp/download/buildtools.zip" -d "$__RootfsDir"
# Cleaning up temporary files
__Keyring="$__Keyring --force-check-gpg"
fi
+ # shellcheck disable=SC2086
debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
cp "$__CrossDir/$__BuildArch/sources.list.$__CodeName" "$__RootfsDir/etc/apt/sources.list"
chroot "$__RootfsDir" apt-get update
chroot "$__RootfsDir" apt-get -f -y install
+ # shellcheck disable=SC2086
chroot "$__RootfsDir" apt-get -y install $__UbuntuPackages
chroot "$__RootfsDir" symlinks -cr /usr
chroot "$__RootfsDir" apt-get clean
ROOTFS_DIR="$__RootfsDir" "$__CrossDir/tizen-build-rootfs.sh" "$__BuildArch"
else
echo "Unsupported target platform."
- usage;
- exit 1
+ usage
fi
Log()
{
- if [ $VERBOSE -ge $1 ]; then
+ if [ $VERBOSE -ge 1 ]; then
echo ${@:2}
fi
}
endif()
endif()
+# Set C++ standard library options if specified
+set(CLR_CMAKE_CXX_STANDARD_LIBRARY "" CACHE STRING "Standard library flavor to link against. Only supported with the Clang compiler.")
+if (CLR_CMAKE_CXX_STANDARD_LIBRARY)
+ add_compile_options($<$<COMPILE_LANG_AND_ID:CXX,Clang>:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>)
+ add_link_options($<$<LINK_LANG_AND_ID:CXX,Clang>:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>)
+endif()
+
+option(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC "Statically link against the C++ standard library" OFF)
+if(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC)
+ add_link_options($<$<LINK_LANGUAGE:CXX>:-static-libstdc++>)
+endif()
+
+set(CLR_CMAKE_CXX_ABI_LIBRARY "" CACHE STRING "C++ ABI implementation library to link against. Only supported with the Clang compiler.")
+if (CLR_CMAKE_CXX_ABI_LIBRARY)
+ # The user may specify the ABI library with the 'lib' prefix, like 'libstdc++'. Strip the prefix here so the linker finds the right library.
+ string(REGEX REPLACE "^lib(.+)" "\\1" CLR_CMAKE_CXX_ABI_LIBRARY ${CLR_CMAKE_CXX_ABI_LIBRARY})
+ # We need to specify this as a linker-backend option as Clang will filter this option out when linking to libc++.
+ add_link_options("LINKER:-l${CLR_CMAKE_CXX_ABI_LIBRARY}")
+endif()
+
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
dotnetRoot="$dotnetRoot/$architecture"
fi
-InstallDotNet $dotnetRoot $version "$architecture" $runtime true $runtimeSourceFeed $runtimeSourceFeedKey || {
+InstallDotNet "$dotnetRoot" $version "$architecture" $runtime true $runtimeSourceFeed $runtimeSourceFeedKey || {
local exit_code=$?
Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "dotnet-install.sh failed (exit code '$exit_code')." >&2
ExitWithExitCode $exit_code
<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. -->
<Project>
+ <PropertyGroup>
+ <ImportDirectoryBuildTargets>false</ImportDirectoryBuildTargets>
+ <ImportDirectoryPackagesProps>false</ImportDirectoryPackagesProps>
+ </PropertyGroup>
+
<Import Project="Sdk.props" Sdk="Microsoft.DotNet.Arcade.Sdk" />
</Project>
<PropertyGroup>
<TargetFramework>net472</TargetFramework>
- <ImportDirectoryBuildTargets>false</ImportDirectoryBuildTargets>
<AutomaticallyUseReferenceAssemblyPackages>false</AutomaticallyUseReferenceAssemblyPackages>
</PropertyGroup>
<ItemGroup>
#
# This file detects the C/C++ compiler and exports it to the CC/CXX environment variables
#
-# NOTE: some scripts source this file and rely on stdout being empty, make sure to not output anything here!
+# NOTE: some scripts source this file and rely on stdout being empty; make sure
+# not to output *anything* here, unless it is an error message that fails the
+# build.
if [ -z "$build_arch" ] || [ -z "$compiler" ]; then
echo "Usage..."
echo "$desired_version"
}
+__baseOS="$(uname)"
+set_compiler_version_from_CC() {
+ if [ "$__baseOS" = "Darwin" ]; then
+ # On Darwin, the versions from -version/-dumpversion refer to Xcode
+ # versions, not llvm versions, so we can't rely on them.
+ return
+ fi
+
+ version="$("$CC" -dumpversion)"
+ if [ -z "$version" ]; then
+ echo "Error: $CC -dumpversion didn't provide a version"
+ exit 1
+ fi
+
+ # gcc and clang often display 3 part versions. However, gcc can show only 1 part in some environments.
+ IFS=. read -r majorVersion minorVersion _ <<EOF
+$version
+EOF
+}
+
if [ -z "$CLR_CC" ]; then
# Set default versions
if [ -z "$majorVersion" ]; then
# note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero.
if [ "$compiler" = "clang" ]; then versions="18 17 16 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5"
- elif [ "$compiler" = "gcc" ]; then versions="13 12 11 10 9 8 7 6 5 4.9"; fi
+ elif [ "$compiler" = "gcc" ]; then versions="14 13 12 11 10 9 8 7 6 5 4.9"; fi
for version in $versions; do
_major="${version%%.*}"
done
if [ -z "$majorVersion" ]; then
- if command -v "$compiler" > /dev/null; then
- if [ "$(uname)" != "Darwin" ]; then
- echo "Warning: Specific version of $compiler not found, falling back to use the one in PATH."
- fi
- CC="$(command -v "$compiler")"
- CXX="$(command -v "$cxxCompiler")"
- else
- echo "No usable version of $compiler found."
+ if ! command -v "$compiler" > /dev/null; then
+ echo "Error: No usable version of $compiler found."
exit 1
fi
+
+ CC="$(command -v "$compiler" 2> /dev/null)"
+ CXX="$(command -v "$cxxCompiler" 2> /dev/null)"
+ set_compiler_version_from_CC
else
- if [ "$compiler" = "clang" ] && [ "$majorVersion" -lt 5 ]; then
- if [ "$build_arch" = "arm" ] || [ "$build_arch" = "armel" ]; then
- if command -v "$compiler" > /dev/null; then
- echo "Warning: Found clang version $majorVersion which is not supported on arm/armel architectures, falling back to use clang from PATH."
- CC="$(command -v "$compiler")"
- CXX="$(command -v "$cxxCompiler")"
- else
- echo "Found clang version $majorVersion which is not supported on arm/armel architectures, and there is no clang in PATH."
- exit 1
- fi
+ if [ "$compiler" = "clang" ] && [ "$majorVersion" -lt 5 ] && { [ "$build_arch" = "arm" ] || [ "$build_arch" = "armel" ]; }; then
+ # If a major version was provided explicitly, and it was too old, find a newer compiler instead
+ if ! command -v "$compiler" > /dev/null; then
+ echo "Error: Found clang version $majorVersion which is not supported on arm/armel architectures, and there is no clang in PATH."
+ exit 1
fi
+
+ CC="$(command -v "$compiler" 2> /dev/null)"
+ CXX="$(command -v "$cxxCompiler" 2> /dev/null)"
+ set_compiler_version_from_CC
fi
fi
else
desired_version="$(check_version_exists "$majorVersion" "$minorVersion")"
if [ "$desired_version" = "-1" ]; then
- echo "Could not find specific version of $compiler: $majorVersion $minorVersion."
+ echo "Error: Could not find specific version of $compiler: $majorVersion $minorVersion."
exit 1
fi
fi
if [ -z "$CC" ]; then
- CC="$(command -v "$compiler$desired_version")"
- CXX="$(command -v "$cxxCompiler$desired_version")"
- if [ -z "$CXX" ]; then CXX="$(command -v "$cxxCompiler")"; fi
+ CC="$(command -v "$compiler$desired_version" 2> /dev/null)"
+ CXX="$(command -v "$cxxCompiler$desired_version" 2> /dev/null)"
+ if [ -z "$CXX" ]; then CXX="$(command -v "$cxxCompiler" 2> /dev/null)"; fi
+ set_compiler_version_from_CC
fi
else
if [ ! -f "$CLR_CC" ]; then
- echo "CLR_CC is set but path '$CLR_CC' does not exist"
+ echo "Error: CLR_CC is set but path '$CLR_CC' does not exist"
exit 1
fi
CC="$CLR_CC"
CXX="$CLR_CXX"
+ set_compiler_version_from_CC
fi
if [ -z "$CC" ]; then
- echo "Unable to find $compiler."
+ echo "Error: Unable to find $compiler."
exit 1
fi
-# Only lld version >= 9 can be considered stable. lld supports s390x starting from 18.0.
-if [ "$compiler" = "clang" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -ge 9 ] && ([ "$build_arch" != "s390x" ] || [ "$majorVersion" -ge 18 ]); then
- if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then
- LDFLAGS="-fuse-ld=lld"
+if [ "$__baseOS" != "Darwin" ]; then
+ # On Darwin, we always want to use the Apple linker.
+
+ # Only lld version >= 9 can be considered stable. lld supports s390x starting from 18.0.
+ if [ "$compiler" = "clang" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -ge 9 ] && { [ "$build_arch" != "s390x" ] || [ "$majorVersion" -ge 18 ]; }; then
+ if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then
+ LDFLAGS="-fuse-ld=lld"
+ fi
fi
fi
-SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version")"
+SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version" 2> /dev/null)"
export CC CXX LDFLAGS SCAN_BUILD_COMMAND
# shellcheck disable=SC1091
if [ -e "${rootfsDir}/etc/os-release" ]; then
. "${rootfsDir}/etc/os-release"
- if [ "${ID}" = "rhel" ] || [ "${ID}" = "rocky" ] || [ "${ID}" = "alpine" ] || [ "${ID}" = "ol" ]; then
- VERSION_ID="${VERSION_ID%.*}" # Remove the last version digit for these distros
- fi
-
if echo "${VERSION_ID:-}" | grep -qE '^([[:digit:]]|\.)+$'; then
nonPortableRid="${ID}.${VERSION_ID}-${targetArch}"
else
nonPortableRid="android.$__android_sdk_version-${targetArch}"
elif [ "$targetOs" = "illumos" ]; then
__uname_version=$(uname -v)
- case "$__uname_version" in
- omnios-*)
- __omnios_major_version=$(echo "$__uname_version" | cut -c9-10)
- nonPortableRid="omnios.$__omnios_major_version-${targetArch}"
- ;;
- joyent_*)
- __smartos_major_version=$(echo "$__uname_version" | cut -c9-10)
- nonPortableRid="smartos.$__smartos_major_version-${targetArch}"
- ;;
- *)
- nonPortableRid="illumos-${targetArch}"
- ;;
- esac
+ nonPortableRid="illumos-${targetArch}"
elif [ "$targetOs" = "solaris" ]; then
__uname_version=$(uname -v)
__solaris_major_version=$(echo "$__uname_version" | cut -d'.' -f1)
$GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
}
if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
- $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.8.5" -MemberType NoteProperty
+ $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.10.0-pre.4.0" -MemberType NoteProperty
}
if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
$xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
--- /dev/null
+# Overview
+
+Arcade provides templates for public (`/templates`) and 1ES pipeline template (`/templates-official`) scenarios. Pipelines which are required to be managed by 1ES pipeline templates should reference `/templates-official`; all other pipelines may reference `/templates`.
+
+## How to use
+
+Basic guidance is:
+
+- 1ES Pipeline Template or 1ES Microbuild template runs should reference `eng/common/templates-official`. Any internal production-grade pipeline should use these templates.
+
+- All other runs should reference `eng/common/templates`.
+
+See [azure-pipelines.yml](../../azure-pipelines.yml) (templates-official example) or [azure-pipelines-pr.yml](../../azure-pipelines-pr.yml) (templates example) for examples.
+
+### The `templateIs1ESManaged` parameter
+
+The `templateIs1ESManaged` parameter is available on most templates and determines which of the two variants is used for nested templates. See [Development notes](#development-notes) below for more information on the `templateIs1ESManaged` parameter.
+
+- For templates under `job/`, `jobs/`, `steps/`, or `post-build/`, this parameter must be explicitly set (a sketch follows this list).
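+
+As a minimal sketch, assuming a non-1ES pipeline that references a `jobs/`-level template directly (the surrounding pipeline and job are hypothetical; only the `templateIs1ESManaged` parameter itself comes from this document):
+
+``` yaml
+# Hypothetical fragment: the variant used for nested templates is stated explicitly.
+jobs:
+- template: /eng/common/templates/jobs/jobs.yml@self
+  parameters:
+    templateIs1ESManaged: false  # assumption: a public, non-1ES-managed run
+    jobs:
+    - job: Windows
+      steps:
+      - script: echo hello
+```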
+
+## Multiple outputs
+
+1ES pipeline templates impose a policy where every artifact publish step results in additional security scans being injected into your pipeline. When using `templates-official/jobs/jobs.yml`, Arcade reduces the number of injected scans by gathering all publishing outputs under [Build.ArtifactStagingDirectory](https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services) and using the [outputParentDirectory](https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/outputs#multiple-outputs) feature of 1ES pipeline templates. When implementing your pipeline, if you place publish artifacts in `$(Build.ArtifactStagingDirectory)` and use the 1ES-provided template context, you can reduce the number of security scans for your pipeline.
+
+Example:
+``` yaml
+# azure-pipelines.yml
+extends:
+ template: azure-pipelines/MicroBuild.1ES.Official.yml@MicroBuildTemplate
+ parameters:
+ stages:
+ - stage: build
+ jobs:
+ - template: /eng/common/templates-official/jobs/jobs.yml@self
+ parameters:
+ # 1ES makes use of outputs to reduce security task injection overhead
+ templateContext:
+ outputs:
+ - output: pipelineArtifact
+ displayName: 'Publish logs from source'
+ continueOnError: true
+ condition: always()
+ targetPath: $(Build.ArtifactStagingDirectory)/artifacts/log
+ artifactName: Logs
+ jobs:
+ - job: Windows
+ steps:
+ - script: echo "friendly neighborhood" > artifacts/marvel/spiderman.txt
+ # copy build outputs to artifact staging directory for publishing
+ - task: CopyFiles@2
+ displayName: Gather build output
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)/artifacts/marvel'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/marvel'
+```
+
+Note: Multiple outputs are ONLY applicable to 1ES pipeline template publishing (only usable when referencing `templates-official`).
+
+# Development notes
+
+**Folder / file structure**
+
+``` text
+eng\common\
+ [templates || templates-official]\
+ job\
+ job.yml (shim + artifact publishing logic)
+ onelocbuild.yml (shim)
+ publish-build-assets.yml (shim)
+ source-build.yml (shim)
+ source-index-stage1.yml (shim)
+ jobs\
+ codeql-build.yml (shim)
+ jobs.yml (shim)
+ source-build.yml (shim)
+ post-build\
+ post-build.yml (shim)
+ trigger-subscription.yml (shim)
+ common-variabls.yml (shim)
+ setup-maestro-vars.yml (shim)
+ steps\
+ publish-build-artifacts.yml (logic)
+ publish-pipeline-artifacts.yml (logic)
+ add-build-channel.yml (shim)
+ component-governance.yml (shim)
+ generate-sbom.yml (shim)
+ publish-logs.yml (shim)
+ retain-build.yml (shim)
+ send-to-helix.yml (shim)
+ source-build.yml (shim)
+ variables\
+ pool-providers.yml (logic + redirect) # templates/variables/pool-providers.yml will redirect to templates-official/variables/pool-providers.yml if you are running in the internal project
+ sdl-variables.yml (logic)
+ core-templates\
+ job\
+ job.yml (logic)
+ onelocbuild.yml (logic)
+ publish-build-assets.yml (logic)
+ source-build.yml (logic)
+ source-index-stage1.yml (logic)
+ jobs\
+ codeql-build.yml (logic)
+ jobs.yml (logic)
+ source-build.yml (logic)
+ post-build\
+ common-variabls.yml (logic)
+ post-build.yml (logic)
+ setup-maestro-vars.yml (logic)
+ trigger-subscription.yml (logic)
+ steps\
+ add-build-to-channel.yml (logic)
+ component-governance.yml (logic)
+ generate-sbom.yml (logic)
+ publish-build-artifacts.yml (redirect)
+ publish-logs.yml (logic)
+ publish-pipeline-artifacts.yml (redirect)
+ retain-build.yml (logic)
+ send-to-helix.yml (logic)
+ source-build.yml (logic)
+ variables\
+ pool-providers.yml (redirect)
+```
+
+In the listing above, each file is designated as "shim", "logic", or "redirect".
+
+- shim - represents a YAML file which is an intermediate step between pipeline logic and .NET Core Engineering's templates (`core-templates`), and which defines the `is1ESPipeline` parameter value.
+
+- logic - represents actual base template logic.
+
+- redirect - represents a file in `core-templates` which redirects to the "logic" file in either `templates` or `templates-official`.
+
+Logic for Arcade's templates lives **primarily** in the `core-templates` folder. The exceptions are the artifact publishing steps, which are handled differently between 1ES pipeline templates and standard templates. `templates` and `templates-official` provide shim entry points which redirect to `core-templates` while also defining the `is1ESPipeline` parameter. If a shim is referenced from `templates`, then `is1ESPipeline` is set to `false`. If a shim is referenced from `templates-official`, then `is1ESPipeline` is set to `true`.
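+
+For illustration, a shim is little more than a redirect that pins `is1ESPipeline` and forwards every caller-supplied parameter unchanged; this mirrors the `templates-official` job shims introduced elsewhere in this change:
+
+``` yaml
+# templates-official/job/onelocbuild.yml (shim)
+jobs:
+- template: /eng/common/core-templates/job/onelocbuild.yml
+  parameters:
+    is1ESPipeline: true
+
+    # forward all remaining parameters to the core template
+    ${{ each parameter in parameters }}:
+      ${{ parameter.key }}: ${{ parameter.value }}
+```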
+
+Within `templates` and `templates-official`, the templates at the "stages" and "jobs" / "job" levels have been replaced with shims. Templates at the "steps" and "variables" levels are typically too granular to be replaced with shims and instead contain logic which is directly applicable to either scenario.
+
+Within `core-templates`, there are a handful of places where logic is dependent on which shim entry point was used. In those places, we redirect back to the respective logic file in `templates` or `templates-official`.
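+
+A plausible shape for such a redirect (a hedged sketch; the exact file contents are not shown in this change) selects the variant based on the `is1ESPipeline` value passed down by the shim:
+
+``` yaml
+# core-templates/steps/publish-build-artifacts.yml (redirect, hypothetical sketch)
+parameters:
+- name: is1ESPipeline
+  type: boolean
+  default: false
+
+steps:
+- ${{ if eq(parameters.is1ESPipeline, true) }}:
+  - template: /eng/common/templates-official/steps/publish-build-artifacts.yml
+    parameters:
+      ${{ each parameter in parameters }}:
+        ${{ parameter.key }}: ${{ parameter.value }}
+- ${{ else }}:
+  - template: /eng/common/templates/steps/publish-build-artifacts.yml
+    parameters:
+      ${{ each parameter in parameters }}:
+        ${{ parameter.key }}: ${{ parameter.value }}
+```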
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
-parameters:
-# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- cancelTimeoutInMinutes: ''
- condition: ''
- container: ''
- continueOnError: false
- dependsOn: ''
- displayName: ''
- pool: ''
- steps: []
- strategy: ''
- timeoutInMinutes: ''
- variables: []
- workspace: ''
- templateContext: ''
-
-# Job base template specific parameters
- # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
- artifacts: ''
- enableMicrobuild: false
- enablePublishBuildArtifacts: false
- enablePublishBuildAssets: false
- enablePublishTestResults: false
- enablePublishUsingPipelines: false
- enableBuildRetry: false
- disableComponentGovernance: ''
- componentGovernanceIgnoreDirectories: ''
- mergeTestResults: false
- testRunTitle: ''
- testResultsFormat: ''
- name: ''
- preSteps: []
- runAsPublic: false
-# Sbom related params
- enableSbom: true
- PackageVersion: 7.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
-
jobs:
-- job: ${{ parameters.name }}
-
- ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
- cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
-
- ${{ if ne(parameters.condition, '') }}:
- condition: ${{ parameters.condition }}
-
- ${{ if ne(parameters.container, '') }}:
- container: ${{ parameters.container }}
-
- ${{ if ne(parameters.continueOnError, '') }}:
- continueOnError: ${{ parameters.continueOnError }}
-
- ${{ if ne(parameters.dependsOn, '') }}:
- dependsOn: ${{ parameters.dependsOn }}
-
- ${{ if ne(parameters.displayName, '') }}:
- displayName: ${{ parameters.displayName }}
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
-
- ${{ if ne(parameters.strategy, '') }}:
- strategy: ${{ parameters.strategy }}
-
- ${{ if ne(parameters.timeoutInMinutes, '') }}:
- timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
-
- ${{ if ne(parameters.templateContext, '') }}:
- templateContext: ${{ parameters.templateContext }}
-
- variables:
- - ${{ if ne(parameters.enableTelemetry, 'false') }}:
- - name: DOTNET_CLI_TELEMETRY_PROFILE
- value: '$(Build.Repository.Uri)'
- - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
- - name: EnableRichCodeNavigation
- value: 'true'
- # Retry signature validation up to three times, waiting 2 seconds between attempts.
- # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
- - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
- value: 3,2000
- - ${{ each variable in parameters.variables }}:
- # handle name-value variable syntax
- # example:
- # - name: [key]
- # value: [value]
- - ${{ if ne(variable.name, '') }}:
- - name: ${{ variable.name }}
- value: ${{ variable.value }}
-
- # handle variable groups
- - ${{ if ne(variable.group, '') }}:
- - group: ${{ variable.group }}
-
- # handle template variable syntax
- # example:
- # - template: path/to/template.yml
- # parameters:
- # [key]: [value]
- - ${{ if ne(variable.template, '') }}:
- - template: ${{ variable.template }}
- ${{ if ne(variable.parameters, '') }}:
- parameters: ${{ variable.parameters }}
-
- # handle key-value variable syntax.
- # example:
- # - [key]: [value]
- - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
- - ${{ each pair in variable }}:
- - name: ${{ pair.key }}
- value: ${{ pair.value }}
-
- # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
- - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: DotNet-HelixApi-Access
-
- ${{ if ne(parameters.workspace, '') }}:
- workspace: ${{ parameters.workspace }}
-
- steps:
- - ${{ if ne(parameters.preSteps, '') }}:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - task: MicroBuildSigningPlugin@4
- displayName: Install MicroBuild plugin
- inputs:
- signType: $(_SignType)
- zipSources: false
- feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
- env:
- TeamName: $(_TeamName)
- MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)'
- continueOnError: ${{ parameters.continueOnError }}
- condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
- - task: NuGetAuthenticate@1
-
- - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
- - task: DownloadPipelineArtifact@2
- inputs:
- buildType: current
- artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
- targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
- itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
-
- - ${{ each step in parameters.steps }}:
- - ${{ step }}
-
- - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
- - task: RichCodeNavIndexer@0
- displayName: RichCodeNav Upload
- inputs:
- languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
- environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }}
- richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
- uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
- continueOnError: true
-
- - template: /eng/common/templates-official/steps/component-governance.yml
- parameters:
- ${{ if eq(parameters.disableComponentGovernance, '') }}:
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
- disableComponentGovernance: false
- ${{ else }}:
- disableComponentGovernance: true
- ${{ else }}:
- disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
- componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: MicroBuildCleanup@1
- displayName: Execute Microbuild cleanup tasks
- condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- env:
- TeamName: $(_TeamName)
-
- - ${{ if ne(parameters.artifacts.publish, '') }}:
- - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- - task: CopyFiles@2
- displayName: Gather binaries for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/bin'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
- - task: CopyFiles@2
- displayName: Gather packages for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/packages'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish pipeline artifacts
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
- continueOnError: true
- condition: always()
- - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- - task: 1ES.PublishPipelineArtifact@1
- inputs:
- targetPath: 'artifacts/log'
- artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
- displayName: 'Publish logs'
- continueOnError: true
- condition: always()
-
- - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
- continueOnError: true
- condition: always()
-
- - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
- - task: PublishTestResults@2
- displayName: Publish XUnit Test Results
- inputs:
- testResultsFormat: 'xUnit'
- testResultsFiles: '*.xml'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
- testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
- mergeTestResults: ${{ parameters.mergeTestResults }}
- continueOnError: true
- condition: always()
- - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
- - task: PublishTestResults@2
- displayName: Publish TRX Test Results
- inputs:
- testResultsFormat: 'VSTest'
- testResultsFiles: '*.trx'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
- testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
- mergeTestResults: ${{ parameters.mergeTestResults }}
- continueOnError: true
- condition: always()
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
- - template: /eng/common/templates-official/steps/generate-sbom.yml
- parameters:
- PackageVersion: ${{ parameters.packageVersion}}
- BuildDropPath: ${{ parameters.buildDropPath }}
- IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
- - task: 1ES.PublishPipelineArtifact@1
- inputs:
- targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration'
- artifactName: 'BuildConfiguration'
- displayName: 'Publish build retry configuration'
- continueOnError: true
\ No newline at end of file
+- template: /eng/common/core-templates/job/job.yml
+ parameters:
+ is1ESPipeline: true
+
+ # publish artifacts
+ # for 1ES managed templates, use the templateContext.output to handle multiple outputs.
+ templateContext:
+ outputParentDirectory: $(Build.ArtifactStagingDirectory)
+ outputs:
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - output: buildArtifacts
+ displayName: Publish pipeline artifacts
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
+ ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
+ condition: always()
+ continueOnError: true
+ - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - output: pipelineArtifact
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log'
+ artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)_Attempt$(System.JobAttempt)') }}
+ displayName: 'Publish logs'
+ continueOnError: true
+ condition: always()
+ sbomEnabled: false # we don't need SBOM for logs
+
+ - ${{ if eq(parameters.enablePublishBuildArtifacts, true) }}:
+ - output: buildArtifacts
+ displayName: Publish Logs
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
+ publishLocation: Container
+ ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
+ continueOnError: true
+ condition: always()
+ sbomEnabled: false # we don't need SBOM for logs
+
+ - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
+ - output: pipelineArtifact
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/eng/common/BuildConfiguration'
+ artifactName: 'BuildConfiguration'
+ displayName: 'Publish build retry configuration'
+ continueOnError: true
+ sbomEnabled: false # we don't need SBOM for BuildConfiguration
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
+ - output: pipelineArtifact
+ displayName: Publish SBOM manifest
+ continueOnError: true
+ targetPath: $(Build.ArtifactStagingDirectory)/sbom
+ artifactName: $(ARTIFACT_NAME)
+
+ # add any outputs provided via root yaml
+ - ${{ if ne(parameters.templateContext.outputs, '') }}:
+ - ${{ each output in parameters.templateContext.outputs }}:
+ - ${{ output }}
+
+ # add any remaining templateContext properties
+ ${{ each context in parameters.templateContext }}:
+ ${{ if and(ne(context.key, 'outputParentDirectory'), ne(context.key, 'outputs')) }}:
+ ${{ context.key }}: ${{ context.value }}
+
+ ${{ each parameter in parameters }}:
+ ${{ if and(ne(parameter.key, 'templateContext'), ne(parameter.key, 'is1ESPipeline')) }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
-parameters:
- # Optional: dependencies of the job
- dependsOn: ''
-
- # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
- pool: ''
-
- CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
- GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
-
- SourcesDirectory: $(Build.SourcesDirectory)
- CreatePr: true
- AutoCompletePr: false
- ReusePr: true
- UseLfLineEndings: true
- UseCheckedInLocProjectJson: false
- SkipLocProjectJsonGeneration: false
- LanguageSet: VS_Main_Languages
- LclSource: lclFilesInRepo
- LclPackageId: ''
- RepoType: gitHub
- GitHubOrg: dotnet
- MirrorRepo: ''
- MirrorBranch: main
- condition: ''
- JobNameSuffix: ''
-
jobs:
-- job: OneLocBuild${{ parameters.JobNameSuffix }}
-
- dependsOn: ${{ parameters.dependsOn }}
-
- displayName: OneLocBuild${{ parameters.JobNameSuffix }}
-
- variables:
- - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
- - name: _GenerateLocProjectArguments
- value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
- -LanguageSet "${{ parameters.LanguageSet }}"
- -CreateNeutralXlfs
- - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
- - name: _GenerateLocProjectArguments
- value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
- - template: /eng/common/templates-official/variables/pool-providers.yml
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
- ${{ if eq(parameters.pool, '') }}:
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022-pt
- os: windows
-
- steps:
- - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
- - task: Powershell@2
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
- arguments: $(_GenerateLocProjectArguments)
- displayName: Generate LocProject.json
- condition: ${{ parameters.condition }}
-
- - task: OneLocBuild@2
- displayName: OneLocBuild
- env:
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- inputs:
- locProj: eng/Localize/LocProject.json
- outDir: $(Build.ArtifactStagingDirectory)
- lclSource: ${{ parameters.LclSource }}
- lclPackageId: ${{ parameters.LclPackageId }}
- isCreatePrSelected: ${{ parameters.CreatePr }}
- isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
- ${{ if eq(parameters.CreatePr, true) }}:
- isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- isShouldReusePrSelected: ${{ parameters.ReusePr }}
- packageSourceAuth: patAuth
- patVariable: ${{ parameters.CeapexPat }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- repoType: ${{ parameters.RepoType }}
- gitHubPatVariable: "${{ parameters.GithubPat }}"
- ${{ if ne(parameters.MirrorRepo, '') }}:
- isMirrorRepoSelected: true
- gitHubOrganization: ${{ parameters.GitHubOrg }}
- mirrorRepo: ${{ parameters.MirrorRepo }}
- mirrorBranch: ${{ parameters.MirrorBranch }}
- condition: ${{ parameters.condition }}
-
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish Localization Files
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc'
- PublishLocation: Container
- ArtifactName: Loc
- condition: ${{ parameters.condition }}
+- template: /eng/common/core-templates/job/onelocbuild.yml
+ parameters:
+ is1ESPipeline: true
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish LocProject.json
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/'
- PublishLocation: Container
- ArtifactName: Loc
- condition: ${{ parameters.condition }}
\ No newline at end of file
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
-parameters:
- configuration: 'Debug'
-
- # Optional: condition for the job to run
- condition: ''
-
- # Optional: 'true' if future jobs should run even if this job fails
- continueOnError: false
-
- # Optional: dependencies of the job
- dependsOn: ''
-
- # Optional: Include PublishBuildArtifacts task
- enablePublishBuildArtifacts: false
-
- # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
- pool: {}
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishUsingPipelines: false
-
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishAssetsImmediately: false
-
- artifactsPublishingAdditionalParameters: ''
-
- signingValidationAdditionalParameters: ''
-
jobs:
-- job: Asset_Registry_Publish
-
- dependsOn: ${{ parameters.dependsOn }}
- timeoutInMinutes: 150
-
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- displayName: Publish Assets
- ${{ else }}:
- displayName: Publish to Build Asset Registry
-
- variables:
- - template: /eng/common/templates-official/variables/pool-providers.yml
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: Publish-Build-Assets
- - group: AzureDevOps-Artifact-Feeds-Pats
- - name: runCodesignValidationInjection
- value: false
- # unconditional - needed for logs publishing (redactor tool version)
- - template: /eng/common/templates-official/post-build/common-variables.yml
-
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: NetCore1ESPool-Publishing-Internal
- image: windows.vs2019.amd64
- os: windows
- steps:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - checkout: self
- fetchDepth: 3
- clean: true
-
- - task: DownloadBuildArtifacts@0
- displayName: Download artifact
- inputs:
- artifactName: AssetManifests
- downloadPath: '$(Build.StagingDirectory)/Download'
- checkDownloadedFiles: true
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
-
- - task: NuGetAuthenticate@1
-
- - task: PowerShell@2
- displayName: Publish Build Assets
- inputs:
- filePath: eng\common\sdk-task.ps1
- arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
- /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
- /p:BuildAssetRegistryToken=$(MaestroAccessToken)
- /p:MaestroApiEndpoint=https://maestro.dot.net
- /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
- /p:OfficialBuildId=$(Build.BuildNumber)
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
-
- - task: powershell@2
- displayName: Create ReleaseConfigs Artifact
- inputs:
- targetType: inline
- script: |
- New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force
- $filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt"
- Add-Content -Path $filePath -Value $(BARBuildId)
- Add-Content -Path $filePath -Value "$(DefaultChannels)"
- Add-Content -Path $filePath -Value $(IsStableBuild)
-
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish ReleaseConfigs Artifact
- inputs:
- PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs'
- PublishLocation: Container
- ArtifactName: ReleaseConfigs
-
- - task: powershell@2
- displayName: Check if SymbolPublishingExclusionsFile.txt exists
- inputs:
- targetType: inline
- script: |
- $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
- if(Test-Path -Path $symbolExclusionfile)
- {
- Write-Host "SymbolExclusionFile exists"
- Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
- }
- else{
- Write-Host "Symbols Exclusion file does not exists"
- Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
- }
-
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish SymbolPublishingExclusionsFile Artifact
- condition: eq(variables['SymbolExclusionFile'], 'true')
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
- PublishLocation: Container
- ArtifactName: ReleaseConfigs
-
- - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- - template: /eng/common/templates-official/post-build/setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: PowerShell@2
- displayName: Publish Using Darc
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
- -PublishingInfraVersion 3
- -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
- -MaestroToken '$(MaestroApiAccessToken)'
- -WaitPublishingFinish true
- -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
- -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+- template: /eng/common/core-templates/job/publish-build-assets.yml
+ parameters:
+ is1ESPipeline: true
- - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- - template: /eng/common/templates-official/steps/publish-logs.yml
- parameters:
- JobLabel: 'Publish_Artifacts_Logs'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
-parameters:
- # This template adds arcade-powered source-build to CI. The template produces a server job with a
- # default ID 'Source_Build_Complete' to put in a dependency list if necessary.
-
- # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed.
- jobNamePrefix: 'Source_Build'
-
- # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
- # managed-only repositories. This is an object with these properties:
- #
- # name: ''
- # The name of the job. This is included in the job ID.
- # targetRID: ''
- # The name of the target RID to use, instead of the one auto-detected by Arcade.
- # nonPortable: false
- # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
- # linux-x64), and compiling against distro-provided packages rather than portable ones.
- # skipPublishValidation: false
- # Disables publishing validation. By default, a check is performed to ensure no packages are
- # published by source-build.
- # container: ''
- # A container to use. Runs in docker.
- # pool: {}
- # A pool to use. Runs directly on an agent.
- # buildScript: ''
- # Specifies the build script to invoke to perform the build in the repo. The default
- # './build.sh' should work for typical Arcade repositories, but this is customizable for
- # difficult situations.
- # jobProperties: {}
- # A list of job properties to inject at the top level, for potential extensibility beyond
- # container and pool.
- platform: {}
-
jobs:
-- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
- displayName: Source-Build (${{ parameters.platform.name }})
-
- ${{ each property in parameters.platform.jobProperties }}:
- ${{ property.key }}: ${{ property.value }}
-
- ${{ if ne(parameters.platform.container, '') }}:
- container: ${{ parameters.platform.container }}
-
- ${{ if eq(parameters.platform.pool, '') }}:
- # The default VM host AzDO pool. This should be capable of running Docker containers: almost all
- # source-build builds run in Docker, including the default managed platform.
- # /eng/common/templates-official/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
- pool:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
- demands: ImageOverride -equals build.ubuntu.1804.amd64
-
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
- image: 1es-mariner-2-pt
- os: linux
-
- ${{ if ne(parameters.platform.pool, '') }}:
- pool: ${{ parameters.platform.pool }}
-
- workspace:
- clean: all
+- template: /eng/common/core-templates/job/source-build.yml
+ parameters:
+ is1ESPipeline: true
- steps:
- - template: /eng/common/templates-official/steps/source-build.yml
- parameters:
- platform: ${{ parameters.platform }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
-parameters:
- runAsPublic: false
- sourceIndexPackageVersion: 1.0.1-20240129.2
- sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
- sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
- preSteps: []
- binlogPath: artifacts/log/Debug/Build.binlog
- condition: ''
- dependsOn: ''
- pool: ''
-
jobs:
-- job: SourceIndexStage1
- dependsOn: ${{ parameters.dependsOn }}
- condition: ${{ parameters.condition }}
- variables:
- - name: SourceIndexPackageVersion
- value: ${{ parameters.sourceIndexPackageVersion }}
- - name: SourceIndexPackageSource
- value: ${{ parameters.sourceIndexPackageSource }}
- - name: BinlogPath
- value: ${{ parameters.binlogPath }}
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: source-dot-net stage1 variables
- - template: /eng/common/templates-official/variables/pool-providers.yml
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
- ${{ if eq(parameters.pool, '') }}:
- pool:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- name: $(DncEngPublicBuildPool)
- image: windows.vs2022.amd64.open
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: $(DncEngInternalBuildPool)
- image: windows.vs2022.amd64
-
- steps:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
-
- - task: UseDotNet@2
- displayName: Use .NET 8 SDK
- inputs:
- packageType: sdk
- version: 8.0.x
- installationPath: $(Agent.TempDirectory)/dotnet
- workingDirectory: $(Agent.TempDirectory)
-
- - script: |
- $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- displayName: Download Tools
- # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
- workingDirectory: $(Agent.TempDirectory)
-
- - script: ${{ parameters.sourceIndexBuildCommand }}
- displayName: Build Repository
-
- - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
- displayName: Process Binlog into indexable sln
+- template: /eng/common/core-templates/job/source-index-stage1.yml
+ parameters:
+ is1ESPipeline: true
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name)
- displayName: Upload stage1 artifacts to source index
- env:
- BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url)
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
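# Sketch of how a repo would typically opt in to this job: not by calling the
# template directly, but through jobs.yml with enableSourceIndex, forwarding any
# overrides (here, the template's own default binlog path) via sourceIndexParams.
jobs:
- template: /eng/common/templates-official/jobs/jobs.yml
  parameters:
    enableSourceIndex: true
    sourceIndexParams:
      binlogPath: artifacts/log/Debug/Build.binlog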
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
-
jobs:
-- template: /eng/common/templates-official/jobs/jobs.yml
+- template: /eng/common/core-templates/jobs/codeql-build.yml
parameters:
- enableMicrobuild: false
- enablePublishBuildArtifacts: false
- enablePublishTestResults: false
- enablePublishBuildAssets: false
- enablePublishUsingPipelines: false
- enableTelemetry: true
+ is1ESPipeline: true
- variables:
- - group: Publish-Build-Assets
- # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
- # sync with the packages.config file.
- - name: DefaultGuardianVersion
- value: 0.109.0
- - name: GuardianPackagesConfigFile
- value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
- - name: GuardianVersion
- value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
-
- jobs: ${{ parameters.jobs }}
-
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
-
- # Optional: Include PublishBuildArtifacts task
- enablePublishBuildArtifacts: false
-
- # Optional: Enable publishing using release pipelines
- enablePublishUsingPipelines: false
-
- # Optional: Enable running the source-build jobs to build repo from source
- enableSourceBuild: false
-
- # Optional: Parameters for source-build template.
- # See /eng/common/templates-official/jobs/source-build.yml for options
- sourceBuildParameters: []
-
- graphFileGeneration:
- # Optional: Enable generating the graph files at the end of the build
- enabled: false
- # Optional: Include toolset dependencies in the generated graph files
- includeToolset: false
-
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
-
- # Optional: Override automatically derived dependsOn value for "publish build assets" job
- publishBuildAssetsDependsOn: ''
-
-  # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage.
- publishAssetsImmediately: false
-
-  # Optional: If publishAssetsImmediately is set and additional parameters are needed, use this to pass them along (these are normally sent to post-build.yml)
- artifactsPublishingAdditionalParameters: ''
- signingValidationAdditionalParameters: ''
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- enableSourceIndex: false
- sourceIndexParams: {}
-
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
jobs:
-- ${{ each job in parameters.jobs }}:
- - template: ../job/job.yml
- parameters:
- # pass along parameters
- ${{ each parameter in parameters }}:
- ${{ if ne(parameter.key, 'jobs') }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
- # pass along job properties
- ${{ each property in job }}:
- ${{ if ne(property.key, 'job') }}:
- ${{ property.key }}: ${{ property.value }}
-
- name: ${{ job.job }}
-
-- ${{ if eq(parameters.enableSourceBuild, true) }}:
- - template: /eng/common/templates-official/jobs/source-build.yml
- parameters:
- allCompletedJobId: Source_Build_Complete
- ${{ each parameter in parameters.sourceBuildParameters }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
-- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
- - template: ../job/source-index-stage1.yml
- parameters:
- runAsPublic: ${{ parameters.runAsPublic }}
- ${{ each parameter in parameters.sourceIndexParams }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
-- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
- - template: ../job/publish-build-assets.yml
- parameters:
- continueOnError: ${{ parameters.continueOnError }}
- dependsOn:
- - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.jobs }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.enableSourceBuild, true) }}:
- - Source_Build_Complete
+- template: /eng/common/core-templates/jobs/jobs.yml
+ parameters:
+ is1ESPipeline: true
- runAsPublic: ${{ parameters.runAsPublic }}
- publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
- publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
- enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
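# A minimal, assumed invocation of the wrapper above; the job name and its single
# build step are hypothetical, the parameters are the ones documented above.
jobs:
- template: /eng/common/templates-official/jobs/jobs.yml
  parameters:
    enableSourceBuild: true
    enablePublishBuildAssets: true
    jobs:
    - job: Windows_NT
      steps:
      - script: eng\common\cibuild.cmd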
-parameters:
- # This template adds arcade-powered source-build to CI. A job is created for each platform, as
- # well as an optional server job that completes when all platform jobs complete.
-
- # The name of the "join" job for all source-build platforms. If set to empty string, the job is
- # not included. Existing repo pipelines can use this job depend on all source-build jobs
- # completing without maintaining a separate list of every single job ID: just depend on this one
- # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
- allCompletedJobId: ''
-
- # See /eng/common/templates-official/job/source-build.yml
- jobNamePrefix: 'Source_Build'
-
- # This is the default platform provided by Arcade, intended for use by a managed-only repo.
- defaultManagedPlatform:
- name: 'Managed'
- container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'
-
- # Defines the platforms on which to run build jobs. One job is created for each platform, and the
- # object in this array is sent to the job template as 'platform'. If no platforms are specified,
- # one job runs on 'defaultManagedPlatform'.
- platforms: []
-
jobs:
+- template: /eng/common/core-templates/jobs/source-build.yml
+ parameters:
+ is1ESPipeline: true
-- ${{ if ne(parameters.allCompletedJobId, '') }}:
- - job: ${{ parameters.allCompletedJobId }}
- displayName: Source-Build Complete
- pool: server
- dependsOn:
- - ${{ each platform in parameters.platforms }}:
- - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
- - ${{ if eq(length(parameters.platforms), 0) }}:
- - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
-
-- ${{ each platform in parameters.platforms }}:
- - template: /eng/common/templates-official/job/source-build.yml
- parameters:
- jobNamePrefix: ${{ parameters.jobNamePrefix }}
- platform: ${{ platform }}
-
-- ${{ if eq(length(parameters.platforms), 0) }}:
- - template: /eng/common/templates-official/job/source-build.yml
- parameters:
- jobNamePrefix: ${{ parameters.jobNamePrefix }}
- platform: ${{ parameters.defaultManagedPlatform }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
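# Sketch of the contract documented above: one job per entry in 'platforms', plus
# the optional server-side join job. The platform entry is a hypothetical example;
# the join-job id is the name the parameter documentation recommends.
jobs:
- template: /eng/common/templates-official/jobs/source-build.yml
  parameters:
    allCompletedJobId: Source_Build_Complete
    platforms:
    - name: CentosStream8
      container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'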
variables:
- - group: Publish-Build-Assets
+- template: /eng/common/core-templates/post-build/common-variables.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: true
- # Whether the build is internal or not
- - name: IsInternalBuild
- value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
-
- # Default Maestro++ API Endpoint and API Version
- - name: MaestroApiEndPoint
- value: "https://maestro.dot.net"
- - name: MaestroApiAccessToken
- value: $(MaestroAccessToken)
- - name: MaestroApiVersion
- value: "2020-02-20"
-
- - name: SourceLinkCLIVersion
- value: 3.0.0
- - name: SymbolToolVersion
- value: 1.0.1
- - name: BinlogToolVersion
- value: 1.0.11
-
- - name: runCodesignValidationInjection
- value: false
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
-parameters:
- # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
- # Publishing V1 is no longer supported
- # Publishing V2 is no longer supported
- # Publishing V3 is the default
- - name: publishingInfraVersion
- displayName: Which version of publishing should be used to promote the build definition?
- type: number
- default: 3
- values:
- - 3
-
- - name: BARBuildId
- displayName: BAR Build Id
- type: number
- default: 0
-
- - name: PromoteToChannelIds
- displayName: Channel to promote BARBuildId to
- type: string
- default: ''
-
- - name: enableSourceLinkValidation
- displayName: Enable SourceLink validation
- type: boolean
- default: false
-
- - name: enableSigningValidation
- displayName: Enable signing validation
- type: boolean
- default: true
-
- - name: enableSymbolValidation
- displayName: Enable symbol validation
- type: boolean
- default: false
-
- - name: enableNugetValidation
- displayName: Enable NuGet validation
- type: boolean
- default: true
-
- - name: publishInstallersAndChecksums
- displayName: Publish installers and checksums
- type: boolean
- default: true
-
- - name: SDLValidationParameters
- type: object
- default:
- enable: false
- publishGdn: false
- continueOnError: false
- params: ''
- artifactNames: ''
- downloadArtifacts: true
-
- # These parameters let the user customize the call to sdk-task.ps1 for publishing
- # symbols & general artifacts as well as for signing validation
- - name: symbolPublishingAdditionalParameters
- displayName: Symbol publishing additional parameters
- type: string
- default: ''
-
- - name: artifactsPublishingAdditionalParameters
- displayName: Artifact publishing additional parameters
- type: string
- default: ''
-
- - name: signingValidationAdditionalParameters
- displayName: Signing validation additional parameters
- type: string
- default: ''
-
- # Which stages should finish execution before post-build stages start
- - name: validateDependsOn
- type: object
- default:
- - build
-
- - name: publishDependsOn
- type: object
- default:
- - Validate
-
- # Optional: Call asset publishing rather than running in a separate stage
- - name: publishAssetsImmediately
- type: boolean
- default: false
-
stages:
-- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- - stage: Validate
- dependsOn: ${{ parameters.validateDependsOn }}
- displayName: Validate Build Assets
- variables:
- - template: common-variables.yml
- - template: /eng/common/templates-official/variables/pool-providers.yml
- jobs:
- - job:
- displayName: NuGet Validation
- condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022-pt
- os: windows
-
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
- arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
- -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
-
- - job:
- displayName: Signing Validation
- condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022-pt
- os: windows
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
- itemPattern: |
- **
- !**/Microsoft.SourceBuild.Intermediate.*.nupkg
-
-    # This is necessary whenever we want to publish/restore to an AzDO private feed.
-    # Since sdk-task.ps1 tries to restore packages, we need to do this authentication here;
-    # otherwise it'll complain about accessing a private feed.
- - task: NuGetAuthenticate@1
- displayName: 'Authenticate to AzDO Feeds'
-
- # Signing validation will optionally work with the buildmanifest file which is downloaded from
- # Azure DevOps above.
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: eng\common\sdk-task.ps1
- arguments: -task SigningValidation -restore -msbuildEngine vs
- /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
- /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
- ${{ parameters.signingValidationAdditionalParameters }}
-
- - template: ../steps/publish-logs.yml
- parameters:
- StageLabel: 'Validation'
- JobLabel: 'Signing'
- BinlogToolVersion: $(BinlogToolVersion)
-
- - job:
- displayName: SourceLink Validation
- condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022-pt
- os: windows
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Blob Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: BlobArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
- arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
- -ExtractPath $(Agent.BuildDirectory)/Extract/
- -GHRepoName $(Build.Repository.Name)
- -GHCommit $(Build.SourceVersion)
- -SourcelinkCliVersion $(SourceLinkCLIVersion)
- continueOnError: true
-
-- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
- - stage: publish_using_darc
- ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- dependsOn: ${{ parameters.publishDependsOn }}
- ${{ else }}:
- dependsOn: ${{ parameters.validateDependsOn }}
- displayName: Publish using Darc
- variables:
- - template: common-variables.yml
- - template: /eng/common/templates-official/variables/pool-providers.yml
- jobs:
- - job:
- displayName: Publish Using Darc
- timeoutInMinutes: 120
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: NetCore1ESPool-Publishing-Internal
- image: windows.vs2019.amd64
- os: windows
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: NuGetAuthenticate@1
+- template: /eng/common/core-templates/post-build/post-build.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: true
- - task: PowerShell@2
- displayName: Publish Using Darc
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
- -PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
- -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
- -MaestroToken '$(MaestroApiAccessToken)'
- -WaitPublishingFinish true
- -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
- -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
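# A hedged example of wiring these post-build stages into a pipeline; the flag
# values merely exercise the parameters documented above and are not prescriptive.
stages:
- template: /eng/common/templates-official/post-build/post-build.yml
  parameters:
    publishingInfraVersion: 3
    enableNugetValidation: true
    enableSigningValidation: true
    enableSourceLinkValidation: false
    publishAssetsImmediately: false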
-parameters:
- BARBuildId: ''
- PromoteToChannelIds: ''
-
steps:
- - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download Release Configs
- inputs:
- buildType: current
- artifactName: ReleaseConfigs
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- name: setReleaseVars
- displayName: Set Release Configs Vars
- inputs:
- targetType: inline
- pwsh: true
- script: |
- try {
- if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
- $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
-
- $BarId = $Content | Select -Index 0
- $Channels = $Content | Select -Index 1
- $IsStableBuild = $Content | Select -Index 2
-
- $AzureDevOpsProject = $Env:System_TeamProject
- $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
- $AzureDevOpsBuildId = $Env:Build_BuildId
- }
- else {
- $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}"
-
- $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
- $apiHeaders.Add('Accept', 'application/json')
- $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}")
-
- $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
-
- $BarId = $Env:BARBuildId
- $Channels = $Env:PromoteToMaestroChannels -split ","
- $Channels = $Channels -join "]["
- $Channels = "[$Channels]"
-
- $IsStableBuild = $buildInfo.stable
- $AzureDevOpsProject = $buildInfo.azureDevOpsProject
- $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
- $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
- }
-
- Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
- Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
- Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
+- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: true
- Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
- Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
- Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
- }
- catch {
- Write-Host $_
- Write-Host $_.Exception
- Write-Host $_.ScriptStackTrace
- exit 1
- }
- env:
- MAESTRO_API_TOKEN: $(MaestroApiAccessToken)
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
-parameters:
- ChannelId: 0
-
steps:
-- task: PowerShell@2
- displayName: Add Build to Channel
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
- arguments: -BuildId $(BARBuildId)
- -ChannelId ${{ parameters.ChannelId }}
- -MaestroApiAccessToken $(MaestroApiAccessToken)
- -MaestroApiEndPoint $(MaestroApiEndPoint)
- -MaestroApiVersion $(MaestroApiVersion)
+- template: /eng/common/core-templates/steps/add-build-to-channel.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
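# Usage sketch; '123' is a placeholder, real ids come from the Maestro channel list.
steps:
- template: /eng/common/templates-official/steps/add-build-to-channel.yml
  parameters:
    ChannelId: 123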
-parameters:
- disableComponentGovernance: false
- componentGovernanceIgnoreDirectories: ''
-
steps:
-- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
- - script: "echo ##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
- displayName: Set skipComponentGovernanceDetection variable
-- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
- - task: ComponentGovernanceComponentDetection@0
- continueOnError: true
- inputs:
- ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
\ No newline at end of file
+- template: /eng/common/core-templates/steps/component-governance.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
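# Usage sketch; the ignored directory is a hypothetical example.
steps:
- template: /eng/common/templates-official/steps/component-governance.yml
  parameters:
    componentGovernanceIgnoreDirectories: '$(Build.SourcesDirectory)/artifacts/obj'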
--- /dev/null
+# Obtains internal runtime download credentials and, by default, populates the
+# 'dotnetbuilds-internal-container-read-token-base64' variable with the base64-encoded SAS token
+steps:
+- template: /eng/common/core-templates/steps/enable-internal-runtimes.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
--- /dev/null
+steps:
+- template: /eng/common/core-templates/steps/enable-internal-sources.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
-# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
-# PackageName - The name of the package this SBOM represents.
-# PackageVersion - The version of the package this SBOM represents.
-# ManifestDirPath - The path of the directory where the generated manifest files will be placed
-# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
-
-parameters:
- PackageVersion: 7.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
- PackageName: '.NET'
- ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
- IgnoreDirectories: ''
- sbomContinueOnError: true
-
steps:
-- task: PowerShell@2
-  displayName: Prep for SBOM generation (non-Linux)
- condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
- inputs:
- filePath: ./eng/common/generate-sbom-prep.ps1
- arguments: ${{parameters.manifestDirPath}}
-
-# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
-- script: |
- chmod +x ./eng/common/generate-sbom-prep.sh
- ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
-  displayName: Prep for SBOM generation (Linux)
- condition: eq(variables['Agent.Os'], 'Linux')
- continueOnError: ${{ parameters.sbomContinueOnError }}
-
-- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
- displayName: 'Generate SBOM manifest'
- continueOnError: ${{ parameters.sbomContinueOnError }}
- inputs:
- PackageName: ${{ parameters.packageName }}
- BuildDropPath: ${{ parameters.buildDropPath }}
- PackageVersion: ${{ parameters.packageVersion }}
- ManifestDirPath: ${{ parameters.manifestDirPath }}
- ${{ if ne(parameters.IgnoreDirectories, '') }}:
- AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
-
-- task: 1ES.PublishPipelineArtifact@1
- displayName: Publish SBOM manifest
- continueOnError: ${{parameters.sbomContinueOnError}}
- inputs:
- targetPath: '${{parameters.manifestDirPath}}'
- artifactName: $(ARTIFACT_NAME)
+- template: /eng/common/core-templates/steps/generate-sbom.yml
+ parameters:
+ is1ESPipeline: true
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
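# Sketch of overriding the SBOM defaults listed above; every value below other than
# the parameter names is an assumption for illustration.
steps:
- template: /eng/common/templates-official/steps/generate-sbom.yml
  parameters:
    PackageVersion: 9.0.0
    BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
    ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
    IgnoreDirectories: '$(Build.SourcesDirectory)/artifacts/obj'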
--- /dev/null
+steps:
+- template: /eng/common/core-templates/steps/get-delegation-sas.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
--- /dev/null
+steps:
+- template: /eng/common/core-templates/steps/get-federated-access-token.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
--- /dev/null
+parameters:
+- name: displayName
+ type: string
+ default: 'Publish to Build Artifact'
+
+- name: condition
+ type: string
+ default: succeeded()
+
+- name: artifactName
+ type: string
+
+- name: pathToPublish
+ type: string
+
+- name: continueOnError
+ type: boolean
+ default: false
+
+- name: publishLocation
+ type: string
+ default: 'Container'
+
+- name: is1ESPipeline
+ type: boolean
+ default: true
+
+steps:
+- ${{ if ne(parameters.is1ESPipeline, true) }}:
+ - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error
+- task: 1ES.PublishBuildArtifacts@1
+ displayName: ${{ parameters.displayName }}
+ condition: ${{ parameters.condition }}
+ ${{ if parameters.continueOnError }}:
+ continueOnError: ${{ parameters.continueOnError }}
+ inputs:
+ PublishLocation: ${{ parameters.publishLocation }}
+ PathtoPublish: ${{ parameters.pathToPublish }}
+ ${{ if parameters.artifactName }}:
+ ArtifactName: ${{ parameters.artifactName }}
+
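# Example call of the shim above; the artifact name and path are placeholders, and
# the template path is assumed to be eng/common/templates-official/steps/publish-build-artifacts.yml.
steps:
- template: /eng/common/templates-official/steps/publish-build-artifacts.yml
  parameters:
    displayName: Publish Logs
    pathToPublish: '$(Build.ArtifactStagingDirectory)/logs'
    artifactName: Logs
    continueOnError: true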
-parameters:
- StageLabel: ''
- JobLabel: ''
- CustomSensitiveDataList: ''
-  # A default, in case the value from eng/common/templates-official/post-build/common-variables.yml is not passed
- BinlogToolVersion: '1.0.11'
-
steps:
-- task: Powershell@2
- displayName: Prepare Binlogs to Upload
- inputs:
- targetType: inline
- script: |
- New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- continueOnError: true
- condition: always()
-
-- task: PowerShell@2
- displayName: Redact Logs
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1
-    # For now this needs an explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml
-    # Sensitive data can also be added to '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'.
-    # If the file exists, sensitive data for redaction will be sourced from it
-    # (single entry per line; lines starting with '# ' are considered comments and skipped).
- arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs'
- -BinlogToolVersion ${{parameters.BinlogToolVersion}}
- -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
- '$(publishing-dnceng-devdiv-code-r-build-re)'
- '$(MaestroAccessToken)'
- '$(dn-bot-all-orgs-artifact-feeds-rw)'
- '$(akams-client-id)'
- '$(akams-client-secret)'
- '$(microsoft-symbol-server-pat)'
- '$(symweb-symbol-server-pat)'
- '$(dn-bot-all-orgs-build-rw-code-rw)'
- ${{parameters.CustomSensitiveDataList}}
- continueOnError: true
- condition: always()
-
-- task: 1ES.PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
- PublishLocation: Container
- ArtifactName: PostBuildLogs
- continueOnError: true
- condition: always()
+- template: /eng/common/core-templates/steps/publish-logs.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
--- /dev/null
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: true
+
+- name: args
+ type: object
+ default: {}
+
+steps:
+- ${{ if ne(parameters.is1ESPipeline, true) }}:
+ - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error
+- task: 1ES.PublishPipelineArtifact@1
+ displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }}
+ ${{ if parameters.args.condition }}:
+ condition: ${{ parameters.args.condition }}
+ ${{ else }}:
+ condition: succeeded()
+ ${{ if parameters.args.continueOnError }}:
+ continueOnError: ${{ parameters.args.continueOnError }}
+ inputs:
+ targetPath: ${{ parameters.args.targetPath }}
+ ${{ if parameters.args.artifactName }}:
+ artifactName: ${{ parameters.args.artifactName }}
+ ${{ if parameters.args.properties }}:
+ properties: ${{ parameters.args.properties }}
+ ${{ if parameters.args.sbomEnabled }}:
+ sbomEnabled: ${{ parameters.args.sbomEnabled }}
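# Example call of the args-based shim above; everything inside 'args' is a
# placeholder, and the template path is an assumption mirroring the error-guard string.
steps:
- template: /eng/common/templates-official/steps/publish-pipeline-artifacts.yml
  parameters:
    args:
      targetPath: '$(Build.StagingDirectory)/BuildLogs'
      artifactName: BuildLogs
      continueOnError: true
      sbomEnabled: false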
-parameters:
- # Optional azure devops PAT with build execute permissions for the build's organization,
- # only needed if the build that should be retained ran on a different organization than
- # the pipeline where this template is executing from
- Token: ''
- # Optional BuildId to retain, defaults to the current running build
- BuildId: ''
- # Azure devops Organization URI for the build in the https://dev.azure.com/<organization> format.
- # Defaults to the organization the current pipeline is running on
- AzdoOrgUri: '$(System.CollectionUri)'
- # Azure devops project for the build. Defaults to the project the current pipeline is running on
- AzdoProject: '$(System.TeamProject)'
-
steps:
-  - task: PowerShell@2
- inputs:
- targetType: 'filePath'
- filePath: eng/common/retain-build.ps1
- pwsh: true
- arguments: >
-      -AzdoOrgUri ${{parameters.AzdoOrgUri}}
- -AzdoProject ${{parameters.AzdoProject}}
- -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
- -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
- displayName: Enable permanent build retention
- env:
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- BUILD_ID: $(Build.BuildId)
\ No newline at end of file
+- template: /eng/common/core-templates/steps/retain-build.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
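# Usage sketch: with no overrides the template retains the current build using the
# pipeline's own organization, project, and $(System.AccessToken); Token and BuildId
# only need to be passed for cross-organization scenarios.
steps:
- template: /eng/common/templates-official/steps/retain-build.yml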
-# Please remember to update the documentation if you make changes to these parameters!
-parameters:
- HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
- HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
- HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
- HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
- HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
- HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY
- HelixProjectArguments: '' # optional -- arguments passed to the build command
- HelixConfiguration: '' # optional -- additional property attached to a job
- HelixPreCommands: '' # optional -- commands to run before Helix work item execution
- HelixPostCommands: '' # optional -- commands to run after Helix work item execution
- WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
- WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
- WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
- CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
- XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
- XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
- XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
- XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
- XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
- IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
- DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
- DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
- WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
- IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
- HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
- Creator: '' # optional -- if the build is external, use this to specify who is sending the job
- DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
- condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
- continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
-
steps:
- - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
- displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixConfiguration: ${{ parameters.HelixConfiguration }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- WorkItemCommand: ${{ parameters.WorkItemCommand }}
- WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- XUnitProjects: ${{ parameters.XUnitProjects }}
- XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
- XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
- XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
- XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
- displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixConfiguration: ${{ parameters.HelixConfiguration }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- WorkItemCommand: ${{ parameters.WorkItemCommand }}
- WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- XUnitProjects: ${{ parameters.XUnitProjects }}
- XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
- XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
- XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
- XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
+- template: /eng/common/core-templates/steps/send-to-helix.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
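# A hedged xUnit example of the Helix parameters documented above: XUnitProjects
# requires XUnitPublishTargetFramework, XUnitRuntimeTargetFramework,
# XUnitRunnerVersion and IncludeDotNetCli=true; queue names and versions below are
# placeholders.
steps:
- template: /eng/common/templates-official/steps/send-to-helix.yml
  parameters:
    HelixSource: pr/dotnet/example
    HelixType: test/product/
    HelixTargetQueues: Windows.10.Amd64.Open;Ubuntu.2204.Amd64.Open
    Creator: dotnet-bot
    XUnitProjects: $(Build.SourcesDirectory)/tests/**/*Tests.csproj
    XUnitPublishTargetFramework: net8.0
    XUnitRuntimeTargetFramework: net8.0
    XUnitRunnerVersion: 2.4.2
    IncludeDotNetCli: true
    DotNetCliPackageType: sdk
    DotNetCliVersion: 8.0.100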
-parameters:
- # This template adds arcade-powered source-build to CI.
-
- # This is a 'steps' template, and is intended for advanced scenarios where the existing build
- # infra has a careful build methodology that must be followed. For example, a repo
- # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
- # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
- # GitHub. Using this steps template leaves room for that infra to be included.
-
- # Defines the platform on which to run the steps. See 'eng/common/templates-official/job/source-build.yml'
- # for details. The entire object is described in the 'job' template for simplicity, even though
- # the usage of the properties on this object is split between the 'job' and 'steps' templates.
- platform: {}
-
steps:
-# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
-- script: |
- set -x
- df -h
-
- # If building on the internal project, the artifact feeds variable may be available (usually only if needed)
- # In that case, call the feed setup script to add internal feeds corresponding to public ones.
- # In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
-  # This is because SetupNugetSources.sh will alter the current NuGet.config file, and we need to preserve those
- # changes.
- internalRestoreArgs=
- if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
- # Temporarily work around https://github.com/dotnet/arcade/issues/7709
- chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
- $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
- internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
-
- # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
- # This only works if there is a username/email configured, which won't be the case in most CI runs.
- git config --get user.email
- if [ $? -ne 0 ]; then
- git config user.email dn-bot@microsoft.com
- git config user.name dn-bot
- fi
- fi
-
- # If building on the internal project, the internal storage variable may be available (usually only if needed)
- # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
- # in the default public locations.
- internalRuntimeDownloadArgs=
- if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
- internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
- fi
-
- buildConfig=Release
- # Check if AzDO substitutes in a build config from a variable, and use it if so.
- if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
- buildConfig='$(_BuildConfig)'
- fi
-
- officialBuildArgs=
- if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
- officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
- fi
-
- targetRidArgs=
- if [ '${{ parameters.platform.targetRID }}' != '' ]; then
- targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
- fi
-
- runtimeOsArgs=
- if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
- runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
- fi
-
- baseOsArgs=
- if [ '${{ parameters.platform.baseOS }}' != '' ]; then
- baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
- fi
-
- publishArgs=
- if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
- publishArgs='--publish'
- fi
-
- assetManifestFileName=SourceBuild_RidSpecific.xml
- if [ '${{ parameters.platform.name }}' != '' ]; then
- assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
- fi
-
- ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
- --configuration $buildConfig \
- --restore --build --pack $publishArgs -bl \
- $officialBuildArgs \
- $internalRuntimeDownloadArgs \
- $internalRestoreArgs \
- $targetRidArgs \
- $runtimeOsArgs \
- $baseOsArgs \
- /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
- /p:ArcadeBuildFromSource=true \
- /p:DotNetBuildSourceOnly=true \
- /p:DotNetBuildRepo=true \
- /p:AssetManifestFileName=$assetManifestFileName
- displayName: Build
-
-# Upload build logs for diagnosis.
-- task: CopyFiles@2
- displayName: Prepare BuildLogs staging directory
- inputs:
- SourceFolder: '$(Build.SourcesDirectory)'
- Contents: |
- **/*.log
- **/*.binlog
- artifacts/sb/prebuilt-report/**
- TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
- CleanTargetFolder: true
- continueOnError: true
- condition: succeededOrFailed()
-
-- task: 1ES.PublishPipelineArtifact@1
- displayName: Publish BuildLogs
- inputs:
- targetPath: '$(Build.StagingDirectory)/BuildLogs'
- artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
- continueOnError: true
- condition: succeededOrFailed()
+- template: /eng/common/core-templates/steps/source-build.yml
+ parameters:
+ is1ESPipeline: true
-# Manually inject component detection so that we can ignore the source build upstream cache, which contains
-# a nupkg cache of input packages (a local feed).
-# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
-# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
-- task: ComponentGovernanceComponentDetection@0
- displayName: Component Detection (Exclude upstream cache)
- inputs:
- ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
#
# pool:
# name: $(DncEngInternalBuildPool)
-# image: 1es-windows-2022-pt
+# image: 1es-windows-2022
variables:
# Coalesce the target and source branches so we know when a PR targets a release branch
+++ /dev/null
-parameters:
- enable: 'false' # Whether the SDL validation job should execute or not
- overrideParameters: '' # Optional: to override values for parameters.
- additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
- # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
- # diagnosis of problems with specific tool configurations.
- publishGuardianDirectoryToPipeline: false
- # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
- # parameters rather than relying on YAML. It may be better to use a local script, because you can
- # reproduce results locally without piecing together a command based on the YAML.
- executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
-  # There is a reported bug in Azure DevOps where, if this parameter is named
-  # 'continueOnError', the parameter value is not correctly picked up.
-  # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter.
- sdlContinueOnError: false # optional: determines whether to continue the build if the step errors;
- # optional: determines if build artifacts should be downloaded.
- downloadArtifacts: true
- # optional: determines if this job should search the directory of downloaded artifacts for
- # 'tar.gz' and 'zip' archive files and extract them before running SDL validation tasks.
- extractArchiveArtifacts: false
- dependsOn: '' # Optional: dependencies of the job
- artifactNames: '' # Optional: patterns supplied to DownloadBuildArtifacts
- # Usage:
- # artifactNames:
- # - 'BlobArtifacts'
- # - 'Artifacts_Windows_NT_Release'
- # Optional: download a list of pipeline artifacts. 'downloadArtifacts' controls build artifacts,
- # not pipeline artifacts, so doesn't affect the use of this parameter.
- pipelineArtifactNames: []
-
-jobs:
-- job: Run_SDL
- dependsOn: ${{ parameters.dependsOn }}
- displayName: Run SDL tool
- condition: and(succeededOrFailed(), eq( ${{ parameters.enable }}, 'true'))
- variables:
- - group: DotNet-VSTS-Bot
- - name: AzDOProjectName
- value: ${{ parameters.AzDOProjectName }}
- - name: AzDOPipelineId
- value: ${{ parameters.AzDOPipelineId }}
- - name: AzDOBuildId
- value: ${{ parameters.AzDOBuildId }}
- - template: /eng/common/templates/variables/sdl-variables.yml
- - name: GuardianVersion
- value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
- - template: /eng/common/templates/variables/pool-providers.yml
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
- - checkout: self
- clean: true
-
- # If the template caller didn't provide an AzDO parameter, set them all up as Maestro vars.
- - ${{ if not(and(parameters.AzDOProjectName, parameters.AzDOPipelineId, parameters.AzDOBuildId)) }}:
- - template: /eng/common/templates/post-build/setup-maestro-vars.yml
-
- - ${{ if ne(parameters.downloadArtifacts, 'false')}}:
- - ${{ if ne(parameters.artifactNames, '') }}:
- - ${{ each artifactName in parameters.artifactNames }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download Build Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: ${{ artifactName }}
- downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
- checkDownloadedFiles: true
- - ${{ if eq(parameters.artifactNames, '') }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download Build Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- downloadType: specific files
- itemPattern: "**"
- downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
- checkDownloadedFiles: true
-
- - ${{ each artifactName in parameters.pipelineArtifactNames }}:
- - task: DownloadPipelineArtifact@2
- displayName: Download Pipeline Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: ${{ artifactName }}
- downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
- checkDownloadedFiles: true
-
- - powershell: eng/common/sdl/trim-assets-version.ps1
- -InputPath $(Build.ArtifactStagingDirectory)\artifacts
- displayName: Trim the version from the NuGet packages
- continueOnError: ${{ parameters.sdlContinueOnError }}
-
- - powershell: eng/common/sdl/extract-artifact-packages.ps1
- -InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
- -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
- displayName: Extract Blob Artifacts
- continueOnError: ${{ parameters.sdlContinueOnError }}
-
- - powershell: eng/common/sdl/extract-artifact-packages.ps1
- -InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
- -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
- displayName: Extract Package Artifacts
- continueOnError: ${{ parameters.sdlContinueOnError }}
-
- - ${{ if ne(parameters.extractArchiveArtifacts, 'false') }}:
- - powershell: eng/common/sdl/extract-artifact-archives.ps1
- -InputPath $(Build.ArtifactStagingDirectory)\artifacts
- -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts
- displayName: Extract Archive Artifacts
- continueOnError: ${{ parameters.sdlContinueOnError }}
-
- - template: /eng/common/templates/steps/execute-sdl.yml
- parameters:
- overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }}
- executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }}
- overrideParameters: ${{ parameters.overrideParameters }}
- additionalParameters: ${{ parameters.additionalParameters }}
- publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }}
- sdlContinueOnError: ${{ parameters.sdlContinueOnError }}
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
-parameters:
-# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- cancelTimeoutInMinutes: ''
- condition: ''
- container: ''
- continueOnError: false
- dependsOn: ''
- displayName: ''
- pool: ''
- steps: []
- strategy: ''
- timeoutInMinutes: ''
- variables: []
- workspace: ''
- templateContext: ''
-
-# Job base template specific parameters
- # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
- artifacts: ''
- enableMicrobuild: false
+parameters:
enablePublishBuildArtifacts: false
- enablePublishBuildAssets: false
- enablePublishTestResults: false
- enablePublishUsingPipelines: false
- enableBuildRetry: false
- disableComponentGovernance: ''
- componentGovernanceIgnoreDirectories: ''
- mergeTestResults: false
- testRunTitle: ''
- testResultsFormat: ''
- name: ''
- preSteps: []
- runAsPublic: false
-# Sbom related params
- enableSbom: true
- PackageVersion: 7.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
jobs:
-- job: ${{ parameters.name }}
-
- ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
- cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
-
- ${{ if ne(parameters.condition, '') }}:
- condition: ${{ parameters.condition }}
-
- ${{ if ne(parameters.container, '') }}:
- container: ${{ parameters.container }}
-
- ${{ if ne(parameters.continueOnError, '') }}:
- continueOnError: ${{ parameters.continueOnError }}
-
- ${{ if ne(parameters.dependsOn, '') }}:
- dependsOn: ${{ parameters.dependsOn }}
-
- ${{ if ne(parameters.displayName, '') }}:
- displayName: ${{ parameters.displayName }}
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
-
- ${{ if ne(parameters.strategy, '') }}:
- strategy: ${{ parameters.strategy }}
-
- ${{ if ne(parameters.timeoutInMinutes, '') }}:
- timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
-
- ${{ if ne(parameters.templateContext, '') }}:
- templateContext: ${{ parameters.templateContext }}
-
- variables:
- - ${{ if ne(parameters.enableTelemetry, 'false') }}:
- - name: DOTNET_CLI_TELEMETRY_PROFILE
- value: '$(Build.Repository.Uri)'
- - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
- - name: EnableRichCodeNavigation
- value: 'true'
- # Retry signature validation up to three times, waiting 2 seconds between attempts.
- # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
- - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
- value: 3,2000
- - ${{ each variable in parameters.variables }}:
- # handle name-value variable syntax
- # example:
- # - name: [key]
- # value: [value]
- - ${{ if ne(variable.name, '') }}:
- - name: ${{ variable.name }}
- value: ${{ variable.value }}
-
- # handle variable groups
- - ${{ if ne(variable.group, '') }}:
- - group: ${{ variable.group }}
-
- # handle template variable syntax
- # example:
- # - template: path/to/template.yml
- # parameters:
- # [key]: [value]
- - ${{ if ne(variable.template, '') }}:
- - template: ${{ variable.template }}
- ${{ if ne(variable.parameters, '') }}:
- parameters: ${{ variable.parameters }}
-
- # handle key-value variable syntax.
- # example:
- # - [key]: [value]
- - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
- - ${{ each pair in variable }}:
- - name: ${{ pair.key }}
- value: ${{ pair.value }}
-
- # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
- - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: DotNet-HelixApi-Access
-
- ${{ if ne(parameters.workspace, '') }}:
- workspace: ${{ parameters.workspace }}
-
- steps:
- - ${{ if ne(parameters.preSteps, '') }}:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - task: MicroBuildSigningPlugin@3
- displayName: Install MicroBuild plugin
- inputs:
- signType: $(_SignType)
- zipSources: false
- feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
- env:
- TeamName: $(_TeamName)
- continueOnError: ${{ parameters.continueOnError }}
- condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
- - task: NuGetAuthenticate@1
-
- - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
- - task: DownloadPipelineArtifact@2
- inputs:
- buildType: current
- artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
- targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
- itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
-
- - ${{ each step in parameters.steps }}:
- - ${{ step }}
-
- - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
- - task: RichCodeNavIndexer@0
- displayName: RichCodeNav Upload
- inputs:
- languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
- environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }}
- richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
- uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
- continueOnError: true
-
- - template: /eng/common/templates/steps/component-governance.yml
- parameters:
- ${{ if eq(parameters.disableComponentGovernance, '') }}:
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
- disableComponentGovernance: false
- ${{ else }}:
- disableComponentGovernance: true
- ${{ else }}:
- disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
- componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: MicroBuildCleanup@1
- displayName: Execute Microbuild cleanup tasks
- condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- env:
- TeamName: $(_TeamName)
-
- - ${{ if ne(parameters.artifacts.publish, '') }}:
- - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- - task: CopyFiles@2
- displayName: Gather binaries for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/bin'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
- - task: CopyFiles@2
- displayName: Gather packages for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/packages'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
- - task: PublishBuildArtifacts@1
- displayName: Publish pipeline artifacts
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
- continueOnError: true
- condition: always()
- - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- - publish: artifacts/log
- artifact: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
- displayName: Publish logs
- continueOnError: true
- condition: always()
-
- - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
- - task: PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
- continueOnError: true
- condition: always()
-
- - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
- - task: PublishTestResults@2
- displayName: Publish XUnit Test Results
- inputs:
- testResultsFormat: 'xUnit'
- testResultsFiles: '*.xml'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
- testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
- mergeTestResults: ${{ parameters.mergeTestResults }}
- continueOnError: true
- condition: always()
- - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
- - task: PublishTestResults@2
- displayName: Publish TRX Test Results
- inputs:
- testResultsFormat: 'VSTest'
- testResultsFiles: '*.trx'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
- testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
- mergeTestResults: ${{ parameters.mergeTestResults }}
- continueOnError: true
- condition: always()
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
- - template: /eng/common/templates/steps/generate-sbom.yml
- parameters:
- PackageVersion: ${{ parameters.packageVersion}}
- BuildDropPath: ${{ parameters.buildDropPath }}
- IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
- - publish: $(Build.SourcesDirectory)\eng\common\BuildConfiguration
- artifact: BuildConfiguration
- displayName: Publish build retry configuration
- continueOnError: true
+- template: /eng/common/core-templates/job/job.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ if and(ne(parameter.key, 'steps'), ne(parameter.key, 'is1ESPipeline')) }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+ steps:
+ - ${{ each step in parameters.steps }}:
+ - ${{ step }}
+
+ artifactPublishSteps:
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ displayName: Publish pipeline artifacts
+ pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
+ publishLocation: Container
+          artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name, 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
+ continueOnError: true
+ condition: always()
+ - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log'
+ artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
+ displayName: 'Publish logs'
+ continueOnError: true
+ condition: always()
+ sbomEnabled: false # we don't need SBOM for logs
+
+ - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
+ - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ displayName: Publish Logs
+ pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
+ publishLocation: Container
+          artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)') }}
+ continueOnError: true
+ condition: always()
+
+ - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: false
+ args:
+ targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration'
+ artifactName: 'BuildConfiguration'
+ displayName: 'Publish build retry configuration'
+ continueOnError: true
+ sbomEnabled: false # we don't need SBOM for BuildConfiguration
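
The rewritten job.yml above is now a thin shim over the shared core template: it forwards every caller parameter except `steps` and `is1ESPipeline`, passes the steps list through verbatim, and supplies the artifact-publishing steps explicitly. Existing callers should keep working unchanged. A minimal sketch of such a caller, exercising the four variable syntaxes the removed template documented (the repo-specific names and the build entry point are illustrative, not taken from this diff):

```yaml
# Illustrative caller of the shimmed job.yml; names marked below are hypothetical.
jobs:
- template: /eng/common/templates/job/job.yml
  parameters:
    name: Windows_NT_Build            # becomes the job ID
    displayName: Build (Windows)
    pool:
      vmImage: windows-latest
    variables:
    - name: _BuildConfig              # name/value syntax
      value: Release
    - group: MySecretGroup            # variable-group syntax (hypothetical group)
    - template: /eng/variables.yml    # variable-template syntax (hypothetical path)
    - _SignType: test                 # bare key/value syntax
    steps:
    - script: eng\common\cibuild.cmd  # assumed build entry point
```
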
-parameters:
- # Optional: dependencies of the job
- dependsOn: ''
-
- # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
- pool: ''
-
- CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
- GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
-
- SourcesDirectory: $(Build.SourcesDirectory)
- CreatePr: true
- AutoCompletePr: false
- ReusePr: true
- UseLfLineEndings: true
- UseCheckedInLocProjectJson: false
- SkipLocProjectJsonGeneration: false
- LanguageSet: VS_Main_Languages
- LclSource: lclFilesInRepo
- LclPackageId: ''
- RepoType: gitHub
- GitHubOrg: dotnet
- MirrorRepo: ''
- MirrorBranch: main
- condition: ''
- JobNameSuffix: ''
-
jobs:
-- job: OneLocBuild${{ parameters.JobNameSuffix }}
-
- dependsOn: ${{ parameters.dependsOn }}
-
- displayName: OneLocBuild${{ parameters.JobNameSuffix }}
-
- variables:
- - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
- - name: _GenerateLocProjectArguments
- value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
- -LanguageSet "${{ parameters.LanguageSet }}"
- -CreateNeutralXlfs
- - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
- - name: _GenerateLocProjectArguments
- value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
- - template: /eng/common/templates/variables/pool-providers.yml
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
- ${{ if eq(parameters.pool, '') }}:
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
-
- steps:
- - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
- - task: Powershell@2
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
- arguments: $(_GenerateLocProjectArguments)
- displayName: Generate LocProject.json
- condition: ${{ parameters.condition }}
-
- - task: OneLocBuild@2
- displayName: OneLocBuild
- env:
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- inputs:
- locProj: eng/Localize/LocProject.json
- outDir: $(Build.ArtifactStagingDirectory)
- lclSource: ${{ parameters.LclSource }}
- lclPackageId: ${{ parameters.LclPackageId }}
- isCreatePrSelected: ${{ parameters.CreatePr }}
- isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
- ${{ if eq(parameters.CreatePr, true) }}:
- isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- isShouldReusePrSelected: ${{ parameters.ReusePr }}
- packageSourceAuth: patAuth
- patVariable: ${{ parameters.CeapexPat }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- repoType: ${{ parameters.RepoType }}
- gitHubPatVariable: "${{ parameters.GithubPat }}"
- ${{ if ne(parameters.MirrorRepo, '') }}:
- isMirrorRepoSelected: true
- gitHubOrganization: ${{ parameters.GitHubOrg }}
- mirrorRepo: ${{ parameters.MirrorRepo }}
- mirrorBranch: ${{ parameters.MirrorBranch }}
- condition: ${{ parameters.condition }}
-
- - task: PublishBuildArtifacts@1
- displayName: Publish Localization Files
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc'
- PublishLocation: Container
- ArtifactName: Loc
- condition: ${{ parameters.condition }}
+- template: /eng/common/core-templates/job/onelocbuild.yml
+ parameters:
+ is1ESPipeline: false
- - task: PublishBuildArtifacts@1
- displayName: Publish LocProject.json
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/'
- PublishLocation: Container
- ArtifactName: Loc
- condition: ${{ parameters.condition }}
\ No newline at end of file
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
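
The onelocbuild shim forwards everything, so the localization parameters declared in the removed block remain the public surface. A hedged example of a caller that mirrors localized files (the repo name is a placeholder):

```yaml
jobs:
- template: /eng/common/templates/job/onelocbuild.yml
  parameters:
    LanguageSet: VS_Main_Languages
    MirrorRepo: my-repo      # placeholder: GitHub repo to mirror loc files to
    MirrorBranch: main
```
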
-parameters:
- configuration: 'Debug'
-
- # Optional: condition for the job to run
- condition: ''
-
- # Optional: 'true' if future jobs should run even if this job fails
- continueOnError: false
-
- # Optional: dependencies of the job
- dependsOn: ''
-
- # Optional: Include PublishBuildArtifacts task
- enablePublishBuildArtifacts: false
-
- # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
- pool: {}
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishUsingPipelines: false
-
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishAssetsImmediately: false
-
- artifactsPublishingAdditionalParameters: ''
-
- signingValidationAdditionalParameters: ''
-
jobs:
-- job: Asset_Registry_Publish
-
- dependsOn: ${{ parameters.dependsOn }}
- timeoutInMinutes: 150
-
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- displayName: Publish Assets
- ${{ else }}:
- displayName: Publish to Build Asset Registry
-
- variables:
- - template: /eng/common/templates/variables/pool-providers.yml
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: Publish-Build-Assets
- - group: AzureDevOps-Artifact-Feeds-Pats
- - name: runCodesignValidationInjection
- value: false
- # unconditional - needed for logs publishing (redactor tool version)
- - template: /eng/common/templates/post-build/common-variables.yml
-
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: NetCore1ESPool-Publishing-Internal
- demands: ImageOverride -equals windows.vs2019.amd64
-
- steps:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - checkout: self
- fetchDepth: 3
- clean: true
-
- - task: DownloadBuildArtifacts@0
- displayName: Download artifact
- inputs:
- artifactName: AssetManifests
- downloadPath: '$(Build.StagingDirectory)/Download'
- checkDownloadedFiles: true
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
-
- - task: NuGetAuthenticate@1
-
- - task: PowerShell@2
- displayName: Publish Build Assets
- inputs:
- filePath: eng\common\sdk-task.ps1
- arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
- /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
- /p:BuildAssetRegistryToken=$(MaestroAccessToken)
- /p:MaestroApiEndpoint=https://maestro.dot.net
- /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
- /p:OfficialBuildId=$(Build.BuildNumber)
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
-
- - task: powershell@2
- displayName: Create ReleaseConfigs Artifact
- inputs:
- targetType: inline
- script: |
- Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId)
- Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)"
- Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild)
-
- - task: PublishBuildArtifacts@1
- displayName: Publish ReleaseConfigs Artifact
- inputs:
- PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt'
- PublishLocation: Container
- ArtifactName: ReleaseConfigs
-
- - task: powershell@2
- displayName: Check if SymbolPublishingExclusionsFile.txt exists
- inputs:
- targetType: inline
- script: |
- $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
- if(Test-Path -Path $symbolExclusionfile)
- {
- Write-Host "SymbolExclusionFile exists"
- Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
- }
- else{
- Write-Host "Symbols Exclusion file does not exists"
- Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
- }
-
- - task: PublishBuildArtifacts@1
- displayName: Publish SymbolPublishingExclusionsFile Artifact
- condition: eq(variables['SymbolExclusionFile'], 'true')
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
- PublishLocation: Container
- ArtifactName: ReleaseConfigs
-
- - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- - template: /eng/common/templates/post-build/setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: PowerShell@2
- displayName: Publish Using Darc
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
- -PublishingInfraVersion 3
- -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
- -MaestroToken '$(MaestroApiAccessToken)'
- -WaitPublishingFinish true
- -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
- -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+- template: /eng/common/core-templates/job/publish-build-assets.yml
+ parameters:
+ is1ESPipeline: false
- - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- - template: /eng/common/templates/steps/publish-logs.yml
- parameters:
- JobLabel: 'Publish_Artifacts_Logs'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
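
The ReleaseConfigs.txt written by the removed steps is read back positionally by setup-maestro-vars.yml later in this diff: line 1 is the BAR build id, line 2 the default channel list, line 3 the stable-build flag. An illustrative file (all values invented):

```text
12345
[123][456]
False
```
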
-parameters:
- # This template adds arcade-powered source-build to CI. The template produces a server job with a
- # default ID 'Source_Build_Complete' to put in a dependency list if necessary.
-
- # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed.
- jobNamePrefix: 'Source_Build'
-
- # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
- # managed-only repositories. This is an object with these properties:
- #
- # name: ''
- # The name of the job. This is included in the job ID.
- # targetRID: ''
- # The name of the target RID to use, instead of the one auto-detected by Arcade.
- # nonPortable: false
- # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
- # linux-x64), and compiling against distro-provided packages rather than portable ones.
- # skipPublishValidation: false
- # Disables publishing validation. By default, a check is performed to ensure no packages are
- # published by source-build.
- # container: ''
- # A container to use. Runs in docker.
- # pool: {}
- # A pool to use. Runs directly on an agent.
- # buildScript: ''
- # Specifies the build script to invoke to perform the build in the repo. The default
- # './build.sh' should work for typical Arcade repositories, but this is customizable for
- # difficult situations.
- # jobProperties: {}
- # A list of job properties to inject at the top level, for potential extensibility beyond
- # container and pool.
- platform: {}
-
jobs:
-- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
- displayName: Source-Build (${{ parameters.platform.name }})
-
- ${{ each property in parameters.platform.jobProperties }}:
- ${{ property.key }}: ${{ property.value }}
-
- ${{ if ne(parameters.platform.container, '') }}:
- container: ${{ parameters.platform.container }}
-
- ${{ if eq(parameters.platform.pool, '') }}:
- # The default VM host AzDO pool. This should be capable of running Docker containers: almost all
- # source-build builds run in Docker, including the default managed platform.
- # /eng/common/templates/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
- pool:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
- demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open
-
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
- demands: ImageOverride -equals Build.Ubuntu.1804.Amd64
-
- ${{ if ne(parameters.platform.pool, '') }}:
- pool: ${{ parameters.platform.pool }}
-
- workspace:
- clean: all
+- template: /eng/common/core-templates/job/source-build.yml
+ parameters:
+ is1ESPipeline: false
- steps:
- - template: /eng/common/templates/steps/source-build.yml
- parameters:
- platform: ${{ parameters.platform }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
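
The `platform` object documented in the removed comment block is still the contract of the core template. A sketch of a non-portable platform entry, reusing the fedora.32-x64 example from the removed docs (the container tag is hypothetical):

```yaml
jobs:
- template: /eng/common/templates/job/source-build.yml
  parameters:
    platform:
      name: Fedora                  # included in the job ID
      targetRID: fedora.32-x64      # RID example from the removed docs
      nonPortable: true
      container: mcr.microsoft.com/dotnet-buildtools/prereqs:fedora-32   # hypothetical tag
      buildScript: ./build.sh
```
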
-parameters:
- runAsPublic: false
- sourceIndexPackageVersion: 1.0.1-20240129.2
- sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
- sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
- preSteps: []
- binlogPath: artifacts/log/Debug/Build.binlog
- condition: ''
- dependsOn: ''
- pool: ''
-
jobs:
-- job: SourceIndexStage1
- dependsOn: ${{ parameters.dependsOn }}
- condition: ${{ parameters.condition }}
- variables:
- - name: SourceIndexPackageVersion
- value: ${{ parameters.sourceIndexPackageVersion }}
- - name: SourceIndexPackageSource
- value: ${{ parameters.sourceIndexPackageSource }}
- - name: BinlogPath
- value: ${{ parameters.binlogPath }}
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: source-dot-net stage1 variables
- - template: /eng/common/templates/variables/pool-providers.yml
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
- ${{ if eq(parameters.pool, '') }}:
- pool:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- name: $(DncEngPublicBuildPool)
- demands: ImageOverride -equals windows.vs2022.amd64.open
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2022.amd64
-
- steps:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
-
- - task: UseDotNet@2
- displayName: Use .NET 8 SDK
- inputs:
- packageType: sdk
- version: 8.0.x
- installationPath: $(Agent.TempDirectory)/dotnet
- workingDirectory: $(Agent.TempDirectory)
-
- - script: |
- $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- displayName: Download Tools
- # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
- workingDirectory: $(Agent.TempDirectory)
-
- - script: ${{ parameters.sourceIndexBuildCommand }}
- displayName: Build Repository
-
- - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
- displayName: Process Binlog into indexable sln
+- template: /eng/common/core-templates/job/source-index-stage1.yml
+ parameters:
+ is1ESPipeline: false
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name)
- displayName: Upload stage1 artifacts to source index
- env:
- BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url)
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
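
Callers of source-index-stage1.yml keep the same knobs the removed parameter block declared; for instance, a repo whose binlog lands somewhere non-default might pass (paths illustrative):

```yaml
jobs:
- template: /eng/common/templates/job/source-index-stage1.yml
  parameters:
    binlogPath: artifacts/log/Debug/MyRepo.binlog   # hypothetical binlog location
    sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
```
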
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
-
jobs:
-- template: /eng/common/templates/jobs/jobs.yml
+- template: /eng/common/core-templates/jobs/codeql-build.yml
parameters:
- enableMicrobuild: false
- enablePublishBuildArtifacts: false
- enablePublishTestResults: false
- enablePublishBuildAssets: false
- enablePublishUsingPipelines: false
- enableTelemetry: true
+ is1ESPipeline: false
- variables:
- - group: Publish-Build-Assets
- # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
- # sync with the packages.config file.
- - name: DefaultGuardianVersion
- value: 0.109.0
- - name: GuardianPackagesConfigFile
- value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
- - name: GuardianVersion
- value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
-
- jobs: ${{ parameters.jobs }}
-
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
-
- # Optional: Include PublishBuildArtifacts task
- enablePublishBuildArtifacts: false
-
- # Optional: Enable publishing using release pipelines
- enablePublishUsingPipelines: false
-
- # Optional: Enable running the source-build jobs to build repo from source
- enableSourceBuild: false
-
- # Optional: Parameters for source-build template.
- # See /eng/common/templates/jobs/source-build.yml for options
- sourceBuildParameters: []
-
- graphFileGeneration:
- # Optional: Enable generating the graph files at the end of the build
- enabled: false
- # Optional: Include toolset dependencies in the generated graph files
- includeToolset: false
-
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
-
- # Optional: Override automatically derived dependsOn value for "publish build assets" job
- publishBuildAssetsDependsOn: ''
-
- # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage.
- publishAssetsImmediately: false
-
- # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
- artifactsPublishingAdditionalParameters: ''
- signingValidationAdditionalParameters: ''
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- enableSourceIndex: false
- sourceIndexParams: {}
-
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
jobs:
-- ${{ each job in parameters.jobs }}:
- - template: ../job/job.yml
- parameters:
- # pass along parameters
- ${{ each parameter in parameters }}:
- ${{ if ne(parameter.key, 'jobs') }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
- # pass along job properties
- ${{ each property in job }}:
- ${{ if ne(property.key, 'job') }}:
- ${{ property.key }}: ${{ property.value }}
-
- name: ${{ job.job }}
-
-- ${{ if eq(parameters.enableSourceBuild, true) }}:
- - template: /eng/common/templates/jobs/source-build.yml
- parameters:
- allCompletedJobId: Source_Build_Complete
- ${{ each parameter in parameters.sourceBuildParameters }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
-- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
- - template: ../job/source-index-stage1.yml
- parameters:
- runAsPublic: ${{ parameters.runAsPublic }}
- ${{ each parameter in parameters.sourceIndexParams }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
-- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
- - template: ../job/publish-build-assets.yml
- parameters:
- continueOnError: ${{ parameters.continueOnError }}
- dependsOn:
- - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.jobs }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.enableSourceBuild, true) }}:
- - Source_Build_Complete
+- template: /eng/common/core-templates/jobs/jobs.yml
+ parameters:
+ is1ESPipeline: false
- runAsPublic: ${{ parameters.runAsPublic }}
- publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
- publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
- enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
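
jobs/jobs.yml is the usual repo-facing entry point, and the shim keeps its contract: repo-defined jobs plus the opt-in source-build, source-index, and publish-build-assets jobs. A minimal consuming pipeline might look like this (pool and build invocation are illustrative):

```yaml
jobs:
- template: /eng/common/templates/jobs/jobs.yml
  parameters:
    enableSourceBuild: true          # also adds the Source_Build_Complete join job
    enablePublishBuildAssets: true   # appends the publish-build-assets job
    jobs:
    - job: Windows_NT
      pool:
        vmImage: windows-latest
      steps:
      - script: eng\common\cibuild.cmd   # assumed build entry point
```
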
-parameters:
- # This template adds arcade-powered source-build to CI. A job is created for each platform, as
- # well as an optional server job that completes when all platform jobs complete.
-
- # The name of the "join" job for all source-build platforms. If set to empty string, the job is
- # not included. Existing repo pipelines can use this job depend on all source-build jobs
- # completing without maintaining a separate list of every single job ID: just depend on this one
- # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
- allCompletedJobId: ''
-
- # See /eng/common/templates/job/source-build.yml
- jobNamePrefix: 'Source_Build'
-
- # This is the default platform provided by Arcade, intended for use by a managed-only repo.
- defaultManagedPlatform:
- name: 'Managed'
- container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'
-
- # Defines the platforms on which to run build jobs. One job is created for each platform, and the
- # object in this array is sent to the job template as 'platform'. If no platforms are specified,
- # one job runs on 'defaultManagedPlatform'.
- platforms: []
-
jobs:
+- template: /eng/common/core-templates/jobs/source-build.yml
+ parameters:
+ is1ESPipeline: false
-- ${{ if ne(parameters.allCompletedJobId, '') }}:
- - job: ${{ parameters.allCompletedJobId }}
- displayName: Source-Build Complete
- pool: server
- dependsOn:
- - ${{ each platform in parameters.platforms }}:
- - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
- - ${{ if eq(length(parameters.platforms), 0) }}:
- - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
-
-- ${{ each platform in parameters.platforms }}:
- - template: /eng/common/templates/job/source-build.yml
- parameters:
- jobNamePrefix: ${{ parameters.jobNamePrefix }}
- platform: ${{ platform }}
-
-- ${{ if eq(length(parameters.platforms), 0) }}:
- - template: /eng/common/templates/job/source-build.yml
- parameters:
- jobNamePrefix: ${{ parameters.jobNamePrefix }}
- platform: ${{ parameters.defaultManagedPlatform }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
variables:
- - group: Publish-Build-Assets
+- template: /eng/common/core-templates/post-build/common-variables.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: false
- # Whether the build is internal or not
- - name: IsInternalBuild
- value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
-
- # Default Maestro++ API Endpoint and API Version
- - name: MaestroApiEndPoint
- value: "https://maestro.dot.net"
- - name: MaestroApiAccessToken
- value: $(MaestroAccessToken)
- - name: MaestroApiVersion
- value: "2020-02-20"
-
- - name: SourceLinkCLIVersion
- value: 3.0.0
- - name: SymbolToolVersion
- value: 1.0.1
- - name: BinlogToolVersion
- value: 1.0.11
-
- - name: runCodesignValidationInjection
- value: false
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
-parameters:
- # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
- # Publishing V1 is no longer supported
- # Publishing V2 is no longer supported
- # Publishing V3 is the default
- - name: publishingInfraVersion
- displayName: Which version of publishing should be used to promote the build definition?
- type: number
- default: 3
- values:
- - 3
-
- - name: BARBuildId
- displayName: BAR Build Id
- type: number
- default: 0
-
- - name: PromoteToChannelIds
- displayName: Channel to promote BARBuildId to
- type: string
- default: ''
-
- - name: enableSourceLinkValidation
- displayName: Enable SourceLink validation
- type: boolean
- default: false
-
- - name: enableSigningValidation
- displayName: Enable signing validation
- type: boolean
- default: true
-
- - name: enableSymbolValidation
- displayName: Enable symbol validation
- type: boolean
- default: false
-
- - name: enableNugetValidation
- displayName: Enable NuGet validation
- type: boolean
- default: true
-
- - name: publishInstallersAndChecksums
- displayName: Publish installers and checksums
- type: boolean
- default: true
-
- - name: SDLValidationParameters
- type: object
- default:
- enable: false
- publishGdn: false
- continueOnError: false
- params: ''
- artifactNames: ''
- downloadArtifacts: true
-
- # These parameters let the user customize the call to sdk-task.ps1 for publishing
- # symbols & general artifacts as well as for signing validation
- - name: symbolPublishingAdditionalParameters
- displayName: Symbol publishing additional parameters
- type: string
- default: ''
-
- - name: artifactsPublishingAdditionalParameters
- displayName: Artifact publishing additional parameters
- type: string
- default: ''
-
- - name: signingValidationAdditionalParameters
- displayName: Signing validation additional parameters
- type: string
- default: ''
-
- # Which stages should finish execution before post-build stages start
- - name: validateDependsOn
- type: object
- default:
- - build
-
- - name: publishDependsOn
- type: object
- default:
- - Validate
-
- # Optional: Call asset publishing rather than running in a separate stage
- - name: publishAssetsImmediately
- type: boolean
- default: false
-
stages:
-- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- - stage: Validate
- dependsOn: ${{ parameters.validateDependsOn }}
- displayName: Validate Build Assets
- variables:
- - template: common-variables.yml
- - template: /eng/common/templates/variables/pool-providers.yml
- jobs:
- - job:
- displayName: NuGet Validation
- condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
-
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
- arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
- -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
-
- - job:
- displayName: Signing Validation
- condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
- itemPattern: |
- **
- !**/Microsoft.SourceBuild.Intermediate.*.nupkg
-
- # This is necessary whenever we want to publish/restore to an AzDO private feed
- # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
- # otherwise it'll complain about accessing a private feed.
- - task: NuGetAuthenticate@1
- displayName: 'Authenticate to AzDO Feeds'
-
- # Signing validation will optionally work with the buildmanifest file which is downloaded from
- # Azure DevOps above.
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: eng\common\sdk-task.ps1
- arguments: -task SigningValidation -restore -msbuildEngine vs
- /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
- /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
- ${{ parameters.signingValidationAdditionalParameters }}
-
- - template: ../steps/publish-logs.yml
- parameters:
- StageLabel: 'Validation'
- JobLabel: 'Signing'
- BinlogToolVersion: $(BinlogToolVersion)
-
- - job:
- displayName: SourceLink Validation
- condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Blob Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: BlobArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
- arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
- -ExtractPath $(Agent.BuildDirectory)/Extract/
- -GHRepoName $(Build.Repository.Name)
- -GHCommit $(Build.SourceVersion)
- -SourcelinkCliVersion $(SourceLinkCLIVersion)
- continueOnError: true
-
- - template: /eng/common/templates/job/execute-sdl.yml
- parameters:
- enable: ${{ parameters.SDLValidationParameters.enable }}
- publishGuardianDirectoryToPipeline: ${{ parameters.SDLValidationParameters.publishGdn }}
- additionalParameters: ${{ parameters.SDLValidationParameters.params }}
- continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }}
- artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }}
- downloadArtifacts: ${{ parameters.SDLValidationParameters.downloadArtifacts }}
-
-- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
- - stage: publish_using_darc
- ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- dependsOn: ${{ parameters.publishDependsOn }}
- ${{ else }}:
- dependsOn: ${{ parameters.validateDependsOn }}
- displayName: Publish using Darc
- variables:
- - template: common-variables.yml
- - template: /eng/common/templates/variables/pool-providers.yml
- jobs:
- - job:
- displayName: Publish Using Darc
- timeoutInMinutes: 120
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: VSEngSS-MicroBuild2022-1ES
- demands: Cmd
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: NetCore1ESPool-Publishing-Internal
- demands: ImageOverride -equals windows.vs2019.amd64
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: NuGetAuthenticate@1
+- template: /eng/common/core-templates/post-build/post-build.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: false
- - task: PowerShell@2
- displayName: Publish Using Darc
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
- -PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
- -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
- -MaestroToken '$(MaestroApiAccessToken)'
- -WaitPublishingFinish true
- -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
- -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
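
post-build.yml keeps its stage-level contract through the shim; the validation and publishing parameters from the removed block are simply forwarded. A hedged example of a repo opting into signing validation only:

```yaml
stages:
- template: /eng/common/templates/post-build/post-build.yml
  parameters:
    publishingInfraVersion: 3
    enableSigningValidation: true
    enableSourceLinkValidation: false
    validateDependsOn:
    - build                     # stage(s) that must finish before validation
```
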
-parameters:
- BARBuildId: ''
- PromoteToChannelIds: ''
-
steps:
- - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download Release Configs
- inputs:
- buildType: current
- artifactName: ReleaseConfigs
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- name: setReleaseVars
- displayName: Set Release Configs Vars
- inputs:
- targetType: inline
- pwsh: true
- script: |
- try {
- if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
- $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
-
- $BarId = $Content | Select -Index 0
- $Channels = $Content | Select -Index 1
- $IsStableBuild = $Content | Select -Index 2
-
- $AzureDevOpsProject = $Env:System_TeamProject
- $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
- $AzureDevOpsBuildId = $Env:Build_BuildId
- }
- else {
- $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}"
-
- $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
- $apiHeaders.Add('Accept', 'application/json')
- $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}")
-
- $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
-
- $BarId = $Env:BARBuildId
- $Channels = $Env:PromoteToMaestroChannels -split ","
- $Channels = $Channels -join "]["
- $Channels = "[$Channels]"
-
- $IsStableBuild = $buildInfo.stable
- $AzureDevOpsProject = $buildInfo.azureDevOpsProject
- $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
- $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
- }
-
- Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
- Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
- Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
+- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
+ parameters:
+ # Specifies whether to use 1ES
+ is1ESPipeline: false
- Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
- Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
- Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
- }
- catch {
- Write-Host $_
- Write-Host $_.Exception
- Write-Host $_.ScriptStackTrace
- exit 1
- }
- env:
- MAESTRO_API_TOKEN: $(MaestroApiAccessToken)
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
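
The removed script surfaced its results through `##vso[task.setvariable]` logging commands, and the core template still does; any step placed after the shim can read BARBuildId, TargetChannels, IsStableBuild, and the AzDO* variables as ordinary pipeline variables. A minimal sketch (the consumer step is illustrative):

```yaml
steps:
- template: /eng/common/templates/post-build/setup-maestro-vars.yml
  parameters:
    BARBuildId: ''              # empty: fall back to the ReleaseConfigs artifact
    PromoteToChannelIds: ''
- pwsh: Write-Host "Promoting BAR build $(BARBuildId) to channels $(TargetChannels)"
  displayName: Echo resolved Maestro vars
```
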
-parameters:
- ChannelId: 0
-
steps:
-- task: PowerShell@2
- displayName: Add Build to Channel
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
- arguments: -BuildId $(BARBuildId)
- -ChannelId ${{ parameters.ChannelId }}
- -MaestroApiAccessToken $(MaestroApiAccessToken)
- -MaestroApiEndPoint $(MaestroApiEndPoint)
- -MaestroApiVersion $(MaestroApiVersion)
+- template: /eng/common/core-templates/steps/add-build-to-channel.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+++ /dev/null
-# build-reason.yml
-# Description: runs steps if build.reason condition is valid. conditions is a string of valid build reasons
-# to include steps (',' separated).
-parameters:
- conditions: ''
- steps: []
-
-steps:
- - ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}:
- - ${{ parameters.steps }}
- - ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}:
- - ${{ parameters.steps }}
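
build-reason.yml is deleted outright rather than shimmed. For the record, it filtered steps at template-expansion time against Build.Reason, with a leading `not` inverting the match; a caller looked roughly like this (step body illustrative):

```yaml
steps:
- template: /eng/common/templates/steps/build-reason.yml
  parameters:
    conditions: not,PullRequest   # expand these steps for every reason except PullRequest
    steps:
    - script: echo "skipped on PR builds"
```
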
-parameters:
- disableComponentGovernance: false
- componentGovernanceIgnoreDirectories: ''
-
steps:
-- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
- - script: "echo ##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
- displayName: Set skipComponentGovernanceDetection variable
-- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
- - task: ComponentGovernanceComponentDetection@0
- continueOnError: true
- inputs:
- ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
\ No newline at end of file
+- template: /eng/common/core-templates/steps/component-governance.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
--- /dev/null
+# Obtains internal runtime download credentials and, by default, populates the
+# 'dotnetbuilds-internal-container-read-token-base64' variable with the base64-encoded SAS token
+
+steps:
+- template: /eng/common/core-templates/steps/enable-internal-runtimes.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
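
A later step can consume the SAS token variable named in the comment above; mapping it into the environment explicitly keeps the secret off the command line. A hedged sketch (the consumer step is hypothetical):

```yaml
steps:
- template: /eng/common/templates/steps/enable-internal-runtimes.yml
- script: echo "internal runtime token acquired"
  displayName: Consume SAS token (hypothetical)
  env:
    RUNTIME_SAS_B64: $(dotnetbuilds-internal-container-read-token-base64)
```
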
--- /dev/null
+steps:
+- template: /eng/common/core-templates/steps/enable-internal-sources.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
+++ /dev/null
-parameters:
- # Language that should be analyzed. Defaults to csharp
- language: csharp
- # Build Commands
- buildCommands: ''
- overrideParameters: '' # Optional: to override values for parameters.
- additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
- # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
- # diagnosis of problems with specific tool configurations.
- publishGuardianDirectoryToPipeline: false
- # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
- # parameters rather than relying on YAML. It may be better to use a local script, because you can
- # reproduce results locally without piecing together a command based on the YAML.
- executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
- # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
- # 'continueOnError', the parameter value is not correctly picked up.
- # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
- # optional: determines whether to continue the build if the step errors;
- sdlContinueOnError: false
-
-steps:
-- template: /eng/common/templates/steps/execute-sdl.yml
- parameters:
- overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }}
- executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }}
- overrideParameters: ${{ parameters.overrideParameters }}
- additionalParameters: '${{ parameters.additionalParameters }}
- -CodeQLAdditionalRunConfigParams @("BuildCommands < ${{ parameters.buildCommands }}", "Language < ${{ parameters.language }}")'
- publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }}
- sdlContinueOnError: ${{ parameters.sdlContinueOnError }}
\ No newline at end of file
+++ /dev/null
-parameters:
- overrideGuardianVersion: ''
- executeAllSdlToolsScript: ''
- overrideParameters: ''
- additionalParameters: ''
- publishGuardianDirectoryToPipeline: false
- sdlContinueOnError: false
- condition: ''
-
-steps:
-- task: NuGetAuthenticate@1
- inputs:
- nuGetServiceConnections: GuardianConnect
-
-- task: NuGetToolInstaller@1
- displayName: 'Install NuGet.exe'
-
-- ${{ if ne(parameters.overrideGuardianVersion, '') }}:
- - pwsh: |
- Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
- . .\sdl.ps1
- $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }}
- Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
- displayName: Install Guardian (Overridden)
-
-- ${{ if eq(parameters.overrideGuardianVersion, '') }}:
- - pwsh: |
- Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
- . .\sdl.ps1
- $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts
- Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
- displayName: Install Guardian
-
-- ${{ if ne(parameters.overrideParameters, '') }}:
- - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }}
- displayName: Execute SDL (Overridden)
- continueOnError: ${{ parameters.sdlContinueOnError }}
- condition: ${{ parameters.condition }}
-
-- ${{ if eq(parameters.overrideParameters, '') }}:
- - powershell: ${{ parameters.executeAllSdlToolsScript }}
- -GuardianCliLocation $(GuardianCliLocation)
- -NugetPackageDirectory $(Build.SourcesDirectory)\.packages
- -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw)
- ${{ parameters.additionalParameters }}
- displayName: Execute SDL
- continueOnError: ${{ parameters.sdlContinueOnError }}
- condition: ${{ parameters.condition }}
-
-- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}:
- # We want to publish the Guardian results and configuration for easy diagnosis. However, the
- # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default
- # tooling files. Some of these files are large and aren't useful during an investigation, so
- # exclude them by simply deleting them before publishing. (As of writing, there is no documented
- # way to selectively exclude a dir from the pipeline artifact publish task.)
- - task: DeleteFiles@1
- displayName: Delete Guardian dependencies to avoid uploading
- inputs:
- SourceFolder: $(Agent.BuildDirectory)/.gdn
- Contents: |
- c
- i
- condition: succeededOrFailed()
-
- - publish: $(Agent.BuildDirectory)/.gdn
- artifact: GuardianConfiguration
- displayName: Publish GuardianConfiguration
- condition: succeededOrFailed()
-
- # Publish the SARIF files in a container named CodeAnalysisLogs to enable integration
- # with the "SARIF SAST Scans Tab" Azure DevOps extension
- - task: CopyFiles@2
- displayName: Copy SARIF files
- inputs:
- flattenFolders: true
- sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/
- contents: '**/*.sarif'
- targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs
- condition: succeededOrFailed()
-
- # Use PublishBuildArtifacts because the SARIF extension only checks this case
- # see microsoft/sarif-azuredevops-extension#4
- - task: PublishBuildArtifacts@1
- displayName: Publish SARIF files to CodeAnalysisLogs container
- inputs:
- pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs
- artifactName: CodeAnalysisLogs
- condition: succeededOrFailed()
\ No newline at end of file
-# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
-# PackageName - The name of the package this SBOM represents.
-# PackageVersion - The version of the package this SBOM represents.
-# ManifestDirPath - The path of the directory where the generated manifest files will be placed
-# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
-
-parameters:
- PackageVersion: 7.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
- PackageName: '.NET'
- ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
- IgnoreDirectories: ''
- sbomContinueOnError: true
-
steps:
-- task: PowerShell@2
- displayName: Prep for SBOM generation in (Non-linux)
- condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
- inputs:
- filePath: ./eng/common/generate-sbom-prep.ps1
- arguments: ${{parameters.manifestDirPath}}
-
-# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
-- script: |
- chmod +x ./eng/common/generate-sbom-prep.sh
- ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
- displayName: Prep for SBOM generation in (Linux)
- condition: eq(variables['Agent.Os'], 'Linux')
- continueOnError: ${{ parameters.sbomContinueOnError }}
-
-- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
- displayName: 'Generate SBOM manifest'
- continueOnError: ${{ parameters.sbomContinueOnError }}
- inputs:
- PackageName: ${{ parameters.packageName }}
- BuildDropPath: ${{ parameters.buildDropPath }}
- PackageVersion: ${{ parameters.packageVersion }}
- ManifestDirPath: ${{ parameters.manifestDirPath }}
- ${{ if ne(parameters.IgnoreDirectories, '') }}:
- AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
-
-- task: PublishPipelineArtifact@1
- displayName: Publish SBOM manifest
- continueOnError: ${{parameters.sbomContinueOnError}}
- inputs:
- targetPath: '${{parameters.manifestDirPath}}'
- artifactName: $(ARTIFACT_NAME)
+- template: /eng/common/core-templates/steps/generate-sbom.yml
+ parameters:
+ is1ESPipeline: false
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
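+# Note: the `${{ each parameter in parameters }}` loop above is compile-time template
+# syntax that forwards every caller-supplied parameter to the wrapped core template
+# verbatim, so these thin wrappers never re-declare the parameter list. A minimal
+# sketch of the same forwarding idiom (template path hypothetical):
+#   steps:
+#   - template: /eng/common/core-templates/steps/example.yml
+#     parameters:
+#       is1ESPipeline: false
+#       ${{ each parameter in parameters }}:
+#         ${{ parameter.key }}: ${{ parameter.value }}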
--- /dev/null
+steps:
+- template: /eng/common/core-templates/steps/get-delegation-sas.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
--- /dev/null
+steps:
+- template: /eng/common/core-templates/steps/get-federated-access-token.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
\ No newline at end of file
--- /dev/null
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: false
+
+- name: displayName
+ type: string
+ default: 'Publish to Build Artifact'
+
+- name: condition
+ type: string
+ default: succeeded()
+
+- name: artifactName
+ type: string
+
+- name: pathToPublish
+ type: string
+
+- name: continueOnError
+ type: boolean
+ default: false
+
+- name: publishLocation
+ type: string
+ default: 'Container'
+
+steps:
+- ${{ if eq(parameters.is1ESPipeline, true) }}:
+ - 'eng/common/templates cannot be referenced from a 1ES managed template': error
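+# The mapping above is a deliberate compile-time guard: when is1ESPipeline is true,
+# template expansion emits an element whose key is the quoted error message, which is
+# not a valid step, so the run fails at queue time and surfaces that message before
+# any agent time is spent.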
+- task: PublishBuildArtifacts@1
+ displayName: ${{ parameters.displayName }}
+ condition: ${{ parameters.condition }}
+ ${{ if parameters.continueOnError }}:
+ continueOnError: ${{ parameters.continueOnError }}
+ inputs:
+ PublishLocation: ${{ parameters.publishLocation }}
+ PathtoPublish: ${{ parameters.pathToPublish }}
+ ${{ if parameters.artifactName }}:
+ ArtifactName: ${{ parameters.artifactName }}
\ No newline at end of file
-parameters:
- StageLabel: ''
- JobLabel: ''
- CustomSensitiveDataList: ''
- # A default - in case value from eng/common/templates/post-build/common-variables.yml is not passed
- BinlogToolVersion: '1.0.11'
-
steps:
-- task: Powershell@2
- displayName: Prepare Binlogs to Upload
- inputs:
- targetType: inline
- script: |
- New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- continueOnError: true
- condition: always()
-
-- task: PowerShell@2
- displayName: Redact Logs
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1
- # For now this needs to have explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml
- # Sensitive data can as well be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
- # If the file exists - sensitive data for redaction will be sourced from it
- # (single entry per line, lines starting with '# ' are considered comments and skipped)
- arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs'
- -BinlogToolVersion ${{parameters.BinlogToolVersion}}
- -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
- '$(publishing-dnceng-devdiv-code-r-build-re)'
- '$(MaestroAccessToken)'
- '$(dn-bot-all-orgs-artifact-feeds-rw)'
- '$(akams-client-id)'
- '$(akams-client-secret)'
- '$(microsoft-symbol-server-pat)'
- '$(symweb-symbol-server-pat)'
- '$(dn-bot-all-orgs-build-rw-code-rw)'
- ${{parameters.CustomSensitiveDataList}}
- continueOnError: true
- condition: always()
-
-- task: PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
- PublishLocation: Container
- ArtifactName: PostBuildLogs
- continueOnError: true
- condition: always()
+- template: /eng/common/core-templates/steps/publish-logs.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
--- /dev/null
+parameters:
+- name: is1ESPipeline
+ type: boolean
+ default: false
+
+- name: args
+ type: object
+ default: {}
+
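+# `args` acts as a loosely typed property bag: each optional input below is emitted
+# only when the caller actually supplied it (`${{ if parameters.args.X }}`), while
+# displayName and condition fall back to defaults via coalesce and the else branch.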
+steps:
+- ${{ if eq(parameters.is1ESPipeline, true) }}:
+ - 'eng/common/templates cannot be referenced from a 1ES managed template': error
+- task: PublishPipelineArtifact@1
+ displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }}
+ ${{ if parameters.args.condition }}:
+ condition: ${{ parameters.args.condition }}
+ ${{ else }}:
+ condition: succeeded()
+ ${{ if parameters.args.continueOnError }}:
+ continueOnError: ${{ parameters.args.continueOnError }}
+ inputs:
+ targetPath: ${{ parameters.args.targetPath }}
+ ${{ if parameters.args.artifactName }}:
+ artifactName: ${{ parameters.args.artifactName }}
+ ${{ if parameters.args.publishLocation }}:
+ publishLocation: ${{ parameters.args.publishLocation }}
+ ${{ if parameters.args.fileSharePath }}:
+ fileSharePath: ${{ parameters.args.fileSharePath }}
+ ${{ if parameters.args.Parallel }}:
+ parallel: ${{ parameters.args.Parallel }}
+ ${{ if parameters.args.parallelCount }}:
+ parallelCount: ${{ parameters.args.parallelCount }}
+ ${{ if parameters.args.properties }}:
+ properties: ${{ parameters.args.properties }}
\ No newline at end of file
-parameters:
- # Optional azure devops PAT with build execute permissions for the build's organization,
- # only needed if the build that should be retained ran on a different organization than
- # the pipeline where this template is executing from
- Token: ''
- # Optional BuildId to retain, defaults to the current running build
- BuildId: ''
- # Azure devops Organization URI for the build in the https://dev.azure.com/<organization> format.
- # Defaults to the organization the current pipeline is running on
- AzdoOrgUri: '$(System.CollectionUri)'
- # Azure devops project for the build. Defaults to the project the current pipeline is running on
- AzdoProject: '$(System.TeamProject)'
-
steps:
- - task: powershell@2
- inputs:
- targetType: 'filePath'
- filePath: eng/common/retain-build.ps1
- pwsh: true
- arguments: >
- -AzdoOrgUri: ${{parameters.AzdoOrgUri}}
- -AzdoProject ${{parameters.AzdoProject}}
- -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
- -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
- displayName: Enable permanent build retention
- env:
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- BUILD_ID: $(Build.BuildId)
\ No newline at end of file
+- template: /eng/common/core-templates/steps/retain-build.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+++ /dev/null
-parameters:
- agentOs: ''
- steps: []
-
-steps:
-- ${{ if ne(parameters.agentOs, 'Windows_NT') }}:
- - ${{ parameters.steps }}
+++ /dev/null
-parameters:
- agentOs: ''
- steps: []
-
-steps:
-- ${{ if eq(parameters.agentOs, 'Windows_NT') }}:
- - ${{ parameters.steps }}
+++ /dev/null
-parameters:
- # if parameter1 equals parameter 2, run 'ifScript' command, else run 'elsescript' command
- parameter1: ''
- parameter2: ''
- ifScript: ''
- elseScript: ''
-
- # name of script step
- name: Script
-
- # display name of script step
- displayName: If-Equal-Else Script
-
- # environment
- env: {}
-
- # conditional expression for step execution
- condition: ''
-
-steps:
-- ${{ if and(ne(parameters.ifScript, ''), eq(parameters.parameter1, parameters.parameter2)) }}:
- - script: ${{ parameters.ifScript }}
- name: ${{ parameters.name }}
- displayName: ${{ parameters.displayName }}
- env: ${{ parameters.env }}
- condition: ${{ parameters.condition }}
-
-- ${{ if and(ne(parameters.elseScript, ''), ne(parameters.parameter1, parameters.parameter2)) }}:
- - script: ${{ parameters.elseScript }}
- name: ${{ parameters.name }}
- displayName: ${{ parameters.displayName }}
- env: ${{ parameters.env }}
- condition: ${{ parameters.condition }}
\ No newline at end of file
-# Please remember to update the documentation if you make changes to these parameters!
-parameters:
- HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
- HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
- HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
- HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
- HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
- HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY
- HelixProjectArguments: '' # optional -- arguments passed to the build command
- HelixConfiguration: '' # optional -- additional property attached to a job
- HelixPreCommands: '' # optional -- commands to run before Helix work item execution
- HelixPostCommands: '' # optional -- commands to run after Helix work item execution
- WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
- WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
- WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
- CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
- XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
- XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
- XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
- XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
- XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
- IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
- DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
- DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
- WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
- IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
- HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
- Creator: '' # optional -- if the build is external, use this to specify who is sending the job
- DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
- condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
- continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
-
steps:
- - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
- displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixConfiguration: ${{ parameters.HelixConfiguration }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- WorkItemCommand: ${{ parameters.WorkItemCommand }}
- WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- XUnitProjects: ${{ parameters.XUnitProjects }}
- XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
- XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
- XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
- XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
- displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixConfiguration: ${{ parameters.HelixConfiguration }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- WorkItemCommand: ${{ parameters.WorkItemCommand }}
- WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- XUnitProjects: ${{ parameters.XUnitProjects }}
- XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
- XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
- XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
- XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
+- template: /eng/common/core-templates/steps/send-to-helix.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
-parameters:
- # This template adds arcade-powered source-build to CI.
-
- # This is a 'steps' template, and is intended for advanced scenarios where the existing build
- # infra has a careful build methodology that must be followed. For example, a repo
- # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
- # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
- # GitHub. Using this steps template leaves room for that infra to be included.
-
- # Defines the platform on which to run the steps. See 'eng/common/templates/job/source-build.yml'
- # for details. The entire object is described in the 'job' template for simplicity, even though
- # the usage of the properties on this object is split between the 'job' and 'steps' templates.
- platform: {}
-
steps:
-# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
-- script: |
- set -x
- df -h
-
- # If building on the internal project, the artifact feeds variable may be available (usually only if needed)
- # In that case, call the feed setup script to add internal feeds corresponding to public ones.
- # In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
- # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those
- # changes.
- internalRestoreArgs=
- if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
- # Temporarily work around https://github.com/dotnet/arcade/issues/7709
- chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
- $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
- internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
-
- # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
- # This only works if there is a username/email configured, which won't be the case in most CI runs.
- git config --get user.email
- if [ $? -ne 0 ]; then
- git config user.email dn-bot@microsoft.com
- git config user.name dn-bot
- fi
- fi
-
- # If building on the internal project, the internal storage variable may be available (usually only if needed)
- # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
- # in the default public locations.
- internalRuntimeDownloadArgs=
- if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
- internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
- fi
-
- buildConfig=Release
- # Check if AzDO substitutes in a build config from a variable, and use it if so.
- if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
- buildConfig='$(_BuildConfig)'
- fi
-
- officialBuildArgs=
- if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
- officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
- fi
-
- targetRidArgs=
- if [ '${{ parameters.platform.targetRID }}' != '' ]; then
- targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
- fi
-
- runtimeOsArgs=
- if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
- runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
- fi
-
- baseOsArgs=
- if [ '${{ parameters.platform.baseOS }}' != '' ]; then
- baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
- fi
-
- publishArgs=
- if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
- publishArgs='--publish'
- fi
-
- assetManifestFileName=SourceBuild_RidSpecific.xml
- if [ '${{ parameters.platform.name }}' != '' ]; then
- assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
- fi
-
- ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
- --configuration $buildConfig \
- --restore --build --pack $publishArgs -bl \
- $officialBuildArgs \
- $internalRuntimeDownloadArgs \
- $internalRestoreArgs \
- $targetRidArgs \
- $runtimeOsArgs \
- $baseOsArgs \
- /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
- /p:ArcadeBuildFromSource=true \
- /p:DotNetBuildSourceOnly=true \
- /p:DotNetBuildRepo=true \
- /p:AssetManifestFileName=$assetManifestFileName
- displayName: Build
-
-# Upload build logs for diagnosis.
-- task: CopyFiles@2
- displayName: Prepare BuildLogs staging directory
- inputs:
- SourceFolder: '$(Build.SourcesDirectory)'
- Contents: |
- **/*.log
- **/*.binlog
- artifacts/sb/prebuilt-report/**
- TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
- CleanTargetFolder: true
- continueOnError: true
- condition: succeededOrFailed()
-
-- task: PublishPipelineArtifact@1
- displayName: Publish BuildLogs
- inputs:
- targetPath: '$(Build.StagingDirectory)/BuildLogs'
- artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
- continueOnError: true
- condition: succeededOrFailed()
+- template: /eng/common/core-templates/steps/source-build.yml
+ parameters:
+ is1ESPipeline: false
-# Manually inject component detection so that we can ignore the source build upstream cache, which contains
-# a nupkg cache of input packages (a local feed).
-# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
-# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
-- task: ComponentGovernanceComponentDetection@0
- displayName: Component Detection (Exclude upstream cache)
- inputs:
- ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache'
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+++ /dev/null
-parameters:
- maxRetries: 5
- retryDelay: 10 # in seconds
-
-steps:
-- bash: |
- if [ "$AGENT_JOBSTATUS" = "Succeeded" ] || [ "$AGENT_JOBSTATUS" = "PartiallySucceeded" ]; then
- errorCount=0
- else
- errorCount=1
- fi
- warningCount=0
-
- curlStatus=1
- retryCount=0
- # retry loop to harden against spotty telemetry connections
- # we don't retry successes and 4xx client errors
- until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
- do
- if [ $retryCount -gt 0 ]; then
- echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
- sleep $RetryDelay
- fi
-
- # create a temporary file for curl output
- res=`mktemp`
-
- curlResult=`
- curl --verbose --output $res --write-out "%{http_code}"\
- -H 'Content-Type: application/json' \
- -H "X-Helix-Job-Token: $Helix_JobToken" \
- -H 'Content-Length: 0' \
- -X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$Helix_WorkItemId/finish" \
- --data-urlencode "errorCount=$errorCount" \
- --data-urlencode "warningCount=$warningCount"`
- curlStatus=$?
-
- if [ $curlStatus -eq 0 ]; then
- if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
- curlStatus=$curlResult
- fi
- fi
-
- let retryCount++
- done
-
- if [ $curlStatus -ne 0 ]; then
- echo "Failed to Send Build Finish information after $retryCount retries"
- vstsLogOutput="vso[task.logissue type=error;sourcepath=templates/steps/telemetry-end.yml;code=1;]Failed to Send Build Finish information: $curlStatus"
- echo "##$vstsLogOutput"
- exit 1
- fi
- displayName: Send Unix Build End Telemetry
- env:
- # defined via VSTS variables in start-job.sh
- Helix_JobToken: $(Helix_JobToken)
- Helix_WorkItemId: $(Helix_WorkItemId)
- MaxRetries: ${{ parameters.maxRetries }}
- RetryDelay: ${{ parameters.retryDelay }}
- condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
-- powershell: |
- if (($env:Agent_JobStatus -eq 'Succeeded') -or ($env:Agent_JobStatus -eq 'PartiallySucceeded')) {
- $ErrorCount = 0
- } else {
- $ErrorCount = 1
- }
- $WarningCount = 0
-
- # Basic retry loop to harden against server flakiness
- $retryCount = 0
- while ($retryCount -lt $env:MaxRetries) {
- try {
- Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$env:Helix_WorkItemId/finish?errorCount=$ErrorCount&warningCount=$WarningCount" -Method Post -ContentType "application/json" -Body "" `
- -Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
- break
- }
- catch {
- $statusCode = $_.Exception.Response.StatusCode.value__
- if ($statusCode -ge 400 -and $statusCode -le 499) {
- Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
- Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message
- exit 1
- }
- Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
- $retryCount++
- sleep $env:RetryDelay
- continue
- }
- }
-
- if ($retryCount -ge $env:MaxRetries) {
- Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries."
- exit 1
- }
- displayName: Send Windows Build End Telemetry
- env:
- # defined via VSTS variables in start-job.ps1
- Helix_JobToken: $(Helix_JobToken)
- Helix_WorkItemId: $(Helix_WorkItemId)
- MaxRetries: ${{ parameters.maxRetries }}
- RetryDelay: ${{ parameters.retryDelay }}
- condition: and(always(),eq(variables['Agent.Os'], 'Windows_NT'))
+++ /dev/null
-parameters:
- helixSource: 'undefined_defaulted_in_telemetry.yml'
- helixType: 'undefined_defaulted_in_telemetry.yml'
- buildConfig: ''
- runAsPublic: false
- maxRetries: 5
- retryDelay: 10 # in seconds
-
-steps:
-- ${{ if and(eq(parameters.runAsPublic, 'false'), not(eq(variables['System.TeamProject'], 'public'))) }}:
- - task: AzureKeyVault@1
- inputs:
- azureSubscription: 'HelixProd_KeyVault'
- KeyVaultName: HelixProdKV
- SecretsFilter: 'HelixApiAccessToken'
- condition: always()
-- bash: |
- # create a temporary file
- jobInfo=`mktemp`
-
- # write job info content to temporary file
- cat > $jobInfo <<JobListStuff
- {
- "QueueId": "$QueueId",
- "Source": "$Source",
- "Type": "$Type",
- "Build": "$Build",
- "Attempt": "$Attempt",
- "Properties": {
- "operatingSystem": "$OperatingSystem",
- "configuration": "$Configuration"
- }
- }
- JobListStuff
-
- cat $jobInfo
-
- # create a temporary file for curl output
- res=`mktemp`
-
- accessTokenParameter="?access_token=$HelixApiAccessToken"
-
- curlStatus=1
- retryCount=0
- # retry loop to harden against spotty telemetry connections
- # we don't retry successes and 4xx client errors
- until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
- do
- if [ $retryCount -gt 0 ]; then
- echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
- sleep $RetryDelay
- fi
-
- curlResult=`
- cat $jobInfo |\
- curl --trace - --verbose --output $res --write-out "%{http_code}" \
- -H 'Content-Type: application/json' \
- -X POST "https://helix.dot.net/api/2018-03-14/telemetry/job$accessTokenParameter" -d @-`
- curlStatus=$?
-
- if [ $curlStatus -eq 0 ]; then
- if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
- curlStatus=$curlResult
- fi
- fi
-
- let retryCount++
- done
-
- curlResult=`cat $res`
-
- # validate status of curl command
- if [ $curlStatus -ne 0 ]; then
- echo "Failed To Send Job Start information after $retryCount retries"
- # We have to append the ## vso prefix or vso will pick up the command when it dumps the inline script into the shell
- vstsLogOutput="vso[task.logissue type=error;sourcepath=telemetry/start-job.sh;code=1;]Failed to Send Job Start information: $curlStatus"
- echo "##$vstsLogOutput"
- exit 1
- fi
-
- # Set the Helix_JobToken variable
- export Helix_JobToken=`echo $curlResult | xargs echo` # Strip Quotes
- echo "##vso[task.setvariable variable=Helix_JobToken;issecret=true;]$Helix_JobToken"
- displayName: Send Unix Job Start Telemetry
- env:
- HelixApiAccessToken: $(HelixApiAccessToken)
- Source: ${{ parameters.helixSource }}
- Type: ${{ parameters.helixType }}
- Build: $(Build.BuildNumber)
- QueueId: $(Agent.Os)
- Attempt: 1
- OperatingSystem: $(Agent.Os)
- Configuration: ${{ parameters.buildConfig }}
- MaxRetries: ${{ parameters.maxRetries }}
- RetryDelay: ${{ parameters.retryDelay }}
- condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
-- bash: |
- curlStatus=1
- retryCount=0
- # retry loop to harden against spotty telemetry connections
- # we don't retry successes and 4xx client errors
- until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
- do
- if [ $retryCount -gt 0 ]; then
- echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
- sleep $RetryDelay
- fi
-
- res=`mktemp`
- curlResult=`
- curl --verbose --output $res --write-out "%{http_code}"\
- -H 'Content-Type: application/json' \
- -H "X-Helix-Job-Token: $Helix_JobToken" \
- -H 'Content-Length: 0' \
- -X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build" \
- --data-urlencode "buildUri=$BuildUri"`
- curlStatus=$?
-
- if [ $curlStatus -eq 0 ]; then
- if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
- curlStatus=$curlResult
- fi
- fi
-
- curlResult=`cat $res`
- let retryCount++
- done
-
- # validate status of curl command
- if [ $curlStatus -ne 0 ]; then
- echo "Failed to Send Build Start information after $retryCount retries"
- vstsLogOutput="vso[task.logissue type=error;sourcepath=telemetry/build/start.sh;code=1;]Failed to Send Build Start information: $curlStatus"
- echo "##$vstsLogOutput"
- exit 1
- fi
-
- export Helix_WorkItemId=`echo $curlResult | xargs echo` # Strip Quotes
- echo "##vso[task.setvariable variable=Helix_WorkItemId]$Helix_WorkItemId"
- displayName: Send Unix Build Start Telemetry
- env:
- BuildUri: $(System.TaskDefinitionsUri)$(System.TeamProject)/_build/index?buildId=$(Build.BuildId)&_a=summary
- Helix_JobToken: $(Helix_JobToken)
- MaxRetries: ${{ parameters.maxRetries }}
- RetryDelay: ${{ parameters.retryDelay }}
- condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
-
-- powershell: |
- $jobInfo = [pscustomobject]@{
- QueueId=$env:QueueId;
- Source=$env:Source;
- Type=$env:Type;
- Build=$env:Build;
- Attempt=$env:Attempt;
- Properties=[pscustomobject]@{ operatingSystem=$env:OperatingSystem; configuration=$env:Configuration };
- }
-
- $jobInfoJson = $jobInfo | ConvertTo-Json
-
- if ($env:HelixApiAccessToken) {
- $accessTokenParameter="?access_token=$($env:HelixApiAccessToken)"
- }
- Write-Host "Job Info: $jobInfoJson"
-
- # Basic retry loop to harden against server flakiness
- $retryCount = 0
- while ($retryCount -lt $env:MaxRetries) {
- try {
- $jobToken = Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job$($accessTokenParameter)" -Method Post -ContentType "application/json" -Body $jobInfoJson
- break
- }
- catch {
- $statusCode = $_.Exception.Response.StatusCode.value__
- if ($statusCode -ge 400 -and $statusCode -le 499) {
- Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
- Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message
- exit 1
- }
- Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
- $retryCount++
- sleep $env:RetryDelay
- continue
- }
- }
-
- if ($retryCount -ge $env:MaxRetries) {
- Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries."
- exit 1
- }
-
- $env:Helix_JobToken = $jobToken
- Write-Host "##vso[task.setvariable variable=Helix_JobToken;issecret=true;]$env:Helix_JobToken"
- env:
- HelixApiAccessToken: $(HelixApiAccessToken)
- Source: ${{ parameters.helixSource }}
- Type: ${{ parameters.helixType }}
- Build: $(Build.BuildNumber)
- QueueId: $(Agent.Os)
- Attempt: 1
- OperatingSystem: $(Agent.Os)
- Configuration: ${{ parameters.buildConfig }}
- MaxRetries: ${{ parameters.maxRetries }}
- RetryDelay: ${{ parameters.retryDelay }}
- condition: and(always(), eq(variables['Agent.Os'], 'Windows_NT'))
- displayName: Send Windows Job Start Telemetry
-- powershell: |
- # Basic retry loop to harden against server flakiness
- $retryCount = 0
- while ($retryCount -lt $env:MaxRetries) {
- try {
- $workItemId = Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build?buildUri=$([Net.WebUtility]::UrlEncode($env:BuildUri))" -Method Post -ContentType "application/json" -Body "" `
- -Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
- break
- }
- catch {
- $statusCode = $_.Exception.Response.StatusCode.value__
- if ($statusCode -ge 400 -and $statusCode -le 499) {
- Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
- Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message
- exit 1
- }
- Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
- $retryCount++
- sleep $env:RetryDelay
- continue
- }
- }
-
- if ($retryCount -ge $env:MaxRetries) {
- Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries."
- exit 1
- }
-
- $env:Helix_WorkItemId = $workItemId
- Write-Host "##vso[task.setvariable variable=Helix_WorkItemId]$env:Helix_WorkItemId"
- displayName: Send Windows Build Start Telemetry
- env:
- BuildUri: $(System.TaskDefinitionsUri)$(System.TeamProject)/_build/index?buildId=$(Build.BuildId)&_a=summary
- Helix_JobToken: $(Helix_JobToken)
- MaxRetries: ${{ parameters.maxRetries }}
- RetryDelay: ${{ parameters.retryDelay }}
- condition: and(always(), eq(variables['Agent.Os'], 'Windows_NT'))
# pool:
# name: $(DncEngInternalBuildPool)
# demands: ImageOverride -equals windows.vs2019.amd64
-
variables:
- # Coalesce the target and source branches so we know when a PR targets a release branch
- # If these variables are somehow missing, fall back to main (tends to have more capacity)
+ - ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ - template: /eng/common/templates-official/variables/pool-providers.yml
+ - ${{ else }}:
+ # Coalesce the target and source branches so we know when a PR targets a release branch
+ # If these variables are somehow missing, fall back to main (tends to have more capacity)
- # Any new -Svc alternative pools should have variables added here to allow for splitting work
- - name: DncEngPublicBuildPool
- value: $[
- replace(
+ # Any new -Svc alternative pools should have variables added here to allow for splitting work
+ - name: DncEngPublicBuildPool
+ value: $[
replace(
- eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
- True,
- 'NetCore-Svc-Public'
- ),
- False,
- 'NetCore-Public'
- )
- ]
+ replace(
+ eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
+ True,
+ 'NetCore-Svc-Public'
+ ),
+ False,
+ 'NetCore-Public'
+ )
+ ]
- - name: DncEngInternalBuildPool
- value: $[
- replace(
+ - name: DncEngInternalBuildPool
+ value: $[
replace(
- eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
- True,
- 'NetCore1ESPool-Svc-Internal'
- ),
- False,
- 'NetCore1ESPool-Internal'
- )
- ]
+ replace(
+ eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
+ True,
+ 'NetCore1ESPool-Svc-Internal'
+ ),
+ False,
+ 'NetCore1ESPool-Internal'
+ )
+ ]
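+# The nested replace() calls above emulate a ternary inside a runtime expression
+# ($[ ... ]), which has no if/else: eq(contains(...), 'true') stringifies to 'True'
+# or 'False', the inner replace maps 'True' to the -Svc pool name, and the outer
+# replace maps 'False' to the default pool name.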
+++ /dev/null
-variables:
-# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
-# sync with the packages.config file.
-- name: DefaultGuardianVersion
- value: 0.109.0
-- name: GuardianPackagesConfigFile
- value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
\ No newline at end of file
Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to download file in $maxRetries attempts."
break
}
-
}
}
# If the version of msbuild is going to be xcopied,
# use this version. Version matches a package here:
- # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/Microsoft.DotNet.Arcade.MSBuild.Xcopy/versions/17.8.5
- $defaultXCopyMSBuildVersion = '17.8.5'
+ # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/Microsoft.DotNet.Arcade.MSBuild.Xcopy/versions/17.10.0-pre.4.0
+ $defaultXCopyMSBuildVersion = '17.10.0-pre.4.0'
if (!$vsRequirements) {
if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') {
InitializeVisualStudioEnvironmentVariables $vsInstallDir $vsMajorVersion
} else {
-
if (Get-Member -InputObject $GlobalJson.tools -Name 'xcopy-msbuild') {
$xcopyMSBuildVersion = $GlobalJson.tools.'xcopy-msbuild'
$vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0]
Invoke-WebRequest "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/flat2/$packageName/$packageVersion/$packageName.$packageVersion.nupkg" -OutFile $packagePath
})
+ if (!(Test-Path $packagePath)) {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "See https://dev.azure.com/dnceng/internal/_wiki/wikis/DNCEng%20Services%20Wiki/1074/Updating-Microsoft.DotNet.Arcade.MSBuild.Xcopy-WAS-RoslynTools.MSBuild-(xcopy-msbuild)-generation?anchor=troubleshooting for help troubleshooting issues with XCopy MSBuild"
+ throw
+ }
Unzip $packagePath $packageDir
}
}
function MSBuild {
- local args=$@
+ local args=( "$@" )
if [[ "$pipelines_log" == true ]]; then
InitializeBuildTool
InitializeToolset
args+=( "-logger:$selectedPath" )
fi
- MSBuild-Core ${args[@]}
+ MSBuild-Core "${args[@]}"
}
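+# Quoting fix above: `local args=$@` flattened the arguments into one word-split
+# string, so any argument containing spaces was broken apart before reaching
+# MSBuild-Core; capturing into an array and expanding it quoted preserves each
+# argument as-is. Illustrative helpers (not part of this script):
+#   f() { local a=$@;       printf '%s\n' $a; }         # "x y" prints as two lines
+#   g() { local a=( "$@" ); printf '%s\n' "${a[@]}"; }  # "x y" prints as one line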
function MSBuild-Core {
add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4640>) # 'instance' : construction of local static object is not thread-safe
add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4806>) # Unsafe operation involving type 'bool'.
+ # SDL requires the below warnings to be treated as errors:
+ # More info: https://liquid.microsoft.com/Web/Object/Read/ms.security/Requirements/Microsoft.Security.SystemsADM.10086
+ # (Access to that URL restricted to Microsoft employees.)
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4018>) # 'expression' : signed/unsigned mismatch
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4055>) # 'conversion' : from data pointer 'type1' to function pointer 'type2'
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4146>) # unary minus operator applied to unsigned type, result still unsigned
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4242>) # 'identifier' : conversion from 'type1' to 'type2', possible loss of data
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4244>) # 'conversion' : conversion from 'type1' to 'type2', possible loss of data
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4267>) # 'var' : conversion from 'size_t' to 'type', possible loss of data
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4302>) # 'conversion' : truncation from 'type 1' to 'type 2'
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4308>) # negative integral constant converted to unsigned type
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4509>) # nonstandard extension used: 'function' uses SEH and 'object' has destructor
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4510>) # 'class' : default constructor could not be generated
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4532>) # 'continue' : jump out of __finally/finally block has undefined behavior during termination handling
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4533>) # initialization of 'variable' is skipped by 'instruction'
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4610>) # object 'class' can never be instantiated - user-defined constructor required
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4611>) # interaction between 'function' and C++ object destruction is non-portable
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4700>) # uninitialized local variable 'name' used
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4701>) # Potentially uninitialized local variable 'name' used
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4703>) # Potentially uninitialized local pointer variable 'name' used
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4789>) # destination of memory copy is too small
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4995>) # 'function': name was marked as #pragma deprecated
+ add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/we4996>) # 'function': was declared deprecated
+
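+# $<$<COMPILE_LANGUAGE:C,CXX>:/weNNNN> above is a CMake generator expression: the
+# MSVC /we switch (promote warning NNNN to an error) is applied only when compiling
+# C or C++ sources, so it never leaks onto other languages' command lines, e.g.:
+#   add_compile_options("$<$<COMPILE_LANGUAGE:C,CXX>:/we4018>")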
# Set Warning Level 3:
add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/w34092>) # Sizeof returns 'unsigned long'.
add_compile_options($<$<COMPILE_LANGUAGE:C,CXX>:/w34121>) # Structure is sensitive to alignment.
container: ${{ parameters.container }}
${{ if ne(parameters.strategy, '') }}:
- 'error, we can no longer support the strategy feature in the new pipeline system. Please remove the strategy from the job template.'
+ 'error, we can no longer support the strategy feature in the new pipeline system. Please remove the strategy from the job template.': error
${{ if ne(parameters.dependsOn, '') }}:
dependsOn: ${{ parameters.dependsOn }}_${{ config.architecture }}_${{ config.configuration }}
displayName: 'Download Build Artifacts'
inputs:
targetPath: '$(Build.ArtifactStagingDirectory)/__download__'
- itemPattern: |
- Build_${{ parameters.dependsOn }}_${{ config.architecture }}_${{ config.configuration }}/bin/**
+ itemPattern: Build_${{ parameters.dependsOn }}_${{ config.architecture }}_${{ config.configuration }}/bin/**
checkDownloadedFiles: true
- task: CopyFiles@2
displayName: 'Binplace Downloaded Product'
-architecture ${{ config.architecture }}
$(_ExtraBuildParams)
$(_TestArgs)
- /p:OfficialBuildId=$(BUILD.BUILDNUMBER)
$(_InternalInstallArgs)
+ /p:OfficialBuildId=$(BUILD.BUILDNUMBER)
${{ if eq(parameters.testOnly, 'true') }}:
displayName: Test
${{ elseif eq(parameters.buildOnly, 'true') }}:
-architecture ${{ config.architecture }}
-skipmanaged
$(_Cross)
- /p:OfficialBuildId=$(BUILD.BUILDNUMBER)
$(_InternalInstallArgs)
+ /p:OfficialBuildId=$(BUILD.BUILDNUMBER)
displayName: Build Native
target: ${{ parameters.nativeBuildContainer }}
displayName: Gather diagnostic artifacts on failure
inputs:
SourceFolder: '$(Build.SourcesDirectory)/artifacts'
- TargetFolder: $(Build.ArtifactStagingDirectory)/diagnostics
+ TargetFolder: $(Build.ArtifactStagingDirectory)/artifacts_on_failure
Contents: |
bin/**
log/**
parameters:
displayName: Publish diagnostic artifacts on failure
inputs:
- targetPath: $(Build.ArtifactStagingDirectory)/diagnostics
- artifactName: DiagnosticArtifacts_$(_PhaseName)_Attempt$(System.JobAttempt)
+ targetPath: $(Build.ArtifactStagingDirectory)/artifacts_on_failure
+ artifactName: Artifacts_On_Failure_$(_PhaseName)_$(System.JobAttempt)
+ sbomEnabled: false # we don't need SBOM for non-shipping diagnostics assets
continueOnError: true
condition: failed()
displayName: Publish Logs
inputs:
targetPath: '$(Build.StagingDirectory)/BuildLogs'
- artifactName: Logs_$(_PhaseName)_Attempt$(System.JobAttempt)
+ artifactName: Logs_$(_PhaseName)_$(System.JobAttempt)
+ sbomEnabled: false # we don't need SBOM for logs
continueOnError: true
condition: always()
- - ${{ if and(eq(parameters.buildOnly, 'false'), eq(parameters.isCodeQLRun, 'false')) }}:
+ - ${{ if and(ne(parameters.buildOnly, 'true'), ne(parameters.isCodeQLRun, 'true')) }}:
# Publish test results to Azure Pipelines
- task: PublishTestResults@2
inputs:
testResultsFormat: xUnit
- testResultsFiles: '**/*UnitTests*.xml'
+ testResultsFiles: '**/*.xml'
searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults'
- failTaskOnFailedTests: true
testRunTitle: 'Tests $(_PhaseName)'
+ failTaskOnFailedTests: true
publishRunAttachments: true
mergeTestResults: true
- buildConfiguration: ${{ parameters.name }}
+ buildConfiguration: ${{ config.configuration }}
continueOnError: true
condition: always()
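+ # Note: flipping eq(parameters.X, 'false') to ne(parameters.X, 'true') above makes
+ # the guard also hold when buildOnly/isCodeQLRun are unset or empty, not only when
+ # they are the literal string 'false'.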
# # Internal build pools.
- ${{ if ne(variables['System.TeamProject'], 'public') }}:
- group: DotNet-Diagnostics-SDL-Params
- - group: DotNet-MSRC-Storage
- template: /eng/common/templates-official/variables/pool-providers.yml@self
- name: BuildPool
value: $(DncEngInternalBuildPool)
- name: WindowsImage
- value: 1es-windows-2022-pt
+ value: 1es-windows-2022
- name: LinuxImage
- value: 1es-ubuntu-2204-pt
+ value: 1es-ubuntu-2204
- name: macOSImage
value: macOS-latest
- name: sourceBuildTemplate
parameters:
- name: stages
type: stageList
-- name: isOfficialBuild
- type: boolean
extends:
template: templateDispatch.yml
parameters:
- ${{ if parameters.isOfficialBuild }}:
- templatePath: template1es.yml
- ${{ else }}:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
templatePath: templatePublic.yml
+ ${{ elseif notin(variables['Build.Reason'], 'PullRequest') }}:
+ templatePath: templateInternal.yml
+ pipelineTemplate: v1/1ES.Official.PipelineTemplate.yml@1ESPipelineTemplates
+ ${{ else }}:
+ templatePath: templateInternal.yml
+ pipelineTemplate: v1/1ES.Unofficial.PipelineTemplate.yml@1ESPipelineTemplates
stages: ${{ parameters.stages }}
ROOTFS_DIR: /crossrootfs/arm64
linux_musl_x64:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.13-WithNode
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:azurelinux-3.0-cross-amd64-alpine-net8.0
+ env:
+ ROOTFS_DIR: /crossrootfs/x64
linux_musl_arm:
image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-arm-alpine
env:
ROOTFS_DIR: /crossrootfs/arm64
+ linux_s390x:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-cross-s390x
+ env:
+ ROOTFS_DIR: /crossrootfs/s390x
+
+ linux_ppc64le:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-cross-ppc64le
+ env:
+ ROOTFS_DIR: /crossrootfs/ppc64le
+
+ linux_riscv64:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04-cross-riscv64
+ env:
+ ROOTFS_DIR: /crossrootfs/riscv64
+
test_linux_x64:
image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream-9
test_linux_musl_x64:
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.13-WithNode
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.19-WithNode
options: --cap-add=SYS_PTRACE
test_debian_11_amd64:
displayName: Release Preparation
jobs:
- job: PrepareReleaseJob
- displayName: Prepare release with Darc
+ displayName: Prepare Release
${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), startsWith(variables['Build.SourceBranch'], 'refs/heads/release/')) }}:
templateContext:
outputs:
variables:
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), startsWith(variables['Build.SourceBranch'], 'refs/heads/release/')) }}:
- group: DotNet-Diagnostics-Storage
- - group: DotNet-DotNetStage-Storage
+ - group: DotNetBuilds storage account read tokens
- group: Release-Pipeline
steps:
- ${{ if in(variables['Build.Reason'], 'PullRequest') }}:
- script: '$(Build.Repository.LocalPath)\dotnet.cmd build $(Build.Repository.LocalPath)\eng\release\DiagnosticsReleaseTool\DiagnosticsReleaseTool.csproj -c Release /bl'
workingDirectory: '$(System.ArtifactsDirectory)'
displayName: 'Build Manifest generation and asset publishing tool'
- - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), startsWith(variables['Build.SourceBranch'], 'refs/heads/release/')) }}:
+ - ${{ elseif and(ne(variables['System.TeamProject'], 'public'), startsWith(variables['Build.SourceBranch'], 'refs/heads/release/')) }}:
- task: UseDotNet@2
displayName: 'Use .NET Core runtime 6.x'
inputs:
filePath: '$(Build.Repository.LocalPath)/eng/release/Scripts/AcquireBuild.ps1'
arguments: >-
-BarBuildId "$(BARBuildId)"
- -AzdoToken "$(dn-bot-dotnet-all-scopes)"
- -MaestroToken "$(MaestroAccessToken)"
- -GitHubToken "$(BotAccount-dotnet-bot-repo-PAT)"
- -DownloadTargetPath "$(System.ArtifactsDirectory)\ReleaseTarget"
- -SasSuffixes "$(dotnetclichecksumsmsrc-dotnet-read-list-sas-token),$(dotnetclimsrc-read-sas-token)"
-ReleaseVersion "$(Build.BuildNumber)"
+ -DownloadTargetPath "$(System.ArtifactsDirectory)\ReleaseTarget"
+ -AzdoToken "$(dn-bot-all-drop-rw-code-rw-release-all)"
+ -MaestroToken "$(MaestroAccessToken)"
+ -SasSuffixes "$(dotnetbuilds-internal-checksums-container-read-token),$(dotnetbuilds-internal-container-read-token)"
workingDirectory: '$(Build.Repository.LocalPath)'
+ - task: AzureCLI@2
+ displayName: 'Use WIF to obtain credentials for Azure CLI'
+ inputs:
+ azureSubscription: 'dotnetstage-diagnostics-tools-rw'
+ scriptType: pscore
+ scriptLocation: inlineScript
+ addSpnToEnvironment: true
+ inlineScript: |
+ echo "##vso[task.setvariable variable=ARM_CLIENT_ID]$env:servicePrincipalId"
+ echo "##vso[task.setvariable variable=ARM_ID_TOKEN]$env:idToken"
+ echo "##vso[task.setvariable variable=ARM_TENANT_ID]$env:tenantId"
+ - script: az login --service-principal -u $(ARM_CLIENT_ID) --tenant $(ARM_TENANT_ID) --allow-no-subscriptions --federated-token $(ARM_ID_TOKEN)
+ displayName: 'Use az to authenticate with the federated token'
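+ # The two steps above bridge the AzureCLI task's workload identity federation into
+ # plain `az` usage: addSpnToEnvironment exposes the service principal id, tenant id,
+ # and a short-lived OIDC token, which are re-published as pipeline variables and fed
+ # to `az login --federated-token`, so no long-lived storage key is materialized.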
- script: >-
$(Build.Repository.LocalPath)\dotnet.cmd run --project $(Build.Repository.LocalPath)\eng\release\DiagnosticsReleaseTool\DiagnosticsReleaseTool.csproj -c Release
--
--staging-directory "$(System.ArtifactsDirectory)\ReleaseStaging"
--release-name "$(Build.BuildNumber)"
--account-name "$(dotnet-diagnostics-storage-accountname)"
- --account-key "$(dotnetstage-storage-key)"
+ --client-id $(ARM_CLIENT_ID)
--container-name "$(dotnet-diagnostics-container-name)"
- --sas-valid-days "$(dotnet-diagnostics-storage-retentiondays)"
-v True
workingDirectory: '$(Build.Repository.LocalPath)\'
displayName: 'Manifest generation and asset publishing'
displayName: 'Publish Pipeline Artifact'
condition: succeeded()
continueOnError: true
+ enableSbom: true
steps:
- ${{ if ne(variables['System.TeamProject'], 'public') }}:
inputs:
targetPath: ${{ parameters.inputs.targetPath }}
artifactName: ${{ parameters.inputs.artifactName }}
+ enableSbom: ${{ parameters.enableSbom }}
condition: ${{ parameters.condition }}
displayName: ${{ parameters.displayName }}
continueOnError: ${{ parameters.continueOnError }}
+++ /dev/null
-
-parameters:
- - name: templatePath
- type: string
- default: no
- - name: stages
- type: stageList
- - name: containers
- type: object
-
-
-resources:
- repositories:
- - repository: 1ESPipelineTemplates
- type: git
- name: 1ESPipelineTemplates/1ESPipelineTemplates
- ref: refs/tags/release
-
-extends:
- template: v1/1ES.Official.PipelineTemplate.yml@1ESPipelineTemplates
- parameters:
- pool:
- name: netcore1espool-internal
- image: 1es-windows-2022-pt
- os: windows
-
- containers:
- ${{ parameters.containers }}
-
- stages: ${{ parameters.stages }}
parameters:
- name: templatePath
type: string
+ - name: pipelineTemplate
+ type: string
+ default: ''
- name: stages
type: stageList
- name: containers
extends:
template: ${{ parameters.templatePath }}
parameters:
+ pipelineTemplate: ${{ parameters.pipelineTemplate }}
stages: ${{ parameters.stages }}
- containers: ${{ parameters.containers }}
\ No newline at end of file
+ containers: ${{ parameters.containers }}
--- /dev/null
+
+parameters:
+ - name: templatePath
+ type: string
+ default: no
+ - name: pipelineTemplate
+ type: string
+ - name: stages
+ type: stageList
+ - name: containers
+ type: object
+
+resources:
+ repositories:
+ - repository: 1ESPipelineTemplates
+ type: git
+ name: 1ESPipelineTemplates/1ESPipelineTemplates
+ ref: refs/tags/release
+
+extends:
+ template: ${{ parameters.pipelineTemplate }}
+ parameters:
+ pool:
+ name: $(BuildPool)
+ image: $(WindowsImage)
+ os: windows
+
+ containers:
+ ${{ parameters.containers }}
+
+ stages: ${{ parameters.stages }}
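+# templateDispatch.yml resolves ${{ parameters.templatePath }} at compile time, so
+# public builds expand templatePublic.yml directly while internal builds expand this
+# file, which in turn extends the official or unofficial 1ES pipeline template chosen
+# via the pipelineTemplate parameter.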
- name: templatePath
type: string
default: no
+ - name: pipelineTemplate
+ type: string
- name: stages
type: stageList
- name: containers
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
+using Azure.Core;
+using Azure.Identity;
using Azure.Storage;
using Azure.Storage.Blobs;
using Azure.Storage.Blobs.Models;
{
public class AzureBlobBublisher : IPublisher
{
- private const int ClockSkewSec = 15 * 60;
private const int MaxRetries = 15;
private const int MaxFullLoopRetries = 5;
private readonly TimeSpan FullLoopRetryDelay = TimeSpan.FromSeconds(1);
- private const string AccessPolicyDownloadId = "DownloadDrop";
private readonly string _accountName;
- private readonly string _accountKey;
+ private readonly string _clientId;
private readonly string _containerName;
private readonly string _releaseName;
- private readonly int _sasValidDays;
private readonly ILogger _logger;
private BlobContainerClient _client;
}
}
- private StorageSharedKeyCredential AccountCredential
+ private TokenCredential Credentials
{
get
{
- StorageSharedKeyCredential credential = new(_accountName, _accountKey);
- return credential;
+ if (_clientId == null)
+ {
+ // Local development scenario. Use the default credential.
+ return new DefaultAzureCredential();
+ }
+
+ return new DefaultAzureCredential(new DefaultAzureCredentialOptions { ManagedIdentityClientId = _clientId });
}
}
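+ // DefaultAzureCredential probes a chain of sources (environment variables, managed
+ // identity, Azure CLI login, ...). ManagedIdentityClientId pins which user-assigned
+ // managed identity to use when the host exposes several; with no client id the
+ // ambient developer credential is picked up instead.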
}
}
- public AzureBlobBublisher(string accountName, string accountKey, string containerName, string releaseName, int sasValidDays, ILogger logger)
+ public AzureBlobBublisher(string accountName, string clientId, string containerName, string releaseName, ILogger logger)
{
_accountName = accountName;
- _accountKey = accountKey;
+ _clientId = clientId;
_containerName = containerName;
_releaseName = releaseName;
- _sasValidDays = sasValidDays;
_logger = logger;
}
await blobClient.UploadAsync(srcStream, overwrite: true, ct);
- BlobSasBuilder sasBuilder = new()
- {
- BlobContainerName = client.Name,
- BlobName = blobClient.Name,
- Identifier = AccessPolicyDownloadId,
- Protocol = SasProtocol.Https
- };
- Uri accessUri = blobClient.GenerateSasUri(sasBuilder);
-
using BlobDownloadStreamingResult blobStream = (await blobClient.DownloadStreamingAsync(cancellationToken: ct)).Value;
srcStream.Position = 0;
completed = await VerifyFileStreamsMatchAsync(srcStream, blobStream, ct);
- result = accessUri;
+ result = blobClient.Uri;
}
catch (IOException ioEx) when (ioEx is not PathTooLongException)
{
{
if (_client == null)
{
- BlobServiceClient serviceClient = new(AccountBlobUri, AccountCredential, BlobOptions);
+ BlobServiceClient serviceClient = new(AccountBlobUri, Credentials, BlobOptions);
_logger.LogInformation($"Attempting to connect to {serviceClient.Uri} to store blobs.");
BlobContainerClient newClient;
try
{
newClient = serviceClient.GetBlobContainerClient(_containerName);
- if (!(await newClient.ExistsAsync(ct)).Value)
+ if (!await newClient.ExistsAsync(ct))
{
- newClient = (await serviceClient.CreateBlobContainerAsync(_containerName, PublicAccessType.None, metadata: null, ct));
+ newClient = await serviceClient.CreateBlobContainerAsync(_containerName, PublicAccessType.None, metadata: null, ct);
}
}
catch (Exception ex)
continue;
}
- try
- {
- DateTime baseTime = DateTime.UtcNow;
- // Add the new (or update existing) "download" policy to the container
- // This is used to mint the SAS tokens without an expiration policy
- // Expiration can be added later by modifying this policy
- BlobSignedIdentifier downloadPolicyIdentifier = new()
- {
- Id = AccessPolicyDownloadId,
- AccessPolicy = new BlobAccessPolicy()
- {
- Permissions = "r",
- PolicyStartsOn = new DateTimeOffset(baseTime.AddSeconds(-ClockSkewSec)),
- PolicyExpiresOn = new DateTimeOffset(DateTime.UtcNow.AddDays(_sasValidDays).AddSeconds(ClockSkewSec)),
- }
- };
- _logger.LogInformation($"Writing download access policy: {AccessPolicyDownloadId} to {_containerName}.");
- await newClient.SetAccessPolicyAsync(PublicAccessType.None, new BlobSignedIdentifier[] { downloadPolicyIdentifier }, cancellationToken: ct);
- }
- catch (Exception ex)
- {
- _logger.LogWarning(ex, $"Failed to write access policy for {_containerName}, retrying.");
- continue;
- }
-
_logger.LogInformation($"Container {_containerName} is ready.");
_client = newClient;
break;
public DirectoryInfo StagingDirectory { get; }
public string ReleaseName { get; }
public string AccountName { get; }
- public string AccountKey { get; }
+ public string ClientId { get; }
public string ContainerName { get; }
- public int SasValidDays { get; }
public Config(
FileInfo toolManifest,
DirectoryInfo stagingDirectory,
string releaseName,
string accountName,
- string accountKey,
- string containerName,
- int sasValidDays)
+ string clientId,
+ string containerName)
{
ToolManifest = toolManifest;
ShouldVerifyManifest = verifyToolManifest;
StagingDirectory = stagingDirectory;
ReleaseName = releaseName;
AccountName = accountName;
- AccountKey = accountKey;
+ ClientId = clientId;
ContainerName = containerName;
- SasValidDays = sasValidDays;
}
}
}
ToolManifestVerificationOption(), DiagnosticLoggingOption(),
// Outputs
StagingPathOption(),
- AzureStorageAccountNameOption(), AzureStorageAccountKeyOption(), AzureStorageContainerNameOption(), AzureStorageSasExpirationOption()
+ AzureStorageAccountNameOption(), AzureStorageAccountKeyOption(), AzureStorageContainerNameOption()
};
private static Option<bool> DiagnosticLoggingOption() =>
new(
- aliases: new[] { "-v", "--verbose" },
+ aliases: ["-v", "--verbose"],
description: "Enables diagnostic logging",
getDefaultValue: () => false);
private static Option ToolManifestPathOption() =>
new Option<FileInfo>(
- aliases: new[] { "--tool-manifest", "-t" },
+ aliases: ["--tool-manifest", "-t"],
description: "Full path to the manifest of tools and packages to publish.")
{
IsRequired = true
private static Option<DirectoryInfo> InputDropPathOption() =>
new Option<DirectoryInfo>(
- aliases: new[] { "-i", "--input-drop-path" },
+ aliases: ["-i", "--input-drop-path"],
description: "Path to drop generated by `darc gather-drop`")
{
IsRequired = true
private static Option<string> ReleaseNameOption() =>
new(
- aliases: new[] { "-r", "--release-name" },
+ aliases: ["-r", "--release-name"],
description: "Name of this release.")
{
IsRequired = true,
private static Option StagingPathOption() =>
new Option<DirectoryInfo>(
- aliases: new[] { "--staging-directory", "-s" },
+ aliases: ["--staging-directory", "-s"],
description: "Full path to the staging path.",
getDefaultValue: () => new DirectoryInfo(
Path.Join(Path.GetTempPath(), Path.GetRandomFileName())))
private static Option<string> AzureStorageAccountNameOption() =>
new(
- aliases: new[] { "-n", "--account-name" },
+ aliases: ["-n", "--account-name"],
description: "Storage account name, must be in public azure cloud.")
{
IsRequired = true,
private static Option<string> AzureStorageAccountKeyOption() =>
new(
- aliases: new[] { "-k", "--account-key" },
- description: "Storage account key, in base 64 format.")
+ aliases: ["-k", "--client-id"],
+ description: "Identity Client ID. If left blank, ambient identity will be used.",
+ getDefaultValue: () => null)
{
- IsRequired = true,
+ IsRequired = false,
};
private static Option<string> AzureStorageContainerNameOption() =>
new(
- aliases: new[] { "-c", "--container-name" },
+ aliases: ["-c", "--container-name"],
description: "Storage account container name where the files will be uploaded.")
{
IsRequired = true,
};
-
- private static Option<int> AzureStorageSasExpirationOption() =>
- new(
- aliases: new[] { "--sas-valid-days" },
- description: "Number of days to allow access to the blobs via the provided SAS URIs.",
- getDefaultValue: () => 1);
}
}
DirectoryInfo basePublishDirectory = darcLayoutHelper.GetShippingDirectoryForSingleProjectVariants(DiagnosticsRepoHelpers.ProductNames);
string publishManifestPath = Path.Combine(releaseConfig.StagingDirectory.FullName, ManifestName);
- IPublisher releasePublisher = new AzureBlobBublisher(releaseConfig.AccountName, releaseConfig.AccountKey, releaseConfig.ContainerName, releaseConfig.ReleaseName, releaseConfig.SasValidDays, logger);
+ IPublisher releasePublisher = new AzureBlobBublisher(releaseConfig.AccountName, releaseConfig.ClientId, releaseConfig.ContainerName, releaseConfig.ReleaseName, logger);
IManifestGenerator manifestGenerator = new DiagnosticsManifestGenerator(releaseMetadata, releaseConfig.ToolManifest, logger);
using Release diagnosticsRelease = new(
<PackageReference Include="Microsoft.Extensions.Logging.Configuration" Version="6.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="6.0.0" />
- <PackageReference Include="Azure.Storage.Blobs" Version="[12.13.0]" />
+ <PackageReference Include="Azure.Identity" Version="[1.11.3]" />
+ <PackageReference Include="Azure.Storage.Blobs" Version="[12.20.0]" />
<PackageReference Include="System.CommandLine" Version="2.0.0-beta1.20468.1" />
</ItemGroup>
{
public static class DiagnosticsRepoHelpers
{
- public static readonly string[] ProductNames = new[] { "diagnostics", "dotnet-diagnostics" };
- public static readonly string[] RepositoryUrls = new[] { "https://github.com/dotnet/diagnostics", "https://dev.azure.com/dnceng/internal/_git/dotnet-diagnostics" };
+ public static readonly string[] ProductNames = ["diagnostics", "dotnet-diagnostics"];
+ public static readonly string[] RepositoryUrls = ["https://github.com/dotnet/diagnostics", "https://dev.azure.com/dnceng/internal/_git/dotnet-diagnostics"];
public static string BundleToolsPathInDrop => System.IO.Path.Combine("diagnostics", "bundledtools");
public const string BundledToolsPrefix = "diagnostic-tools-";
public const string BundledToolsCategory = "ToolBundleAssets";
[Parameter(Mandatory=$true)][string] $SasSuffixes,
[Parameter(Mandatory=$true)][string] $AzdoToken,
[Parameter(Mandatory=$true)][string] $MaestroToken,
- [Parameter(Mandatory=$true)][string] $GitHubToken,
[Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
[Parameter(Mandatory=$false)][string] $DarcVersion = $null,
[switch] $help,
Write-Host " -SasSuffixes <value> Comma separated list of potential uri suffixes that can be used if anonymous access to a blob uri fails. Appended directly to the end of the URI. Use full SAS syntax with ?."
Write-Host " -AzdoToken <value> Azure DevOps token to use for builds queries"
Write-Host " -MaestroToken <value> Maestro token to use for querying BAR"
- Write-Host " -GitHubToken <value> GitHub token to use for querying repository information"
Write-Host " -MaestroApiEndPoint <value> BAR endpoint to use for build queries."
Write-Host ""
}
--output-dir $DownloadTargetPath `
--overwrite `
--sas-suffixes $SasSuffixes `
- --github-pat $GitHubToken `
--azdev-pat $AzdoToken `
--bar-uri $MaestroApiEndPoint `
--password $MaestroToken `
{
"name": "dotnet-dsrouter",
"rids": ["win-x64", "win-x86", "win-arm", "win-arm64", "linux-x64", "linux-musl-arm64", "osx-x64", "linux-arm64", "linux-musl-x64", "linux-arm"]
+ },
+ {
+ "name": "dotnet-symbol",
+ "rids": ["win-x64", "win-x86", "win-arm", "win-arm64", "linux-x64", "linux-musl-arm64", "osx-x64", "linux-arm64", "linux-musl-x64", "linux-arm"]
}
]
},
"dotnet-sos",
"dotnet-trace",
"dotnet-stack",
+ "dotnet-symbol",
"dotnet-dsrouter",
"Microsoft.Diagnostics.NETCore.Client"
]
{
"tools": {
- "dotnet": "9.0.100-preview.1.24101.2",
+ "dotnet": "9.0.100-preview.4.24267.66",
"runtimes": {
"dotnet": [
"$(MicrosoftNETCoreApp60Version)",
},
"msbuild-sdks": {
"Microsoft.Build.NoTargets": "3.5.0",
- "Microsoft.DotNet.Arcade.Sdk": "9.0.0-beta.24207.1"
+ "Microsoft.DotNet.Arcade.Sdk": "9.0.0-beta.24281.1"
}
}
<ItemGroup>
<PackageReference Include="Microsoft.Diagnostics.Runtime" Version="$(MicrosoftDiagnosticsRuntimeVersion)" />
- <PackageReference Include="Microsoft.SymbolStore" Version="$(MicrosoftSymbolStoreVersion)" />
<PackageReference Include="System.Reflection.Metadata" Version="$(SystemReflectionMetadataVersion)" />
<PackageReference Include="System.CommandLine" Version="$(SystemCommandLineVersion)" />
<PackageReference Include="System.Memory" Version="$(SystemMemoryVersion)" />
<ItemGroup>
<ProjectReference Include="$(MSBuildThisFileDirectory)..\Microsoft.Diagnostics.DebugServices\Microsoft.Diagnostics.DebugServices.csproj" />
+ <ProjectReference Include="$(MSBuildThisFileDirectory)..\Microsoft.SymbolStore\Microsoft.SymbolStore.csproj" />
</ItemGroup>
</Project>
/// Symbol server URLs
/// </summary>
public const string MsdlSymbolServer = "https://msdl.microsoft.com/download/symbols/";
- public const string SymwebSymbolServer = "https://symweb/";
private readonly IHost _host;
private string _defaultSymbolCache;
/// </summary>
public bool IsSymbolStoreEnabled => _symbolStore != null;
+ /// <summary>
+ /// The default symbol server URL (normally msdl) when not overridden in AddSymbolServer.
+ /// </summary>
+ public string DefaultSymbolPath { get; set; } = MsdlSymbolServer;
+
/// <summary>
/// The default symbol cache path:
- ///
/// * dbgeng on Windows uses the dbgeng symbol cache path: %PROGRAMDATA%\dbg\sym
/// * dotnet-dump on Windows uses the VS symbol cache path: %TEMPDIR%\SymbolCache
/// * dotnet-dump/lldb on Linux/MacOS uses: $HOME/.dotnet/symbolcache
}
if (symbolServerPath != null)
{
- if (!AddSymbolServer(msdl: false, symweb: false, symbolServerPath.Trim()))
+ if (!AddSymbolServer(symbolServerPath: symbolServerPath.Trim()))
{
return false;
}
/// <summary>
/// Add symbol server to search path.
/// </summary>
- /// <param name="msdl">if true, use the public Microsoft server</param>
- /// <param name="symweb">if true, use symweb internal server and protocol (file.ptr)</param>
- /// <param name="symbolServerPath">symbol server url (optional)</param>
+ /// <param name="symbolServerPath">symbol server url (optional, uses <see cref="DefaultSymbolPath"/> if null)</param>
/// <param name="authToken">PAT for secure symbol server (optional)</param>
/// <param name="timeoutInMinutes">symbol server timeout in minutes (optional uses <see cref="DefaultTimeout"/> if null)</param>
/// <param name="retryCount">number of retries (optional uses <see cref="DefaultRetryCount"/> if null)</param>
/// <returns>if false, failure</returns>
public bool AddSymbolServer(
- bool msdl,
- bool symweb,
string symbolServerPath = null,
string authToken = null,
int? timeoutInMinutes = null,
int? retryCount = null)
{
- bool internalServer = false;
-
// Add symbol server URL if exists
- if (symbolServerPath == null)
- {
- if (msdl)
- {
- symbolServerPath = MsdlSymbolServer;
- }
- else if (symweb)
- {
- symbolServerPath = SymwebSymbolServer;
- internalServer = true;
- }
- }
- else
- {
- // Use the internal symbol store for symweb
- internalServer = symbolServerPath.Contains("symweb");
- }
-
- // Return error if symbol server path is null and msdl and symweb are false.
- if (symbolServerPath == null)
- {
- return false;
- }
+ symbolServerPath ??= DefaultSymbolPath;
// Validate symbol server path
if (!Uri.TryCreate(symbolServerPath.TrimEnd('/') + '/', UriKind.Absolute, out Uri uri))
if (!IsDuplicateSymbolStore<HttpSymbolStore>(store, (httpSymbolStore) => uri.Equals(httpSymbolStore.Uri)))
{
// Create http symbol server store
- HttpSymbolStore httpSymbolStore;
- if (internalServer)
- {
- httpSymbolStore = new SymwebHttpSymbolStore(Tracer.Instance, store, uri);
- }
- else
- {
- httpSymbolStore = new HttpSymbolStore(Tracer.Instance, store, uri, personalAccessToken: authToken);
- }
+ HttpSymbolStore httpSymbolStore = new(Tracer.Instance, store, uri, personalAccessToken: authToken);
httpSymbolStore.Timeout = TimeSpan.FromMinutes(timeoutInMinutes.GetValueOrDefault(DefaultTimeout));
httpSymbolStore.RetryCount = retryCount.GetValueOrDefault(DefaultRetryCount);
SetSymbolStore(httpSymbolStore);
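// Minimal usage sketch, assuming an ISymbolService instance named symbolService
// (the custom server URL is hypothetical):
symbolService.AddSymbolServer();                                   // uses DefaultSymbolPath (msdl)
symbolService.AddSymbolServer(timeoutInMinutes: 6, retryCount: 5); // msdl with overrides
symbolService.AddSymbolServer("https://example.com/symbols/");     // explicit server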
/// </summary>
bool IsSymbolStoreEnabled { get; }
+ /// <summary>
+ /// The default symbol server URL (normally msdl) when not overridden in AddSymbolServer.
+ /// </summary>
+ string DefaultSymbolPath { get; }
+
/// <summary>
/// The default symbol cache path:
- ///
/// * dbgeng on Windows uses the dbgeng symbol cache path: %PROGRAMDATA%\dbg\sym
/// * dotnet-dump on Windows uses the VS symbol cache path: %TEMPDIR%\SymbolCache
/// * dotnet-dump/lldb on Linux/MacOS uses: $HOME/.dotnet/symbolcache
/// </summary>
- string DefaultSymbolCache { get; set; }
+ string DefaultSymbolCache { get; }
/// <summary>
/// The time out in minutes passed to the HTTP symbol store when not overridden in AddSymbolServer.
/// </summary>
- int DefaultTimeout { get; set; }
+ int DefaultTimeout { get; }
/// <summary>
/// The retry count passed to the HTTP symbol store when not overridden in AddSymbolServer.
/// </summary>
- int DefaultRetryCount { get; set; }
+ int DefaultRetryCount { get; }
/// <summary>
/// Reset any HTTP symbol stores marked with a client failure
/// <summary>
/// Add symbol server to search path.
/// </summary>
- /// <param name="msdl">if true, use the public Microsoft server</param>
- /// <param name="symweb">if true, use symweb internal server and protocol (file.ptr)</param>
- /// <param name="symbolServerPath">symbol server url (optional)</param>
+ /// <param name="symbolServerPath">symbol server url (optional, uses <see cref="DefaultSymbolPath"/> if null)</param>
/// <param name="authToken">PAT for secure symbol server (optional)</param>
- /// <param name="timeoutInMinutes">symbol server timeout in minutes (optional uses <see cref="DefaultTimeout"/> if null)</param>
- /// <param name="retryCount">number of retries (optional uses <see cref="DefaultRetryCount"/> if null)</param>
+ /// <param name="timeoutInMinutes">symbol server timeout in minutes (optional, uses <see cref="DefaultTimeout"/> if null)</param>
+ /// <param name="retryCount">number of retries (optional, uses <see cref="DefaultRetryCount"/> if null)</param>
/// <returns>if false, failure</returns>
- bool AddSymbolServer(bool msdl, bool symweb, string symbolServerPath = null, string authToken = null, int? timeoutInMinutes = null, int? retryCount = null);
+ bool AddSymbolServer(string symbolServerPath = null, string authToken = null, int? timeoutInMinutes = null, int? retryCount = null);
/// <summary>
/// Add cache path to symbol search path
[Option(Name = "--ms", Aliases = new string[] { "-ms" }, Help = "Use the public Microsoft symbol server.")]
public bool MicrosoftSymbolServer { get; set; }
- [Option(Name = "--mi", Aliases = new string[] { "-mi" }, Help = "Use the internal symweb symbol server.")]
- public bool InternalSymbolServer { get; set; }
-
[Option(Name = "--disable", Aliases = new string[] { "-disable" }, Help = "Clear or disable symbol download support.")]
public bool Disable { get; set; }
public override void Invoke()
{
- if (MicrosoftSymbolServer && InternalSymbolServer)
- {
- throw new DiagnosticsException("Cannot have both -ms and -mi options");
- }
- if ((MicrosoftSymbolServer || InternalSymbolServer) && !string.IsNullOrEmpty(SymbolServerUrl))
+ if (MicrosoftSymbolServer && !string.IsNullOrEmpty(SymbolServerUrl))
{
- throw new DiagnosticsException("Cannot have -ms or -mi option and a symbol server path");
+ throw new DiagnosticsException("Cannot have -ms option and a symbol server path");
}
if (Disable)
{
{
SymbolService.Reset();
}
- if (MicrosoftSymbolServer || InternalSymbolServer || !string.IsNullOrEmpty(SymbolServerUrl))
+ if (MicrosoftSymbolServer || !string.IsNullOrEmpty(SymbolServerUrl))
{
if (string.IsNullOrEmpty(Cache))
{
Cache = SymbolService.DefaultSymbolCache;
}
- SymbolService.AddSymbolServer(MicrosoftSymbolServer, InternalSymbolServer, SymbolServerUrl, AccessToken, Timeout, RetryCount);
+ SymbolService.AddSymbolServer(SymbolServerUrl, AccessToken, Timeout, RetryCount);
}
if (!string.IsNullOrEmpty(Cache))
{
<ItemGroup>
<PackageReference Include="Microsoft.Diagnostics.Runtime" Version="$(MicrosoftDiagnosticsRuntimeVersion)" />
- <PackageReference Include="Microsoft.SymbolStore" Version="$(MicrosoftSymbolStoreVersion)" />
</ItemGroup>
<ItemGroup>
- <ProjectReference Include="..\Microsoft.Diagnostics.DebugServices\Microsoft.Diagnostics.DebugServices.csproj" />
+ <ProjectReference Include="$(MSBuildThisFileDirectory)..\Microsoft.Diagnostics.DebugServices\Microsoft.Diagnostics.DebugServices.csproj" />
+ <ProjectReference Include="$(MSBuildThisFileDirectory)..\Microsoft.SymbolStore\Microsoft.SymbolStore.csproj" />
</ItemGroup>
</Project>
public override void Invoke()
{
- if (TryParseAddress(SourceAddress, out ulong source))
+ if (!TryParseAddress(SourceAddress, out ulong source))
{
throw new ArgumentException($"Could not parse argument 'source': {source}");
}
- if (TryParseAddress(TargetAddress, out ulong target))
+ if (!TryParseAddress(TargetAddress, out ulong target))
{
- throw new ArgumentException($"Could not parse argument 'source': {target}");
+ throw new ArgumentException($"Could not parse argument 'target': {TargetAddress}");
}
ClrHeap heap = Runtime.Heap;
return _configurations.SelectMany(c => c.GetProviders()).ToList();
}
- public override bool RequestRundown
+ public override long RundownKeyword
{
- get => _configurations.Any(c => c.RequestRundown);
+ get => _configurations.Select(c => c.RundownKeyword).Aggregate((x, y) => x | y);
+ set => throw new NotSupportedException();
+ }
+
+ public override RetryStrategy RetryStrategy
+ {
+ get
+ {
+ RetryStrategy result = RetryStrategy.NothingToRetry;
+ foreach (MonitoringSourceConfiguration configuration in _configurations)
+ {
+ if (configuration.RetryStrategy == RetryStrategy.ForbiddenToRetry)
+ {
+ // Nothing overrides ForbiddenToRetry
+ return RetryStrategy.ForbiddenToRetry;
+ }
+ else if (result == RetryStrategy.NothingToRetry)
+ {
+ // Anything overrides NothingToRetry
+ result = configuration.RetryStrategy;
+ }
+ else if (result == RetryStrategy.DropKeywordDropRundown)
+ {
+ if (configuration.RetryStrategy == RetryStrategy.DropKeywordKeepRundown)
+ {
+ // DropKeywordKeepRundown overrides DropKeywordDropRundown
+ result = RetryStrategy.DropKeywordKeepRundown;
+ }
+ }
+ }
+ return result;
+ }
set => throw new NotSupportedException();
}
}
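// Worked example of the aggregation above: configurations with
// { NothingToRetry, DropKeywordDropRundown, DropKeywordKeepRundown } combine to
// DropKeywordKeepRundown regardless of order, while a single ForbiddenToRetry
// configuration forces the aggregate to ForbiddenToRetry no matter what else
// is present.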
public AspNetTriggerSourceConfiguration(float? heartbeatIntervalSeconds = null)
{
- RequestRundown = false;
+ RundownKeyword = 0;
_heartbeatIntervalSeconds = heartbeatIntervalSeconds;
}
public override IList<EventPipeProvider> GetProviders() =>
new EventPipeProvider[]
{
- new EventPipeProvider(SampleProfilerProviderName, System.Diagnostics.Tracing.EventLevel.Informational),
- new EventPipeProvider("Microsoft-Windows-DotNETRuntime", System.Diagnostics.Tracing.EventLevel.Informational, (long) Tracing.Parsers.ClrTraceEventParser.Keywords.Default)
+ new(SampleProfilerProviderName, System.Diagnostics.Tracing.EventLevel.Informational),
+ new("Microsoft-Windows-DotNETRuntime", System.Diagnostics.Tracing.EventLevel.Informational, (long) Tracing.Parsers.ClrTraceEventParser.Keywords.Default)
};
}
}
private readonly IEnumerable<EventPipeProvider> _providers;
private readonly int _bufferSizeInMB;
- public EventPipeProviderSourceConfiguration(bool requestRundown = true, int bufferSizeInMB = 256, params EventPipeProvider[] providers)
+ public EventPipeProviderSourceConfiguration(long rundownKeyword = EventPipeSession.DefaultRundownKeyword, int bufferSizeInMB = 256, params EventPipeProvider[] providers)
{
_providers = providers;
- RequestRundown = requestRundown;
+ RundownKeyword = rundownKeyword;
_bufferSizeInMB = bufferSizeInMB;
}
{
public GCDumpSourceConfiguration()
{
- RequestRundown = false;
+ RundownKeyword = 0;
}
public override IList<EventPipeProvider> GetProviders()
{
public GcCollectConfiguration()
{
- RequestRundown = false;
+ RundownKeyword = (long)Tracing.Parsers.ClrTraceEventParser.Keywords.GC;
+ RetryStrategy = RetryStrategy.DropKeywordDropRundown;
}
public override IList<EventPipeProvider> GetProviders() =>
new EventPipeProvider[]
{
- new EventPipeProvider("Microsoft-Windows-DotNETRuntime", System.Diagnostics.Tracing.EventLevel.Informational, (long) Tracing.Parsers.ClrTraceEventParser.Keywords.GC),
- new EventPipeProvider("Microsoft-Windows-DotNETRuntimePrivate", System.Diagnostics.Tracing.EventLevel.Informational, (long) Tracing.Parsers.ClrTraceEventParser.Keywords.GC),
+ new("Microsoft-Windows-DotNETRuntime", System.Diagnostics.Tracing.EventLevel.Informational, (long) Tracing.Parsers.ClrTraceEventParser.Keywords.GC),
+ new("Microsoft-Windows-DotNETRuntimePrivate", System.Diagnostics.Tracing.EventLevel.Informational, (long) Tracing.Parsers.ClrTraceEventParser.Keywords.GC),
};
}
}
public HttpRequestSourceConfiguration()
{
//CONSIDER removing rundown for this scenario.
- RequestRundown = true;
+ RundownKeyword = EventPipeSession.DefaultRundownKeyword;
}
// This string is shared between HttpRequestSourceConfiguration and AspNetTriggerSourceConfiguration
public LoggingSourceConfiguration(LogLevel level, LogMessageType messageType, IDictionary<string, LogLevel?> filterSpecs, bool useAppFilters,
bool collectScopes)
{
- RequestRundown = false;
+ RundownKeyword = 0;
_filterSpecs = ToFilterSpecsString(filterSpecs, useAppFilters);
_keywords = (long)ToKeywords(messageType);
_level = ToEventLevel(level);
throw new ArgumentNullException(nameof(providers));
}
- RequestRundown = false;
+ RundownKeyword = 0;
_eventPipeProviders = providers.Where(provider => provider.Type.HasFlag(MetricType.EventCounter))
.Select((MetricEventPipeProvider provider) => new EventPipeProvider(provider.Provider,
public abstract IList<EventPipeProvider> GetProviders();
- public virtual bool RequestRundown { get; set; } = true;
+ public virtual long RundownKeyword { get; set; } = EventPipeSession.DefaultRundownKeyword;
public virtual int BufferSizeInMB => 256;
+
+ public virtual RetryStrategy RetryStrategy { get; set; } = RetryStrategy.NothingToRetry;
}
}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+//
+// This class describes the various strategies for retrying a command.
+// The rough idea is that these numbers form a state machine.
+// Any time a command execution fails, a retry is attempted by matching the
+// config's condition together with this strategy number to generate a
+// modified config and a modified strategy.
+//
+// This is designed with forward compatibility in mind. We might have newer
+// capabilities that only exist in newer runtimes, but we will never know exactly
+// how we should retry. So this gives us a way to encode the retry strategy in the
+// profiles without having to introduce new concepts.
+//
+namespace Microsoft.Diagnostics.Monitoring.EventPipe
+{
+ public enum RetryStrategy
+ {
+ NothingToRetry = 0,
+ DropKeywordKeepRundown = 1,
+ DropKeywordDropRundown = 2,
+ ForbiddenToRetry = 3
+ }
+}
{
public sealed class SampleProfilerConfiguration : MonitoringSourceConfiguration
{
+ public SampleProfilerConfiguration()
+ {
+ RundownKeyword = 0;
+ }
+
public override IList<EventPipeProvider> GetProviders() =>
new EventPipeProvider[]
{
- new EventPipeProvider(SampleProfilerProviderName, EventLevel.Informational)
+ new(SampleProfilerProviderName, EventLevel.Informational)
};
public override int BufferSizeInMB => 1;
-
- public override bool RequestRundown
- {
- get => false;
- set => throw new NotSupportedException();
- }
}
}
// The .NET Foundation licenses this file to you under the MIT license.
using System;
+using System.Collections.Generic;
+using System.Diagnostics;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
EventPipeSession session = null;
try
{
- session = await client.StartEventPipeSessionAsync(_sourceConfig.GetProviders(), _sourceConfig.RequestRundown, _sourceConfig.BufferSizeInMB, cancellationToken).ConfigureAwait(false);
+ IEnumerable<EventPipeProvider> providers = _sourceConfig.GetProviders();
+ int bufferSizeInMB = _sourceConfig.BufferSizeInMB;
+ long rundownKeyword = _sourceConfig.RundownKeyword;
+ RetryStrategy retryStrategy = _sourceConfig.RetryStrategy;
+ try
+ {
+ EventPipeSessionConfiguration config = new(providers, bufferSizeInMB, rundownKeyword, true);
+ session = await client.StartEventPipeSessionAsync(config, cancellationToken).ConfigureAwait(false);
+ }
+ catch (UnsupportedCommandException) when (retryStrategy == RetryStrategy.DropKeywordKeepRundown)
+ {
+ //
+ // If you are building new profiles or options, you can test with these asserts to make sure you are writing
+ // the retry strategies correctly.
+ //
+ // If these asserts ever fire, something is wrong with the option generation logic and is causing unnecessary retries.
+ // Unnecessary retries are not fatal.
+ //
+ // Debug.Assert(rundownKeyword != 0);
+ // Debug.Assert(rundownKeyword != EventPipeSession.DefaultRundownKeyword);
+ //
+ EventPipeSessionConfiguration config = new(providers, bufferSizeInMB, EventPipeSession.DefaultRundownKeyword, true);
+ session = await client.StartEventPipeSessionAsync(config, cancellationToken).ConfigureAwait(false);
+ }
+ catch (UnsupportedCommandException) when (retryStrategy == RetryStrategy.DropKeywordDropRundown)
+ {
+ //
+ // If you are building new profiles or options, you can test with these asserts to make sure you are writing
+ // the retry strategies correctly.
+ //
+ // If these asserts ever fire, something is wrong with the option generation logic and is causing unnecessary retries.
+ // Unnecessary retries are not fatal.
+ //
+ // Debug.Assert(rundownKeyword != 0);
+ // Debug.Assert(rundownKeyword != EventPipeSession.DefaultRundownKeyword);
+ //
+ EventPipeSessionConfiguration config = new(providers, bufferSizeInMB, 0, true);
+ session = await client.StartEventPipeSessionAsync(config, cancellationToken).ConfigureAwait(false);
+ }
if (resumeRuntime)
{
try
return EventPipeSession.Start(_endpoint, config);
}
+ /// <summary>
+ /// Start tracing the application and return an EventPipeSession object
+ /// </summary>
+ /// <param name="config">The configuration for start tracing.</param>
+ /// <returns>
+ /// An EventPipeSession object representing the EventPipe session that just started.
+ /// </returns>
+ public EventPipeSession StartEventPipeSession(EventPipeSessionConfiguration config)
+ {
+ return EventPipeSession.Start(_endpoint, config);
+ }
+
/// <summary>
/// Start tracing the application and return an EventPipeSession object
/// </summary>
{
public class EventPipeSession : IDisposable
{
+ //! These are CoreCLR-specific keywords for native ETW events (ending up in EventPipe).
+ //! The keywords below seem to correspond to:
+ //! GCKeyword (0x00000001)
+ //! LoaderKeyword (0x00000008)
+ //! JitKeyword (0x00000010)
+ //! NgenKeyword (0x00000020)
+ //! unused_keyword (0x00000100)
+ //! JittedMethodILToNativeMapKeyword (0x00020000)
+ //! ThreadTransferKeyword (0x80000000)
+ internal const long DefaultRundownKeyword = 0x80020139;
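// Sanity check of the constant above: OR-ing the listed keywords gives
// 0x1 | 0x8 | 0x10 | 0x20 | 0x100 | 0x20000 | 0x80000000 == 0x80020139.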
+
private ulong _sessionId;
private IpcEndpoint _endpoint;
private bool _disposedValue; // To detect redundant calls
private static IpcMessage CreateStartMessage(EventPipeSessionConfiguration config)
{
// To keep backward compatibility with older runtimes we only use newer serialization format when needed
- // V3 has added support to disable the stacktraces
- bool shouldUseV3 = !config.RequestStackwalk;
- EventPipeCommandId command = shouldUseV3 ? EventPipeCommandId.CollectTracing3 : EventPipeCommandId.CollectTracing2;
- byte[] payload = shouldUseV3 ? config.SerializeV3() : config.SerializeV2();
+ EventPipeCommandId command;
+ byte[] payload;
+ if (config.RundownKeyword != DefaultRundownKeyword && config.RundownKeyword != 0)
+ {
+ // V4 has added support to specify rundown keyword
+ command = EventPipeCommandId.CollectTracing4;
+ payload = config.SerializeV4();
+ }
+ else if (!config.RequestStackwalk)
+ {
+ // V3 has added support to disable the stacktraces
+ command = EventPipeCommandId.CollectTracing3;
+ payload = config.SerializeV3();
+ }
+ else
+ {
+ command = EventPipeCommandId.CollectTracing2;
+ payload = config.SerializeV2();
+ }
+
return new IpcMessage(DiagnosticsServerCommandSet.EventPipe, (byte)command, payload);
}
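// Illustrative mapping of configurations to command versions per the logic above:
//   RundownKeyword == DefaultRundownKeyword, RequestStackwalk == true  -> CollectTracing2 (V2)
//   RundownKeyword == DefaultRundownKeyword, RequestStackwalk == false -> CollectTracing3 (V3)
//   RundownKeyword == 0,                     RequestStackwalk == false -> CollectTracing3 (V3)
//   any other nonzero RundownKeyword                                   -> CollectTracing4 (V4)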
IEnumerable<EventPipeProvider> providers,
int circularBufferSizeMB = 256,
bool requestRundown = true,
- bool requestStackwalk = true) : this(circularBufferSizeMB, EventPipeSerializationFormat.NetTrace, providers, requestRundown, requestStackwalk)
+ bool requestStackwalk = true) : this(circularBufferSizeMB, EventPipeSerializationFormat.NetTrace, providers, requestStackwalk, (requestRundown ? EventPipeSession.DefaultRundownKeyword : 0))
+ {}
+
+ /// <summary>
+ /// Creates a new configuration object for the EventPipeSession.
+ /// For details, see the documentation of each property of this object.
+ /// </summary>
+ /// <param name="providers">An IEnumerable containing the list of Providers to turn on.</param>
+ /// <param name="circularBufferSizeMB">The size of the runtime's buffer for collecting events in MB</param>
+ /// <param name="rundownKeyword">The keyword for rundown events.</param>
+ /// <param name="requestStackwalk">If true, record a stacktrace for every emitted event.</param>
+ public EventPipeSessionConfiguration(
+ IEnumerable<EventPipeProvider> providers,
+ int circularBufferSizeMB,
+ long rundownKeyword,
+ bool requestStackwalk = true) : this(circularBufferSizeMB, EventPipeSerializationFormat.NetTrace, providers, requestStackwalk, rundownKeyword)
{}
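// A minimal sketch of the new constructor, assuming a connected DiagnosticsClient
// named `client` (GC-only rundown; ClrTraceEventParser comes from the TraceEvent library):
EventPipeProvider[] gcProviders =
[
    new("Microsoft-Windows-DotNETRuntime",
        System.Diagnostics.Tracing.EventLevel.Informational,
        (long)Tracing.Parsers.ClrTraceEventParser.Keywords.GC)
];
EventPipeSessionConfiguration gcConfig = new(
    gcProviders,
    circularBufferSizeMB: 256,
    rundownKeyword: (long)Tracing.Parsers.ClrTraceEventParser.Keywords.GC);
using EventPipeSession session = client.StartEventPipeSession(gcConfig);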
private EventPipeSessionConfiguration(
int circularBufferSizeMB,
EventPipeSerializationFormat format,
IEnumerable<EventPipeProvider> providers,
- bool requestRundown,
- bool requestStackwalk)
+ bool requestStackwalk,
+ long rundownKeyword)
{
if (circularBufferSizeMB == 0)
{
CircularBufferSizeInMB = circularBufferSizeMB;
Format = format;
- RequestRundown = requestRundown;
RequestStackwalk = requestStackwalk;
+ RundownKeyword = rundownKeyword;
}
/// <summary>
/// <item>Consider setting this parameter to false if you don't need stacktrace information or if you're analyzing events on the fly.</item>
/// </list>
/// </summary>
- public bool RequestRundown { get; }
+ public bool RequestRundown => this.RundownKeyword != 0;
/// <summary>
/// The size of the runtime's buffer for collecting events in MB.
/// </summary>
public bool RequestStackwalk { get; }
+ /// <summary>
+ /// The keywords enabled for the rundown provider.
+ /// </summary>
+ public long RundownKeyword { get; internal set; }
+
/// <summary>
/// Providers to enable for this session.
/// </summary>
return serializedData;
}
+ public static byte[] SerializeV4(this EventPipeSessionConfiguration config)
+ {
+ byte[] serializedData = null;
+ using (MemoryStream stream = new())
+ using (BinaryWriter writer = new(stream))
+ {
+ writer.Write(config.CircularBufferSizeInMB);
+ writer.Write((uint)config.Format);
+ writer.Write(config.RundownKeyword);
+ writer.Write(config.RequestStackwalk);
+
+ SerializeProviders(config, writer);
+
+ writer.Flush();
+ serializedData = stream.ToArray();
+ }
+
+ return serializedData;
+ }
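// Resulting V4 payload layout, in write order (BinaryWriter, little-endian):
//   uint  CircularBufferSizeInMB
//   uint  Format (EventPipeSerializationFormat)
//   long  RundownKeyword
//   bool  RequestStackwalk (one byte)
//   int   provider count, then each serialized provider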
+
private static void SerializeProviders(EventPipeSessionConfiguration config, BinaryWriter writer)
{
writer.Write(config.Providers.Count);
CollectTracing = 0x02,
CollectTracing2 = 0x03,
CollectTracing3 = 0x04,
+ CollectTracing4 = 0x05,
}
internal enum DumpCommandId : byte
_serviceContainer.AddService<ISymbolService>(_symbolService);
// Automatically enable symbol server support
- _symbolService.AddSymbolServer(msdl: true, symweb: false, timeoutInMinutes: 6, retryCount: 5);
+ _symbolService.AddSymbolServer(timeoutInMinutes: 6, retryCount: 5);
_symbolService.AddCachePath(_symbolService.DefaultSymbolCache);
}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats
+{
+ /// <summary>
+ /// An address space that starts at a fixed offset relative to another space
+ /// </summary>
+ public class RelativeAddressSpace : IAddressSpace
+ {
+ private IAddressSpace _baseAddressSpace;
+ private ulong _baseStart;
+ private ulong _length;
+ private long _baseToRelativeShift;
+
+ public RelativeAddressSpace(IAddressSpace baseAddressSpace, ulong startOffset, ulong length) :
+ this(baseAddressSpace, startOffset, length, -(long)startOffset)
+ { }
+
+ public RelativeAddressSpace(IAddressSpace baseAddressSpace, ulong startOffset, ulong length, long baseToRelativeShift)
+ {
+ /*
+ if (startOffset < 0 || startOffset >= baseAddressSpace.Length)
+ {
+ throw new BadInputFormatException("Invalid startOffset");
+ }
+ if (length < 0 || startOffset + length > baseAddressSpace.Length)
+ {
+ throw new BadInputFormatException("Invalid length");
+ }
+ if((long)startOffset + baseToRelativeShift < 0)
+ {
+ throw new BadInputFormatException("Invalid baseToRelativeShift");
+ }*/
+ _baseAddressSpace = baseAddressSpace;
+ _baseStart = startOffset;
+ _length = length;
+ _baseToRelativeShift = baseToRelativeShift;
+ }
+
+ /// <summary>
+ /// Reads a range of bytes from the address space
+ /// </summary>
+ /// <param name="position">The position in the address space to begin reading from</param>
+ /// <param name="buffer">The buffer that will receive the bytes that are read</param>
+ /// <param name="bufferOffset">The offset in the output buffer to begin writing the bytes</param>
+ /// <param name="count">The number of bytes to read into the buffer</param>
+ /// <returns>The number of bytes read</returns>
+ public uint Read(ulong position, byte[] buffer, uint bufferOffset, uint count)
+ {
+ ulong basePosition = (ulong)((long)position - _baseToRelativeShift);
+ if (basePosition < _baseStart)
+ {
+ return 0;
+ }
+ count = (uint)Math.Min(count, _length);
+ return _baseAddressSpace.Read(basePosition, buffer, bufferOffset, count);
+ }
+
+ /// <summary>
+ /// The upper bound (non-inclusive) of readable addresses
+ /// </summary>
+ public ulong Length { get { return unchecked(_baseStart + _length + (ulong)_baseToRelativeShift); } }
+ }
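// Worked example of the default shift: RelativeAddressSpace(base, 0x1000, 0x100)
// sets baseToRelativeShift = -0x1000, so Read(0x10, ...) reads base position
// 0x10 - (-0x1000) = 0x1010, and Length wraps to 0x1000 + 0x100 - 0x1000 = 0x100.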
+
+ public class ZeroAddressSpace : IAddressSpace
+ {
+ public ZeroAddressSpace(ulong length)
+ {
+ Length = length;
+ }
+
+ public ulong Length { get; private set; }
+
+ public uint Read(ulong position, byte[] buffer, uint bufferOffset, uint count)
+ {
+ if (position >= Length)
+ {
+ return 0;
+ }
+ count = (uint)Math.Min(Length - position, count);
+ Array.Clear(buffer, (int)bufferOffset, (int)count);
+ return count;
+ }
+ }
+
+ public struct PiecewiseAddressSpaceRange
+ {
+ public ulong Start;
+ public ulong Length;
+ public IAddressSpace AddressSpace;
+ }
+
+ public class PiecewiseAddressSpace : IAddressSpace
+ {
+ private PiecewiseAddressSpaceRange[] _ranges;
+
+ public PiecewiseAddressSpace(params PiecewiseAddressSpaceRange[] ranges)
+ {
+ _ranges = ranges;
+ Length = _ranges.Max(r => r.Start + r.Length);
+ }
+
+ public ulong Length { get; private set; }
+
+ public uint Read(ulong position, byte[] buffer, uint bufferOffset, uint count)
+ {
+ uint bytesRead = 0;
+ while (bytesRead != count)
+ {
+ int i = 0;
+ for (; i < _ranges.Length; i++)
+ {
+ ulong upper = _ranges[i].Start + _ranges[i].Length;
+ if (_ranges[i].Start <= position && position < upper)
+ {
+ uint bytesToReadRange = (uint)Math.Min(count - bytesRead, upper - position);
+ uint bytesReadRange = _ranges[i].AddressSpace.Read(position, buffer, bufferOffset, bytesToReadRange);
+ if (bytesReadRange == 0)
+ {
+ return bytesRead;
+ }
+ position += bytesReadRange;
+ bufferOffset += bytesReadRange;
+ bytesRead += bytesReadRange;
+ break;
+ }
+ }
+ if (i == _ranges.Length)
+ {
+ return bytesRead;
+ }
+ }
+ return bytesRead;
+ }
+ }
+}
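// A minimal sketch combining the types above: a sparse 0x2000-byte space where
// the second page reads as zeroes (`fileSpace` is a hypothetical IAddressSpace
// over real data; each range is read at the absolute position, hence the
// ZeroAddressSpace length of 0x2000 rather than 0x1000):
IAddressSpace sparse = new PiecewiseAddressSpace(
    new PiecewiseAddressSpaceRange { Start = 0, Length = 0x1000, AddressSpace = fileSpace },
    new PiecewiseAddressSpaceRange { Start = 0x1000, Length = 0x1000, AddressSpace = new ZeroAddressSpace(0x2000) });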
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats
+{
+ public static class ArrayHelper
+ {
+ /// <summary>
+ /// Safe array allocator - turns OverflowException and OutOfMemoryException into BadInputFormatException.
+ /// </summary>
+ public static E[] New<E>(uint count)
+ {
+ E[] a;
+ try
+ {
+ a = new E[count];
+ }
+ catch (Exception)
+ {
+ throw new BadInputFormatException("Internal overflow attempting to allocate an array of size " + count + ".");
+ }
+ return a;
+ }
+ }
+}
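// Example: a corrupt header can carry an absurd element count; with the helper
// above this surfaces as a parsing error rather than an OutOfMemoryException.
byte[] ok = ArrayHelper.New<byte>(0x10); // ordinary allocation
// ArrayHelper.New<long>(uint.MaxValue); // would throw BadInputFormatException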
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Reflection;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats
+{
+ internal sealed class ArrayLayout : LayoutBase
+ {
+ public ArrayLayout(Type arrayType, ILayout elementLayout, uint numElements) :
+ base(arrayType, numElements * elementLayout.Size, elementLayout.NaturalAlignment)
+ {
+ _elementLayout = elementLayout;
+ _numElements = numElements;
+ }
+
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+ ulong src = position;
+ uint elementSize = _elementLayout.Size;
+ Array a = Array.CreateInstance(_elementLayout.Type, (int)_numElements);
+ for (uint i = 0; i < _numElements; i++)
+ {
+ a.SetValue(_elementLayout.Read(dataSource, src), (int)i);
+ src += elementSize;
+ }
+ return a;
+ }
+
+ private uint _numElements;
+ private ILayout _elementLayout;
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+
+namespace Microsoft.FileFormats
+{
+ /// <summary>
+ /// Attach to an array-typed field to indicate the number of elements
+ /// </summary>
+ [AttributeUsage(AttributeTargets.Field)]
+ public sealed class ArraySizeAttribute : Attribute
+ {
+ public ArraySizeAttribute(uint numElements)
+ {
+ NumElements = numElements;
+ }
+
+ public uint NumElements { get; private set; }
+ }
+
+ /// <summary>
+ /// Attach to a field to indicate that it should only be included in the type
+ /// if a particular define has been enabled
+ /// </summary>
+ [AttributeUsage(AttributeTargets.Field)]
+ public sealed class IfAttribute : Attribute
+ {
+ public IfAttribute(string defineName)
+ {
+ DefineName = defineName;
+ }
+
+ public string DefineName { get; private set; }
+ }
+}
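// A minimal sketch of how these attach to TStruct-derived layouts (ExampleHeader
// is hypothetical; ELFProgramHeader below uses [If("64BIT")] the same way):
public class ExampleHeader : TStruct
{
    public uint Magic;
    [ArraySize(16)]
    public byte[] Ident;        // parsed as exactly 16 elements
    [If("64BIT")]
    public ulong ExtendedField; // only included when the "64BIT" define is enabled
}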
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace Microsoft.FileFormats.ELF
+{
+ public class ELFCoreFile
+ {
+ private readonly ELFFile _elf;
+ private readonly Lazy<ELFFileTable> _fileTable;
+ private readonly Lazy<ELFLoadedImage[]> _images;
+
+ public ELFCoreFile(IAddressSpace dataSource)
+ {
+ _elf = new ELFFile(dataSource);
+ _fileTable = new Lazy<ELFFileTable>(ReadFileTable);
+ _images = new Lazy<ELFLoadedImage[]>(ReadLoadedImages);
+ }
+
+ public ELFFileTable FileTable { get { return _fileTable.Value; } }
+ public ELFLoadedImage[] LoadedImages { get { return _images.Value; } }
+ public IAddressSpace DataSource { get { return _elf.VirtualAddressReader.DataSource; } }
+
+ public bool IsValid()
+ {
+ return _elf.IsValid() && _elf.Header.Type == ELFHeaderType.Core;
+ }
+
+ public bool Is64Bit => _elf.Is64Bit;
+
+ public IEnumerable<ELFProgramSegment> Segments => _elf.Segments;
+
+ private ELFFileTable ReadFileTable()
+ {
+ foreach (ELFProgramSegment seg in _elf.Segments)
+ {
+ if (seg.Header.Type == ELFProgramHeaderType.Note)
+ {
+ ELFNoteList noteList = new(seg.Contents);
+ foreach (ELFNote note in noteList.Notes)
+ {
+ if (note.Header.Type == ELFNoteType.File)
+ {
+ return new ELFFileTable(note.Contents);
+ }
+ }
+ }
+ }
+
+ throw new BadInputFormatException("No ELF file table found");
+ }
+
+ private ELFLoadedImage[] ReadLoadedImages()
+ {
+ Dictionary<string, ELFLoadedImage> lookup = new();
+
+ foreach (ELFFileTableEntry fte in FileTable.Files.Where(fte => !fte.Path.StartsWith("/dev/zero") && !fte.Path.StartsWith("/run/shm")))
+ {
+ string path = fte.Path;
+ if (!lookup.TryGetValue(path, out ELFLoadedImage image))
+ {
+ image = lookup[path] = new ELFLoadedImage(path);
+ }
+ image.AddTableEntryPointers(fte);
+ }
+
+ List<ELFLoadedImage> result = new();
+ foreach (ELFLoadedImage image in lookup.Values)
+ {
+ image.Image = new ELFFile(_elf.VirtualAddressReader.DataSource, image.LoadAddress, isDataSourceVirtualAddressSpace: true);
+ result.Add(image);
+ }
+
+ return result.ToArray();
+ }
+ }
+
+ public class ELFLoadedImage
+ {
+ private ulong _loadAddress;
+ private ulong _minimumPointer = ulong.MaxValue;
+
+ public ELFLoadedImage(ELFFile image, ELFFileTableEntry entry)
+ {
+ Image = image;
+ Path = entry.Path;
+ _loadAddress = entry.LoadAddress;
+ }
+
+ public ELFLoadedImage(string path)
+ {
+ Path = path;
+ }
+
+ public ulong LoadAddress => _loadAddress == 0 ? _minimumPointer : _loadAddress;
+ public string Path { get; }
+ public ELFFile Image { get; internal set; }
+
+ internal void AddTableEntryPointers(ELFFileTableEntry entry)
+ {
+ // There are cases (like .NET single-file modules) where the first NT_FILE entry isn't the ELF
+ // or PE header (i.e. the base address). The header is the first entry with PageOffset == 0. For
+ // ELF modules there should be only one PageOffset == 0 entry, but with memory-mapped PE
+ // assemblies there can be more than one PageOffset == 0 entry and the first one is the base
+ // address.
+ if (_loadAddress == 0 && entry.PageOffset == 0)
+ {
+ _loadAddress = entry.LoadAddress;
+ }
+ // If no load address was found, use the lowest start address. There has to be at least one
+ // entry. This fixes the .NET 5.0 macOS ELF dumps, which have modules with no PageOffset == 0 entries.
+ _minimumPointer = Math.Min(entry.LoadAddress, _minimumPointer);
+ }
+ }
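// Worked example of the selection above: NT_FILE entries for one module of
// { PageOffset = 2, Start = 0x7000 }, { PageOffset = 0, Start = 0x7100 },
// { PageOffset = 0, Start = 0x7200 } yield LoadAddress = 0x7100 (the first
// PageOffset == 0 entry); if no entry had PageOffset == 0, LoadAddress would
// fall back to 0x7000, the minimum Start seen.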
+
+ public class ELFFileTableEntry
+ {
+ private readonly ELFFileTableEntryPointers _ptrs;
+
+ public ELFFileTableEntry(string path, ELFFileTableEntryPointers ptrs)
+ {
+ Path = path;
+ _ptrs = ptrs;
+ }
+
+ public ulong PageOffset => _ptrs.PageOffset;
+ public ulong LoadAddress => _ptrs.Start;
+ public string Path { get; private set; }
+ }
+
+ public class ELFFileTable
+ {
+ private readonly Reader _noteReader;
+ private readonly Lazy<IEnumerable<ELFFileTableEntry>> _files;
+
+ public ELFFileTable(Reader noteReader)
+ {
+ _noteReader = noteReader;
+ _files = new Lazy<IEnumerable<ELFFileTableEntry>>(ReadFiles);
+ }
+
+ public IEnumerable<ELFFileTableEntry> Files { get { return _files.Value; } }
+
+ private IEnumerable<ELFFileTableEntry> ReadFiles()
+ {
+ List<ELFFileTableEntry> files = new();
+ ulong readPosition = 0;
+ ELFFileTableHeader header = _noteReader.Read<ELFFileTableHeader>(ref readPosition);
+
+ //TODO: sanity check the entryCount
+ ELFFileTableEntryPointers[] ptrs = _noteReader.ReadArray<ELFFileTableEntryPointers>(ref readPosition, (uint)(ulong)header.EntryCount);
+ for (int i = 0; i < (int)(ulong)header.EntryCount; i++)
+ {
+ string path = _noteReader.Read<string>(ref readPosition);
+
+ // This substitution is for unloaded modules for which Linux appends " (deleted)" to the module name.
+ path = path.Replace(" (deleted)", "");
+
+ files.Add(new ELFFileTableEntry(path, ptrs[i]));
+ }
+ return files;
+ }
+ }
+}
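// A minimal usage sketch for reading a core dump, assuming the stream-backed
// StreamAddressSpace from elsewhere in Microsoft.FileFormats:
using FileStream stream = File.OpenRead("/tmp/coredump");
ELFCoreFile core = new(new StreamAddressSpace(stream));
if (core.IsValid())
{
    foreach (ELFLoadedImage image in core.LoadedImages)
    {
        Console.WriteLine($"{image.LoadAddress:X16} {image.Path}");
    }
}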
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Microsoft.FileFormats.ELF
+{
+ public class ELFFile : IDisposable
+ {
+ private readonly ulong _position;
+ private readonly bool _isDataSourceVirtualAddressSpace;
+ private readonly Reader _reader;
+ private readonly Lazy<ELFHeaderIdent> _ident;
+ private readonly Lazy<Reader> _dataSourceReader;
+ private readonly Lazy<ELFHeader> _header;
+ private readonly Lazy<IEnumerable<ELFProgramSegment>> _segments;
+ private readonly Lazy<ELFSection[]> _sections;
+ private readonly Lazy<Reader> _virtualAddressReader;
+ private readonly Lazy<byte[]> _buildId;
+ private readonly Lazy<byte[]> _sectionNameTable;
+
+ public ELFFile(IAddressSpace dataSource, ulong position = 0, bool isDataSourceVirtualAddressSpace = false)
+ {
+ _position = position;
+ _reader = new Reader(dataSource);
+ _isDataSourceVirtualAddressSpace = isDataSourceVirtualAddressSpace;
+ _ident = new Lazy<ELFHeaderIdent>(() => _reader.Read<ELFHeaderIdent>(_position));
+ _dataSourceReader = new Lazy<Reader>(() => new Reader(dataSource, new LayoutManager().AddELFTypes(IsBigEndian, Is64Bit)));
+ _header = new Lazy<ELFHeader>(() => DataSourceReader.Read<ELFHeader>(_position));
+ _segments = new Lazy<IEnumerable<ELFProgramSegment>>(ReadSegments);
+ _sections = new Lazy<ELFSection[]>(ReadSections);
+ _virtualAddressReader = new Lazy<Reader>(CreateVirtualAddressReader);
+ _buildId = new Lazy<byte[]>(ReadBuildId);
+ _sectionNameTable = new Lazy<byte[]>(ReadSectionNameTable);
+ }
+
+ public ELFHeaderIdent Ident { get { return _ident.Value; } }
+ public ELFHeader Header { get { return _header.Value; } }
+ private Reader DataSourceReader { get { return _dataSourceReader.Value; } }
+ public IEnumerable<ELFProgramSegment> Segments { get { return _segments.Value; } }
+ public ELFSection[] Sections { get { return _sections.Value; } }
+ public Reader VirtualAddressReader { get { return _virtualAddressReader.Value; } }
+ public byte[] BuildID { get { return _buildId.Value; } }
+ public byte[] SectionNameTable { get { return _sectionNameTable.Value; } }
+
+ public void Dispose()
+ {
+ if (_reader.DataSource is IDisposable disposable)
+ {
+ disposable.Dispose();
+ }
+ }
+
+ public bool IsValid()
+ {
+ if (_reader.Length > (_position + _reader.SizeOf<ELFHeaderIdent>()))
+ {
+ try
+ {
+ return Ident.IsIdentMagicValid.Check();
+ }
+ catch (Exception ex) when (ex is InvalidVirtualAddressException || ex is BadInputFormatException)
+ {
+ }
+ }
+ return false;
+ }
+
+ public bool IsBigEndian
+ {
+ get
+ {
+ Ident.IsIdentMagicValid.CheckThrowing();
+ Ident.IsDataValid.CheckThrowing();
+ return Ident.Data == ELFData.BigEndian;
+ }
+ }
+
+ public bool Is64Bit
+ {
+ get
+ {
+ Ident.IsIdentMagicValid.CheckThrowing();
+ Ident.IsClassValid.CheckThrowing();
+ return (Ident.Class == ELFClass.Class64);
+ }
+ }
+
+ public ulong PreferredVMBaseAddress
+ {
+ get
+ {
+ ulong minAddr = ulong.MaxValue;
+
+ foreach (ELFProgramSegment segment in Segments)
+ {
+ if (segment.Header.Type == ELFProgramHeaderType.Load)
+ {
+ minAddr = Math.Min(minAddr, segment.Header.VirtualAddress);
+ }
+ }
+
+ return minAddr;
+ }
+ }
+
+ public ELFSection FindSectionByName(string name)
+ {
+ foreach (ELFSection section in Sections)
+ {
+ if (string.Equals(section.Name, name))
+ {
+ return section;
+ }
+ }
+ return null;
+ }
+
+ private IEnumerable<ELFProgramSegment> ReadSegments()
+ {
+ Header.IsProgramHeaderCountReasonable.CheckThrowing();
+ IsHeaderProgramHeaderOffsetValid.CheckThrowing();
+ IsHeaderProgramHeaderEntrySizeValid.CheckThrowing();
+
+ // Calculate the loadBias. It is usually just the base address except for some executable modules.
+ ulong loadBias = _position;
+ if (loadBias > 0)
+ {
+ for (uint i = 0; i < Header.ProgramHeaderCount; i++)
+ {
+ ulong programHeaderOffset = _position + Header.ProgramHeaderOffset + i * Header.ProgramHeaderEntrySize;
+ ELFProgramHeader header = DataSourceReader.Read<ELFProgramHeader>(programHeaderOffset);
+ if (header.Type == ELFProgramHeaderType.Load && header.FileOffset == 0)
+ {
+ loadBias -= header.VirtualAddress;
+ }
+ }
+ }
+
+ // Build the program segments
+ List<ELFProgramSegment> segments = new();
+ for (uint i = 0; i < Header.ProgramHeaderCount; i++)
+ {
+ ulong programHeaderOffset = _position + Header.ProgramHeaderOffset + i * Header.ProgramHeaderEntrySize;
+ segments.Add(new ELFProgramSegment(DataSourceReader, loadBias, programHeaderOffset, _isDataSourceVirtualAddressSpace));
+ }
+ return segments;
+ }
+
+ private ELFSection[] ReadSections()
+ {
+ Header.IsSectionHeaderCountReasonable.CheckThrowing();
+ IsHeaderSectionHeaderOffsetValid.CheckThrowing();
+ IsHeaderSectionHeaderEntrySizeValid.CheckThrowing();
+
+ List<ELFSection> sections = new();
+ for (uint i = 0; i < Header.SectionHeaderCount; i++)
+ {
+ sections.Add(new ELFSection(this, DataSourceReader, _position, _position + Header.SectionHeaderOffset + i * Header.SectionHeaderEntrySize));
+ }
+ return sections.ToArray();
+ }
+
+ private Reader CreateVirtualAddressReader()
+ {
+ if (_isDataSourceVirtualAddressSpace)
+ {
+ return DataSourceReader;
+ }
+ else
+ {
+ return DataSourceReader.WithAddressSpace(new ELFVirtualAddressSpace(Segments));
+ }
+ }
+
+ private byte[] ReadBuildId()
+ {
+ byte[] buildId = null;
+
+ if (Header.ProgramHeaderOffset > 0 && Header.ProgramHeaderEntrySize > 0 && Header.ProgramHeaderCount > 0)
+ {
+ try
+ {
+ foreach (ELFProgramSegment segment in Segments)
+ {
+ if (segment.Header.Type == ELFProgramHeaderType.Note)
+ {
+ buildId = ReadBuildIdNote(segment.Contents);
+ if (buildId != null)
+ {
+ break;
+ }
+ }
+ }
+ }
+ catch (Exception ex) when (ex is InvalidVirtualAddressException || ex is BadInputFormatException || ex is OverflowException)
+ {
+ }
+ }
+
+ if (buildId == null)
+ {
+ // Use sections to find the build id if there aren't any program headers (i.e. some FreeBSD .dbg files)
+ try
+ {
+ foreach (ELFSection section in Sections)
+ {
+ if (section.Header.Type == ELFSectionHeaderType.Note)
+ {
+ buildId = ReadBuildIdNote(section.Contents);
+ if (buildId != null)
+ {
+ break;
+ }
+ }
+ }
+ }
+ catch (Exception ex) when (ex is InvalidVirtualAddressException || ex is BadInputFormatException || ex is OverflowException)
+ {
+ }
+ }
+
+ return buildId;
+ }
+
+ private static byte[] ReadBuildIdNote(Reader noteReader)
+ {
+ if (noteReader != null)
+ {
+ ELFNoteList noteList = new(noteReader);
+ foreach (ELFNote note in noteList.Notes)
+ {
+ ELFNoteType type = note.Header.Type;
+ if (type == ELFNoteType.GnuBuildId && note.Name.Equals("GNU"))
+ {
+ return note.Contents.Read(0, (uint)note.Contents.Length);
+ }
+ }
+ }
+ return null;
+ }
+
+ private byte[] ReadSectionNameTable()
+ {
+ try
+ {
+ int nameTableIndex = Header.SectionHeaderStringIndex;
+ if (Header.SectionHeaderOffset != 0 && Header.SectionHeaderCount > 0 && nameTableIndex != 0)
+ {
+ ELFSection nameTableSection = Sections[nameTableIndex];
+ if (nameTableSection.Header.FileOffset > 0 && nameTableSection.Header.FileSize > 0)
+ {
+ return nameTableSection.Contents.Read(0, (uint)nameTableSection.Contents.Length);
+ }
+ }
+ }
+ catch (Exception ex) when (ex is InvalidVirtualAddressException || ex is BadInputFormatException || ex is OverflowException)
+ {
+ }
+ return null;
+ }
+
+ #region Validation Rules
+
+ public ValidationRule IsHeaderProgramHeaderOffsetValid
+ {
+ get
+ {
+ return new ValidationRule("ELF Header ProgramHeaderOffset is invalid or elf file is incomplete", () =>
+ {
+ return Header.ProgramHeaderOffset < _reader.Length &&
+ Header.ProgramHeaderOffset + (ulong)(Header.ProgramHeaderEntrySize * Header.ProgramHeaderCount) <= _reader.Length;
+ },
+ IsHeaderProgramHeaderEntrySizeValid,
+ Header.IsProgramHeaderCountReasonable);
+
+ }
+ }
+
+ public ValidationRule IsHeaderProgramHeaderEntrySizeValid
+ {
+ get { return new ValidationRule("ELF Header ProgramHeaderEntrySize is invalid", () => Header.ProgramHeaderEntrySize == DataSourceReader.SizeOf<ELFProgramHeader>()); }
+ }
+
+ public ValidationRule IsHeaderSectionHeaderOffsetValid
+ {
+ get
+ {
+ return new ValidationRule("ELF Header SectionHeaderOffset is invalid or elf file is incomplete", () => {
+ return Header.SectionHeaderOffset < _reader.Length &&
+ Header.SectionHeaderOffset + (ulong)(Header.SectionHeaderEntrySize * Header.SectionHeaderCount) <= _reader.Length;
+ },
+ IsHeaderSectionHeaderEntrySizeValid,
+ Header.IsSectionHeaderCountReasonable);
+ }
+ }
+
+ public ValidationRule IsHeaderSectionHeaderEntrySizeValid
+ {
+ get { return new ValidationRule("ELF Header SectionHeaderEntrySize is invalid", () => Header.SectionHeaderEntrySize == DataSourceReader.SizeOf<ELFSectionHeader>()); }
+ }
+
+ #endregion
+ }
+
+ public class ELFVirtualAddressSpace : IAddressSpace
+ {
+ private readonly ELFProgramSegment[] _segments;
+
+ public ELFVirtualAddressSpace(IEnumerable<ELFProgramSegment> segments)
+ {
+ _segments = segments.Where((programHeader) => programHeader.Header.FileSize > 0).ToArray();
+ Length = _segments.Max(s => s.Header.VirtualAddress + s.Header.VirtualSize);
+ }
+
+ public ulong Length { get; private set; }
+
+ public uint Read(ulong position, byte[] buffer, uint bufferOffset, uint count)
+ {
+ uint bytesRead = 0;
+ while (bytesRead != count)
+ {
+ int i = 0;
+ for (; i < _segments.Length; i++)
+ {
+ ELFProgramHeader header = _segments[i].Header;
+
+ ulong upperAddress = header.VirtualAddress + header.VirtualSize;
+ if (header.VirtualAddress <= position && position < upperAddress)
+ {
+ uint bytesToReadRange = (uint)Math.Min(count - bytesRead, upperAddress - position);
+ ulong segmentOffset = position - header.VirtualAddress;
+ uint bytesReadRange = _segments[i].Contents.Read(segmentOffset, buffer, bufferOffset, bytesToReadRange);
+ if (bytesReadRange == 0)
+ {
+ goto done;
+ }
+ position += bytesReadRange;
+ bufferOffset += bytesReadRange;
+ bytesRead += bytesReadRange;
+ break;
+ }
+ }
+ if (i == _segments.Length)
+ {
+ break;
+ }
+ }
+ done:
+ if (bytesRead == 0)
+ {
+ throw new InvalidVirtualAddressException(string.Format("Virtual address range is not mapped {0:X16} {1}", position, count));
+ }
+ // Zero the rest of the buffer if read less than requested
+ Array.Clear(buffer, (int)bufferOffset, (int)(count - bytesRead));
+ return bytesRead;
+ }
+ }
+
+ public class ELFProgramSegment
+ {
+ private readonly Lazy<Reader> _contents;
+
+ public ELFProgramSegment(Reader dataSourceReader, ulong elfOffset, ulong programHeaderOffset, bool isDataSourceVirtualAddressSpace)
+ {
+ Header = dataSourceReader.Read<ELFProgramHeader>(programHeaderOffset);
+ if (isDataSourceVirtualAddressSpace)
+ {
+ _contents = new Lazy<Reader>(() => dataSourceReader.WithRelativeAddressSpace(elfOffset + Header.VirtualAddress, Header.VirtualSize));
+ }
+ else
+ {
+ _contents = new Lazy<Reader>(() => dataSourceReader.WithRelativeAddressSpace(elfOffset + Header.FileOffset, Header.FileSize));
+ }
+ }
+
+ public ELFProgramHeader Header { get; }
+ public Reader Contents { get { return _contents.Value; } }
+
+ public override string ToString()
+ {
+ return "Segment@[" + Header.VirtualAddress.ToString() + "-" + (Header.VirtualAddress + Header.VirtualSize).ToString("x") + ")";
+ }
+ }
+
+ public class ELFSection
+ {
+ private readonly ELFFile _elfFile;
+ private readonly Reader _dataSourceReader;
+ private readonly Lazy<ELFSectionHeader> _header;
+ private readonly Lazy<string> _name;
+ private readonly Lazy<Reader> _contents;
+
+ private static readonly ASCIIEncoding _decoder = new();
+
+ public ELFSection(ELFFile elfFile, Reader dataSourceReader, ulong elfOffset, ulong sectionHeaderOffset)
+ {
+ _elfFile = elfFile;
+ _dataSourceReader = dataSourceReader;
+ _header = new Lazy<ELFSectionHeader>(() => _dataSourceReader.Read<ELFSectionHeader>(sectionHeaderOffset));
+ _name = new Lazy<string>(ReadName);
+ _contents = new Lazy<Reader>(() => _dataSourceReader.WithRelativeAddressSpace(elfOffset + Header.FileOffset, Header.FileSize));
+ }
+
+ public ELFSectionHeader Header { get { return _header.Value; } }
+ public string Name { get { return _name.Value; } }
+ public Reader Contents { get { return _contents.Value; } }
+
+ private string ReadName()
+ {
+ if (Header.Type == ELFSectionHeaderType.Null)
+ {
+ return string.Empty;
+ }
+ byte[] sectionNameTable = _elfFile.SectionNameTable;
+ if (sectionNameTable == null || sectionNameTable.Length == 0)
+ {
+ return string.Empty;
+ }
+ if (Header.NameIndex > sectionNameTable.Length)
+ {
+ return string.Empty;
+ }
+ int index = (int)Header.NameIndex;
+ if (index == 0)
+ {
+ return string.Empty;
+ }
+ int count = 0;
+ for (; (index + count) < sectionNameTable.Length; count++)
+ {
+ if (sectionNameTable[index + count] == 0)
+ {
+ break;
+ }
+ }
+ return _decoder.GetString(sectionNameTable, index, count);
+ }
+ }
+
+ public class ELFNoteList
+ {
+ private readonly Reader _elfSegmentReader;
+ private readonly Lazy<IEnumerable<ELFNote>> _notes;
+
+ public ELFNoteList(Reader elfSegmentReader)
+ {
+ _elfSegmentReader = elfSegmentReader;
+ _notes = new Lazy<IEnumerable<ELFNote>>(ReadNotes);
+ }
+
+ public IEnumerable<ELFNote> Notes { get { return _notes.Value; } }
+
+ private IEnumerable<ELFNote> ReadNotes()
+ {
+ List<ELFNote> notes = new();
+ ulong position = 0;
+ while (position < _elfSegmentReader.Length)
+ {
+ ELFNote note = new(_elfSegmentReader, position);
+ notes.Add(note);
+ position += note.Size;
+ }
+ return notes;
+ }
+ }
+
+ public class ELFNote
+ {
+ private readonly Reader _elfSegmentReader;
+ private readonly ulong _noteHeaderOffset;
+ private readonly Lazy<ELFNoteHeader> _header;
+ private readonly Lazy<string> _name;
+ private readonly Lazy<Reader> _contents;
+
+ public ELFNote(Reader elfSegmentReader, ulong offset)
+ {
+ _elfSegmentReader = elfSegmentReader;
+ _noteHeaderOffset = offset;
+ _header = new Lazy<ELFNoteHeader>(() => _elfSegmentReader.Read<ELFNoteHeader>(_noteHeaderOffset));
+ _name = new Lazy<string>(ReadName);
+ _contents = new Lazy<Reader>(CreateContentsReader);
+ }
+
+ public ELFNoteHeader Header { get { return _header.Value; } }
+ //TODO: validate these fields
+ public uint Size { get { return HeaderSize + Align4(Header.NameSize) + Align4(Header.ContentSize); } }
+ public string Name { get { return _name.Value; } }
+ public Reader Contents { get { return _contents.Value; } }
+
+ private uint HeaderSize
+ {
+ get { return _elfSegmentReader.LayoutManager.GetLayout<ELFNoteHeader>().Size; }
+ }
+
+ private string ReadName()
+ {
+ ulong nameOffset = _noteHeaderOffset + HeaderSize;
+ return _elfSegmentReader.WithRelativeAddressSpace(nameOffset, Align4(Header.NameSize)).Read<string>(0);
+ }
+
+ private Reader CreateContentsReader()
+ {
+ ulong contentsOffset = _noteHeaderOffset + HeaderSize + Align4(Header.NameSize);
+ return _elfSegmentReader.WithRelativeAddressSpace(contentsOffset, Align4(Header.ContentSize));
+ }
+
+ private static uint Align4(uint x)
+ {
+ return (x + 3U) & ~3U;
+ }
+ }
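+
+ // Usage sketch (illustrative only, not part of the original sources): scans a
+ // parsed note list for the GNU build-id payload. Everything referenced here is
+ // defined in this library; the caller supplies the Reader over a PT_NOTE
+ // segment's contents.
+ internal static class ELFBuildIdSample
+ {
+ internal static byte[] FindGnuBuildId(ELFNoteList noteList)
+ {
+ foreach (ELFNote note in noteList.Notes)
+ {
+ // Note type values are scoped by the note name, so match both fields.
+ if (note.Header.Type == ELFNoteType.GnuBuildId && note.Name == "GNU")
+ {
+ return note.Contents.DataSource.Read(0, note.Header.ContentSize);
+ }
+ }
+ return null;
+ }
+ }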
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+namespace Microsoft.FileFormats.ELF
+{
+ public enum ELFNoteType
+ {
+ // Note type values are scoped by the note's Name field ("CORE", "GNU", ...),
+ // so unrelated types can legitimately share a value: both of these are 3.
+ PrpsInfo = 3, // NT_PRPSINFO (name "CORE")
+ GnuBuildId = 3, // NT_GNU_BUILD_ID (name "GNU")
+ File = 0x46494c45 // "FILE" in ascii
+ }
+
+ public class ELFNoteHeader : TStruct
+ {
+ public uint NameSize;
+ public uint ContentSize;
+ public ELFNoteType Type;
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+
+namespace Microsoft.FileFormats.ELF
+{
+ public enum ELFProgramHeaderType : uint
+ {
+ Null = 0,
+ Load = 1,
+ Dynamic = 2,
+ Interp = 3,
+ Note = 4,
+ Shlib = 5,
+ Phdr = 6,
+ GnuEHFrame = 0x6474e550,
+ }
+
+ [Flags]
+ public enum ELFProgramHeaderFlags : uint
+ {
+ Executable = 1, // PF_X
+ Writable = 2, // PF_W
+ Readable = 4, // PF_R
+ ReadWriteExecute = Executable | Writable | Readable,
+ OSMask = 0x0FF00000, // PF_MASKOS
+ ProcessorMask = 0xF0000000, // PF_MASKPROC
+ }
+
+ public class ELFProgramHeader : TStruct
+ {
+ public ELFProgramHeaderType Type; // p_type
+ [If("64BIT")]
+ public uint Flags; // p_flags
+ public FileOffset FileOffset; // p_offset
+ public VirtualAddress VirtualAddress; // p_vaddr
+ public SizeT PhysicalAddress; // p_paddr
+ public SizeT FileSize; // p_filesz
+ public SizeT VirtualSize; // p_memsz
+ [If("32BIT")]
+ public uint Flags32; // p_flags
+ public SizeT Alignment; // p_align
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+namespace Microsoft.FileFormats.ELF
+{
+ public enum ELFSectionHeaderType : uint
+ {
+ Null = 0,
+ ProgBits = 1,
+ SymTab = 2,
+ StrTab = 3,
+ Rela = 4,
+ Hash = 5,
+ Dynamic = 6,
+ Note = 7,
+ NoBits = 8,
+ Rel = 9,
+ ShLib = 10,
+ DynSym = 11,
+ InitArray = 14,
+ FiniArray = 15,
+ PreInitArray = 16,
+ Group = 17,
+ SymTabIndexes = 18,
+ Num = 19,
+ GnuAttributes = 0x6ffffff5,
+ GnuHash = 0x6ffffff6,
+ GnuLibList = 0x6ffffff7,
+ CheckSum = 0x6ffffff8,
+ GnuVerDef = 0x6ffffffd,
+ GnuVerNeed = 0x6ffffffe,
+ GnuVerSym = 0x6fffffff,
+ }
+
+ public class ELFSectionHeader : TStruct
+ {
+ public uint NameIndex; // sh_name
+ public ELFSectionHeaderType Type; // sh_type
+ public SizeT Flags; // sh_flags
+ public VirtualAddress VirtualAddress; // sh_addr
+ public FileOffset FileOffset; // sh_offset
+ public SizeT FileSize; // sh_size
+ public uint Link; // sh_link
+ public uint Info; // sh_info
+ public SizeT Alignment; // sh_addralign
+ public SizeT EntrySize; // sh_entsize
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+namespace Microsoft.FileFormats.ELF
+{
+ public class FileOffset<T> : Pointer<T, SizeT> { }
+ public class FileOffset : FileOffset<byte> { }
+ public class VirtualAddress<T> : Pointer<T, SizeT> { }
+ public class VirtualAddress : VirtualAddress<byte> { }
+
+ public static class ELFLayoutManagerExtensions
+ {
+ public static LayoutManager AddELFTypes(this LayoutManager layouts, bool isBigEndian, bool is64Bit)
+ {
+ return layouts
+ .AddPrimitives(isBigEndian)
+ .AddEnumTypes()
+ .AddSizeT(is64Bit ? 8 : 4)
+ .AddPointerTypes()
+ .AddNullTerminatedString()
+ .AddTStructTypes(is64Bit ? new string[] { "64BIT" } : new string[] { "32BIT" });
+ }
+ }
+
+ public enum ELFClass : byte
+ {
+ None = 0,
+ Class32 = 1,
+ Class64 = 2
+ }
+
+ public enum ELFData : byte
+ {
+ None = 0,
+ LittleEndian = 1,
+ BigEndian = 2
+ }
+
+ /// <summary>
+ /// The leading 16 bytes of the ELF file format
+ /// </summary>
+ /// <remarks>
+ /// Although normally this is described as being part of the ELFHeader, it is
+ /// useful to parse this independently. The endianness and bitness described
+ /// in the identity bytes are needed to calculate the size and offset of
+ /// fields in the remainder of the header.
+ /// </remarks>
+ public class ELFHeaderIdent : TStruct
+ {
+ [ArraySize(16)]
+ public byte[] Ident;
+
+ public ELFClass Class
+ {
+ get
+ {
+ return (ELFClass)Ident[4];
+ }
+ }
+
+ public ELFData Data
+ {
+ get
+ {
+ return (ELFData)Ident[5];
+ }
+ }
+
+ #region Validation Rules
+ public ValidationRule IsIdentMagicValid
+ {
+ get
+ {
+ return new ValidationRule("Invalid ELFHeader Ident magic", () =>
+ {
+ return Ident[0] == 0x7f &&
+ Ident[1] == 0x45 &&
+ Ident[2] == 0x4c &&
+ Ident[3] == 0x46;
+ });
+ }
+ }
+
+ public ValidationRule IsClassValid
+ {
+ get
+ {
+ return new ValidationRule("Invalid ELFHeader Ident Class", () =>
+ {
+ return Class == ELFClass.Class32 || Class == ELFClass.Class64;
+ });
+ }
+ }
+
+ public ValidationRule IsDataValid
+ {
+ get
+ {
+ return new ValidationRule("Invalid ELFHeader Ident Data", () =>
+ {
+ return Data == ELFData.BigEndian || Data == ELFData.LittleEndian;
+ });
+ }
+ }
+ #endregion
+ }
+
+ public enum ELFHeaderType : ushort
+ {
+ Relocatable = 1,
+ Executable = 2,
+ Shared = 3,
+ Core = 4
+ }
+
+ public class ELFHeader : ELFHeaderIdent
+ {
+ public ELFHeaderType Type;
+ public ushort Machine;
+ public uint Version;
+ public VirtualAddress Entry;
+ public FileOffset ProgramHeaderOffset;
+ public FileOffset SectionHeaderOffset;
+ public uint Flags;
+ public ushort EHSize;
+ public ushort ProgramHeaderEntrySize;
+ public ushort ProgramHeaderCount;
+ public ushort SectionHeaderEntrySize;
+ public ushort SectionHeaderCount;
+ public ushort SectionHeaderStringIndex;
+
+ #region Validation Rules
+
+ public ValidationRule IsProgramHeaderCountReasonable
+ {
+ get
+ {
+ return new ValidationRule("Unreasonably large ELFHeader ProgramHeaderCount", () => ProgramHeaderCount <= 30000);
+ }
+ }
+
+ public ValidationRule IsSectionHeaderCountReasonable
+ {
+ get
+ {
+ return new ValidationRule("Unreasonably large ELFHeader SectionHeaderCount", () => SectionHeaderCount <= 30000);
+ }
+ }
+
+ #endregion
+ }
+
+ public class ELFFileTableHeader : TStruct
+ {
+ public SizeT EntryCount;
+ public SizeT PageSize;
+ }
+
+ public class ELFFileTableEntryPointers : TStruct
+ {
+ public VirtualAddress Start;
+ public VirtualAddress Stop;
+ public SizeT PageOffset;
+ }
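+
+ // Usage sketch (illustrative only): the two-phase parse described in the
+ // ELFHeaderIdent remarks above. The ident is read first with a Reader using
+ // default layouts (as is done for the magic structs elsewhere in these
+ // sources); its Class and Data bytes then select the layout for the full
+ // header.
+ internal static class ELFHeaderSample
+ {
+ internal static ELFHeader ReadHeader(IAddressSpace dataSource)
+ {
+ Reader identReader = new(dataSource);
+ ELFHeaderIdent ident = identReader.Read<ELFHeaderIdent>(0);
+ ident.IsIdentMagicValid.CheckThrowing();
+ ident.IsClassValid.CheckThrowing();
+ ident.IsDataValid.CheckThrowing();
+
+ bool isBigEndian = ident.Data == ELFData.BigEndian;
+ bool is64Bit = ident.Class == ELFClass.Class64;
+ Reader reader = new(dataSource, new LayoutManager().AddELFTypes(isBigEndian, is64Bit));
+ return reader.Read<ELFHeader>(0);
+ }
+ }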
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Reflection;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats
+{
+ public class EnumLayout : LayoutBase
+ {
+ public EnumLayout(Type enumType, ILayout underlyingIntegralLayout) :
+ base(enumType, underlyingIntegralLayout.Size, underlyingIntegralLayout.NaturalAlignment)
+ {
+ _underlyingIntegralLayout = underlyingIntegralLayout;
+ }
+
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+ return _underlyingIntegralLayout.Read(dataSource, position);
+ }
+
+ private readonly ILayout _underlyingIntegralLayout;
+ }
+
+ public static partial class LayoutManagerExtensions
+ {
+ public static LayoutManager AddEnumTypes(this LayoutManager layoutManager)
+ {
+ layoutManager.AddLayoutProvider(GetEnumLayout);
+ return layoutManager;
+ }
+
+ private static ILayout GetEnumLayout(Type enumType, LayoutManager layoutManager)
+ {
+ if (!enumType.GetTypeInfo().IsEnum)
+ {
+ return null;
+ }
+ Type elementType = enumType.GetTypeInfo().GetEnumUnderlyingType();
+ return new EnumLayout(enumType, layoutManager.GetLayout(elementType));
+ }
+ }
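+
+ // Usage sketch (illustrative only): an enum is read through the layout of its
+ // underlying integral type, so any enum works once AddEnumTypes has been
+ // called. DayOfWeek is just a convenient int-backed enum for the demo.
+ internal static class EnumLayoutSample
+ {
+ internal static DayOfWeek ReadDay()
+ {
+ // 0x00000003 little-endian unboxes to DayOfWeek.Wednesday.
+ IAddressSpace bytes = new MemoryBufferAddressSpace(new byte[] { 3, 0, 0, 0 });
+ Reader reader = new(bytes, new LayoutManager().AddPrimitives(false).AddEnumTypes());
+ return reader.Read<DayOfWeek>(0);
+ }
+ }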
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats
+{
+ /// <summary>
+ /// Exception thrown to indicate that bits in the input cannot be parsed for whatever reason.
+ /// </summary>
+ public abstract class InputParsingException : Exception
+ {
+ public InputParsingException(string message)
+ : base(message)
+ {
+ }
+ }
+
+ /// <summary>
+ /// Exception thrown to indicate unparsable bits found in the input data being parsed.
+ /// </summary>
+ public class BadInputFormatException : InputParsingException
+ {
+ public BadInputFormatException(string message)
+ : base(message)
+ {
+ }
+ }
+
+ /// <summary>
+ /// Exception thrown to indicate that the virtual address/position is invalid
+ /// </summary>
+ public class InvalidVirtualAddressException : InputParsingException
+ {
+ public InvalidVirtualAddressException(string message)
+ : base(message)
+ {
+ }
+ }
+
+ /// <summary>
+ /// Exception thrown to indicate errors during Layout construction. These errors are usually
+ /// attributable to bugs in the parsing code, not errors in the input data.
+ /// </summary>
+ public class LayoutException : Exception
+ {
+ public LayoutException(string message)
+ : base(message)
+ {
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+
+namespace Microsoft.FileFormats
+{
+ internal static class Helpers
+ {
+ // Based on Convert.FromHexString, as we don't yet target TFMs where it's available.
+ public static bool TryConvertHexStringToBytes(string hexString, out byte[] bytes)
+ {
+ bytes = null;
+ if (hexString is null)
+ {
+ return false;
+ }
+ if (hexString.Length % 2 != 0)
+ {
+ return false;
+ }
+ if (hexString.Length == 0)
+ {
+ bytes = Array.Empty<byte>();
+ return true;
+ }
+ bytes = new byte[hexString.Length >> 1];
+ for (int i = 0; i < hexString.Length; i += 2)
+ {
+ byte byteHigh = byte.MaxValue >= (int)hexString[i] ? CharToHexLookup[hexString[i]] : (byte)0xFF;
+ byte byteLow = byte.MaxValue >= (int)hexString[i+1] ? CharToHexLookup[hexString[i+1]] : (byte)0xFF;
+
+ if ((byteLow | byteHigh) == 0xFF)
+ {
+ return false;
+ }
+ bytes[i >> 1] = (byte)(byteHigh << 4 | byteLow);
+ }
+ return true;
+ }
+
+ // From System.HexConverter; 0xFF means the character is not a valid hex digit.
+ // Declared as a static readonly field (rather than an expression-bodied
+ // property) so the 256-byte table is allocated once, not on every access in
+ // the conversion loop above.
+ private static readonly byte[] CharToHexLookup = new byte[]
+ {
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // 15
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // 31
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // 47
+ 0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // 63
+ 0xFF, 0xA, 0xB, 0xC, 0xD, 0xE, 0xF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // 79
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // 95
+ 0xFF, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // 111
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // 127
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // 143
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // 159
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // 175
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // 191
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // 207
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // 223
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // 239
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF // 255
+ };
+ }
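+
+ // Usage sketch (illustrative only): parsing a hex key string. Odd-length or
+ // non-hex input yields false rather than an exception.
+ internal static class HexSample
+ {
+ internal static byte[] ParseElfMagic()
+ {
+ // "7f454c46" is the ELF magic; this yields { 0x7F, 0x45, 0x4C, 0x46 }.
+ return Helpers.TryConvertHexStringToBytes("7f454c46", out byte[] bytes) ? bytes : null;
+ }
+ }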
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats
+{
+ /// <summary>
+ /// Abstracts a flat randomly-accessible array of bytes
+ /// </summary>
+ public interface IAddressSpace
+ {
+ /// <summary>
+ /// Reads a range of bytes from the address space
+ /// </summary>
+ /// <param name="position">The position in the address space to begin reading from</param>
+ /// <param name="buffer">The buffer that will receive the bytes that are read</param>
+ /// <param name="bufferOffset">The offset in the output buffer to begin writing the bytes</param>
+ /// <param name="count">The number of bytes to read into the buffer</param>
+ /// <returns>The number of bytes read</returns>
+ uint Read(ulong position, byte[] buffer, uint bufferOffset, uint count);
+
+ /// <summary>
+ /// The upper bound (non-inclusive) of readable addresses
+ /// </summary>
+ /// <remarks>
+ /// Some address spaces may be sparse; there is no guarantee that reads will
+ /// succeed even at addresses less than Length.
+ /// </remarks>
+ ulong Length { get; }
+ }
+
+ public static class AddressSpaceExtensions
+ {
+ /// <summary>
+ /// Read the specified number of bytes.
+ /// </summary>
+ /// <param name="addressSpace">The address space to read from</param>
+ /// <param name="position">The position in the address space to start reading from</param>
+ /// <param name="count">The number of bytes to read</param>
+ /// <returns>
+ /// Returns an array of exactly "count" bytes or throws an exception.
+ /// </returns>
+ /// <throws>
+ /// BadInputFormatException to indicate an "unexpected end of stream" condition
+ /// </throws>
+ public static byte[] Read(this IAddressSpace addressSpace, ulong position, uint count)
+ {
+ byte[] bytes = ArrayHelper.New<byte>(count);
+ if (count != addressSpace.Read(position, bytes, 0, count))
+ {
+ throw new BadInputFormatException("Unable to read bytes at offset 0x" + position.ToString("x"));
+ }
+ return bytes;
+ }
+ }
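+
+ // Usage sketch (illustrative only): IAddressSpace.Read may return fewer bytes
+ // than requested, while the extension method above turns a short read into a
+ // BadInputFormatException so parsers can assume complete reads.
+ internal static class AddressSpaceSample
+ {
+ internal static byte[] ReadMagic(IAddressSpace addressSpace)
+ {
+ return addressSpace.Read(0, 4); // throws BadInputFormatException on a short read
+ }
+ }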
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats
+{
+ public interface IField
+ {
+ string Name { get; }
+ ILayout Layout { get; }
+ ILayout DeclaringLayout { get; }
+ uint Offset { get; }
+
+ object GetValue(TStruct tStruct);
+ void SetValue(TStruct tStruct, object fieldValue);
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats
+{
+ /// <summary>
+ /// Information about a type that allows it to be parsed from a byte sequence either on its own
+ /// or as a sub-component of another type
+ /// </summary>
+ public interface ILayout
+ {
+ /// <summary>
+ /// The type being laid out
+ /// </summary>
+ Type Type { get; }
+
+ /// <summary>
+ /// Size in bytes for the serialized representation of the type.
+ /// </summary>
+ /// <throws>InvalidOperationException if IsFixedSize == false</throws>
+ uint Size { get; }
+
+ /// <summary>
+ /// Returns true if all instances of the type serialize to the same number of bytes
+ /// </summary>
+ bool IsFixedSize { get; }
+
+ /// <summary>
+ /// The preferred alignment of this type
+ /// </summary>
+ uint NaturalAlignment { get; }
+
+ /// <summary>
+ /// The set of fields that compose the type
+ /// </summary>
+ IEnumerable<IField> Fields { get; }
+
+ /// <summary>
+ /// Size in bytes for the serialized representation of all the fields in this type.
+ /// This may be less than Size because it does not account for trailing padding bytes
+ /// after the last field.
+ /// </summary>
+ /// <throws>InvalidOperationException if IsFixedSize == false</throws>
+ uint SizeAsBaseType { get; }
+
+ /// <summary>
+ /// Parse an instance from the dataSource starting at position
+ /// </summary>
+ object Read(IAddressSpace dataSource, ulong position);
+
+ /// <summary>
+ /// Parse an instance from the dataSource starting at position and report the number of bytes
+ /// that were read
+ /// </summary>
+ object Read(IAddressSpace dataSource, ulong position, out uint bytesRead);
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats
+{
+ /// <summary>
+ /// A common base class to assist in implementing ILayout
+ /// </summary>
+ public abstract class LayoutBase : ILayout
+ {
+ public LayoutBase(Type type) : this(type, 0) { }
+ public LayoutBase(Type type, uint size) : this(type, size, size) { }
+ public LayoutBase(Type type, uint size, uint naturalAlignment) : this(type, size, naturalAlignment, size) { }
+ public LayoutBase(Type type, uint size, uint naturalAlignment, uint sizeAsBaseType) : this(type, size, naturalAlignment, sizeAsBaseType, Array.Empty<IField>()) { }
+ public LayoutBase(Type type, uint size, uint naturalAlignment, uint sizeAsBaseType, IField[] fields)
+ {
+ Type = type;
+ IsFixedSize = true;
+ Size = size;
+ SizeAsBaseType = sizeAsBaseType;
+ NaturalAlignment = naturalAlignment;
+ Fields = fields;
+ }
+
+ public IEnumerable<IField> Fields { get; private set; }
+
+ public uint NaturalAlignment { get; private set; }
+
+ public bool IsFixedSize { get; private set; }
+
+ public uint Size { get; private set; }
+
+ public uint SizeAsBaseType { get; private set; }
+
+ public Type Type { get; private set; }
+
+ public virtual object Read(IAddressSpace dataSource, ulong position, out uint bytesRead)
+ {
+ bytesRead = Size;
+ return Read(dataSource, position);
+ }
+
+ public virtual object Read(IAddressSpace dataSource, ulong position)
+ {
+ throw new NotImplementedException();
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Linq;
+using System.Reflection;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats
+{
+ /// <summary>
+ /// A container that can provide ILayout instances for Types.
+ /// </summary>
+ public class LayoutManager
+ {
+ private Dictionary<Type, ILayout> _layouts = new();
+ private List<Func<Type, LayoutManager, ILayout>> _layoutProviders = new();
+ private Dictionary<Tuple<Type, uint>, ILayout> _arrayLayouts = new();
+
+ public LayoutManager() { }
+
+ public void AddLayout(ILayout layout)
+ {
+ _layouts.Add(layout.Type, layout);
+ }
+
+ public void AddLayoutProvider(Func<Type, LayoutManager, ILayout> layoutProvider)
+ {
+ _layoutProviders.Add(layoutProvider);
+ }
+
+ public ILayout GetArrayLayout(Type arrayType, uint numElements)
+ {
+ if (!arrayType.IsArray)
+ {
+ throw new ArgumentException("The type parameter must be an array");
+ }
+ if (arrayType.GetArrayRank() != 1)
+ {
+ throw new ArgumentException("Multidimensional arrays are not supported");
+ }
+
+ ILayout layout;
+ Tuple<Type, uint> key = new(arrayType, numElements);
+ if (!_arrayLayouts.TryGetValue(key, out layout))
+ {
+ Type elemType = arrayType.GetElementType();
+ layout = new ArrayLayout(arrayType, GetLayout(elemType), numElements);
+ _arrayLayouts.Add(key, layout);
+ }
+ return layout;
+ }
+
+ public ILayout GetArrayLayout<T>(uint numElements)
+ {
+ return GetArrayLayout(typeof(T), numElements);
+ }
+
+ public ILayout GetLayout<T>()
+ {
+ return GetLayout(typeof(T));
+ }
+
+ public ILayout GetLayout(Type t)
+ {
+ ILayout layout;
+ if (!_layouts.TryGetValue(t, out layout))
+ {
+ foreach (Func<Type, LayoutManager, ILayout> provider in _layoutProviders)
+ {
+ layout = provider(t, this);
+ if (layout != null)
+ {
+ break;
+ }
+ }
+ if (layout == null)
+ {
+ throw new LayoutException("Unable to create layout for type " + t.FullName);
+ }
+ _layouts.Add(t, layout);
+ }
+ return layout;
+ }
+ }
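+
+ // Usage sketch (illustrative only): layouts are resolved on demand by the
+ // registered providers and then cached, so repeated lookups for the same type
+ // return the same ILayout instance.
+ internal static class LayoutManagerSample
+ {
+ internal static bool LayoutsAreCached()
+ {
+ LayoutManager layouts = new LayoutManager().AddPrimitives(false).AddEnumTypes();
+ return ReferenceEquals(layouts.GetLayout<uint>(), layouts.GetLayout<uint>()); // true
+ }
+ }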
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats.MachO
+{
+ public class MachCore
+ {
+ private readonly MachOFile _machO;
+ private readonly ulong _dylinkerHintAddress;
+ private readonly Lazy<ulong> _dylinkerAddress;
+ private readonly Lazy<MachDyld> _dylinker;
+ private readonly Lazy<MachLoadedImage[]> _loadedImages;
+
+ public MachCore(IAddressSpace dataSource, ulong dylinkerHintAddress = 0)
+ {
+ _machO = new MachOFile(dataSource);
+ _dylinkerHintAddress = dylinkerHintAddress;
+ _dylinkerAddress = new Lazy<ulong>(FindDylinker);
+ _dylinker = new Lazy<MachDyld>(() => new MachDyld(new MachOFile(VirtualAddressReader.DataSource, DylinkerAddress, true)));
+ _loadedImages = new Lazy<MachLoadedImage[]>(ReadImages);
+ }
+
+ public Reader VirtualAddressReader { get { return _machO.VirtualAddressReader; } }
+ public ulong DylinkerAddress { get { return _dylinkerAddress.Value; } }
+ public MachDyld Dylinker { get { return _dylinker.Value; } }
+ public IEnumerable<MachLoadedImage> LoadedImages { get { return _loadedImages.Value; } }
+
+ public bool IsValid()
+ {
+ return _machO.IsValid() && _machO.Header.FileType == MachHeaderFileType.Core;
+ }
+
+ private ulong FindDylinker()
+ {
+ if (_dylinkerHintAddress != 0 && IsValidDylinkerAddress(_dylinkerHintAddress))
+ {
+ return _dylinkerHintAddress;
+ }
+ if (TryFindDylinker(firstPass: true, out ulong position))
+ {
+ return position;
+ }
+ if (TryFindDylinker(firstPass: false, out position))
+ {
+ return position;
+ }
+ throw new BadInputFormatException("No dylinker module found");
+ }
+
+ private bool TryFindDylinker(bool firstPass, out ulong position)
+ {
+ const uint skip = 0x1000;
+ const uint firstPassAttemptCount = 8;
+ foreach (MachSegment segment in _machO.Segments)
+ {
+ ulong start = 0;
+ ulong end = segment.LoadCommand.FileSize;
+ if (firstPass)
+ {
+ end = skip * firstPassAttemptCount;
+ }
+ else
+ {
+ start = skip * firstPassAttemptCount;
+ }
+ for (ulong offset = start; offset < end; offset += skip)
+ {
+ ulong possibleDylinker = segment.LoadCommand.VMAddress + offset;
+ if (IsValidDylinkerAddress(possibleDylinker))
+ {
+ position = possibleDylinker;
+ return true;
+ }
+ }
+ }
+ position = 0;
+ return false;
+ }
+
+ private bool IsValidDylinkerAddress(ulong possibleDylinkerAddress)
+ {
+ MachOFile dylinker = new(VirtualAddressReader.DataSource, possibleDylinkerAddress, true);
+ return dylinker.IsValid() && dylinker.Header.FileType == MachHeaderFileType.Dylinker;
+ }
+
+ private MachLoadedImage[] ReadImages()
+ {
+ return Dylinker.Images.Select(i => new MachLoadedImage(new MachOFile(VirtualAddressReader.DataSource, i.LoadAddress, true), i)).ToArray();
+ }
+ }
+
+ public class MachLoadedImage
+ {
+ private readonly DyldLoadedImage _dyldLoadedImage;
+
+ public MachLoadedImage(MachOFile image, DyldLoadedImage dyldLoadedImage)
+ {
+ Image = image;
+ _dyldLoadedImage = dyldLoadedImage;
+ }
+
+ public MachOFile Image { get; private set; }
+ public ulong LoadAddress { get { return _dyldLoadedImage.LoadAddress; } }
+ public string Path { get { return _dyldLoadedImage.Path; } }
+ }
+
+ public class MachDyld
+ {
+ private readonly MachOFile _dyldImage;
+ private readonly Lazy<ulong> _dyldAllImageInfosAddress;
+ private readonly Lazy<DyldImageAllInfosV2> _dyldAllImageInfos;
+ private readonly Lazy<DyldImageInfo[]> _imageInfos;
+ private readonly Lazy<DyldLoadedImage[]> _images;
+
+ public MachDyld(MachOFile dyldImage)
+ {
+ _dyldImage = dyldImage;
+ _dyldAllImageInfosAddress = new Lazy<ulong>(FindAllImageInfosAddress);
+ _dyldAllImageInfos = new Lazy<DyldImageAllInfosV2>(ReadAllImageInfos);
+ _imageInfos = new Lazy<DyldImageInfo[]>(ReadImageInfos);
+ _images = new Lazy<DyldLoadedImage[]>(ReadLoadedImages);
+ }
+
+ public ulong AllImageInfosAddress { get { return _dyldAllImageInfosAddress.Value; } }
+ public DyldImageAllInfosV2 AllImageInfos { get { return _dyldAllImageInfos.Value; } }
+ public IEnumerable<DyldImageInfo> ImageInfos { get { return _imageInfos.Value; } }
+ public IEnumerable<DyldLoadedImage> Images { get { return _images.Value; } }
+
+ private ulong FindAllImageInfosAddress()
+ {
+ if (!_dyldImage.Symtab.TryLookupSymbol("dyld_all_image_infos", out ulong offset))
+ {
+ throw new BadInputFormatException("Can not find dyld_all_image_infos");
+ }
+ return offset + _dyldImage.PreferredVMBaseAddress;
+ }
+
+ private DyldImageAllInfosV2 ReadAllImageInfos()
+ {
+ return _dyldImage.VirtualAddressReader.Read<DyldImageAllInfosV2>(AllImageInfosAddress);
+ }
+
+ private DyldImageInfo[] ReadImageInfos()
+ {
+ return _dyldImage.VirtualAddressReader.ReadArray<DyldImageInfo>(AllImageInfos.InfoArray, AllImageInfos.InfoArrayCount);
+ }
+
+ private DyldLoadedImage[] ReadLoadedImages()
+ {
+ return ImageInfos.Select(i => new DyldLoadedImage(_dyldImage.VirtualAddressReader.Read<string>(i.PathAddress), i)).ToArray();
+ }
+ }
+
+ public class DyldLoadedImage
+ {
+ private readonly DyldImageInfo _imageInfo;
+
+ public DyldLoadedImage(string path, DyldImageInfo imageInfo)
+ {
+ Path = path;
+ _imageInfo = imageInfo;
+ }
+
+ public string Path;
+ public ulong LoadAddress { get { return _imageInfo.Address; } }
+ }
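+
+ // Usage sketch (illustrative only): enumerating the images a Mach-O core dump
+ // had loaded. MachCore locates dyld in the dump, then walks its
+ // dyld_all_image_infos list.
+ internal static class MachCoreSample
+ {
+ internal static void PrintImages(IAddressSpace dataSource)
+ {
+ MachCore core = new(dataSource);
+ if (core.IsValid())
+ {
+ foreach (MachLoadedImage image in core.LoadedImages)
+ {
+ Console.WriteLine($"{image.LoadAddress:x16} {image.Path}");
+ }
+ }
+ }
+ }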
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+namespace Microsoft.FileFormats.MachO
+{
+ public static class MachFatHeaderLayoutManagerExtensions
+ {
+ public static LayoutManager AddMachFatHeaderTypes(this LayoutManager layoutManager, bool isBigEndian)
+ {
+ layoutManager
+ .AddPrimitives(isBigEndian)
+ .AddEnumTypes()
+ .AddTStructTypes();
+ return layoutManager;
+ }
+ }
+
+ public enum MachFatHeaderMagicKind : uint
+ {
+ LittleEndian = 0xcafebabe,
+ BigEndian = 0xbebafeca
+ }
+
+ public class MachFatHeaderMagic : TStruct
+ {
+ public MachFatHeaderMagicKind Magic;
+
+ #region Validation Rules
+ public ValidationRule IsMagicValid
+ {
+ get
+ {
+ return new ValidationRule("Invalid MachO Fat Header Magic", () =>
+ {
+ return Magic == MachFatHeaderMagicKind.BigEndian ||
+ Magic == MachFatHeaderMagicKind.LittleEndian;
+ });
+ }
+ }
+ #endregion
+ }
+
+ public class MachFatHeader : MachFatHeaderMagic
+ {
+ public uint CountFatArches;
+
+ #region Validation Rules
+ public ValidationRule IsCountFatArchesReasonable
+ {
+ get
+ {
+ return new ValidationRule("Unreasonable MachO Fat Header CountFatArches",
+ () => CountFatArches <= 20);
+ }
+ }
+ #endregion
+ }
+
+ public class MachFatArch : TStruct
+ {
+ public uint CpuType;
+ public uint CpuSubType;
+ public uint Offset;
+ public uint Size;
+ public uint Align;
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace Microsoft.FileFormats.MachO
+{
+ public class MachOFatFile : IDisposable
+ {
+ private readonly Reader _reader;
+ private readonly Lazy<MachFatHeaderMagic> _headerMagic;
+ private readonly Lazy<Reader> _headerReader;
+ private readonly Lazy<MachFatHeader> _header;
+ private readonly Lazy<MachFatArch[]> _arches;
+ private readonly Lazy<MachOFile[]> _archSpecificFiles;
+
+ public MachOFatFile(IAddressSpace dataSource)
+ {
+ _reader = new Reader(dataSource);
+ _headerMagic = new Lazy<MachFatHeaderMagic>(() => _reader.Read<MachFatHeaderMagic>(0));
+ _headerReader = new Lazy<Reader>(() => new Reader(dataSource, new LayoutManager().AddMachFatHeaderTypes(IsBigEndian)));
+ _header = new Lazy<MachFatHeader>(() => _headerReader.Value.Read<MachFatHeader>(0));
+ _arches = new Lazy<MachFatArch[]>(ReadArches);
+ _archSpecificFiles = new Lazy<MachOFile[]>(ReadArchSpecificFiles);
+ }
+
+ public MachFatHeaderMagic HeaderMagic { get { return _headerMagic.Value; } }
+ public MachFatHeader Header { get { return _header.Value; } }
+ public MachFatArch[] Arches { get { return _arches.Value; } }
+ public MachOFile[] ArchSpecificFiles { get { return _archSpecificFiles.Value; } }
+
+ public void Dispose()
+ {
+ if (_reader.DataSource is IDisposable disposable)
+ {
+ disposable.Dispose();
+ }
+ }
+
+ public bool IsValid()
+ {
+ if (_reader.Length >= (_reader.SizeOf<MachFatHeaderMagic>()))
+ {
+ try
+ {
+ return HeaderMagic.IsMagicValid.Check();
+ }
+ catch (Exception ex) when (ex is InvalidVirtualAddressException || ex is BadInputFormatException)
+ {
+ }
+ }
+ return false;
+ }
+
+ public bool IsBigEndian
+ {
+ get
+ {
+ HeaderMagic.IsMagicValid.CheckThrowing();
+ return HeaderMagic.Magic == MachFatHeaderMagicKind.BigEndian;
+ }
+ }
+
+ private MachFatArch[] ReadArches()
+ {
+ Header.IsCountFatArchesReasonable.CheckThrowing();
+ ulong position = _headerReader.Value.SizeOf<MachFatHeader>();
+ return _headerReader.Value.ReadArray<MachFatArch>(position, Header.CountFatArches);
+ }
+
+ private MachOFile[] ReadArchSpecificFiles()
+ {
+ return Arches.Select(a => new MachOFile(new RelativeAddressSpace(_reader.DataSource, a.Offset, a.Size))).ToArray();
+ }
+ }
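+
+ // Usage sketch (illustrative only): a fat (universal) binary carries one
+ // Mach-O file per architecture; ArchSpecificFiles exposes each slice as its
+ // own MachOFile.
+ internal static class MachFatSample
+ {
+ internal static MachOFile[] GetSlices(IAddressSpace dataSource)
+ {
+ MachOFatFile fatFile = new(dataSource);
+ return fatFile.IsValid() ? fatFile.ArchSpecificFiles : null;
+ }
+ }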
+
+ public class MachOFile : IDisposable
+ {
+ private readonly ulong _position;
+ private readonly bool _dataSourceIsVirtualAddressSpace;
+ private readonly Reader _reader;
+ private readonly Lazy<MachHeaderMagic> _headerMagic;
+ private readonly Lazy<Reader> _dataSourceReader;
+ private readonly Lazy<MachHeader> _header;
+ private readonly Lazy<Tuple<MachLoadCommand, ulong>[]> _loadCommands;
+ private readonly Lazy<MachSegment[]> _segments;
+ private readonly Lazy<MachSection[]> _sections;
+ private readonly Lazy<Reader> _virtualAddressReader;
+ private readonly Lazy<Reader> _physicalAddressReader;
+ private readonly Lazy<byte[]> _uuid;
+ private readonly Lazy<MachSymtab> _symtab;
+
+ public MachOFile(IAddressSpace dataSource, ulong position = 0, bool dataSourceIsVirtualAddressSpace = false)
+ {
+ _position = position;
+ _dataSourceIsVirtualAddressSpace = dataSourceIsVirtualAddressSpace;
+ _reader = new Reader(dataSource);
+ _headerMagic = new Lazy<MachHeaderMagic>(() => _reader.Read<MachHeaderMagic>(_position));
+ _dataSourceReader = new Lazy<Reader>(CreateDataSourceReader);
+ _header = new Lazy<MachHeader>(() => DataSourceReader.Read<MachHeader>(_position));
+ _loadCommands = new Lazy<Tuple<MachLoadCommand, ulong>[]>(ReadLoadCommands);
+ _segments = new Lazy<MachSegment[]>(ReadSegments);
+ _sections = new Lazy<MachSection[]>(() => Segments.SelectMany(seg => seg.Sections).ToArray());
+ _virtualAddressReader = new Lazy<Reader>(CreateVirtualReader);
+ _physicalAddressReader = new Lazy<Reader>(CreatePhysicalReader);
+ _uuid = new Lazy<byte[]>(ReadUuid);
+ _symtab = new Lazy<MachSymtab>(ReadSymtab);
+ }
+
+ public MachHeaderMagic HeaderMagic { get { return _headerMagic.Value; } }
+ public MachHeader Header { get { return _header.Value; } }
+ public byte[] Uuid { get { return _uuid.Value; } }
+ public MachSegment[] Segments { get { return _segments.Value; } }
+ public MachSection[] Sections { get { return _sections.Value; } }
+ public Reader VirtualAddressReader { get { return _virtualAddressReader.Value; } }
+ public Reader PhysicalAddressReader { get { return _physicalAddressReader.Value; } }
+ public MachSymtab Symtab { get { return _symtab.Value; } }
+ private Reader DataSourceReader { get { return _dataSourceReader.Value; } }
+
+ public void Dispose()
+ {
+ if (_reader.DataSource is IDisposable disposable)
+ {
+ disposable.Dispose();
+ }
+ }
+ public bool IsValid()
+ {
+ if (_reader.Length >= (_position + _reader.SizeOf<MachHeaderMagic>()))
+ {
+ try
+ {
+ return HeaderMagic.IsMagicValid.Check();
+ }
+ catch (Exception ex) when (ex is InvalidVirtualAddressException || ex is BadInputFormatException)
+ {
+ }
+ }
+ return false;
+ }
+
+ public bool IsBigEndian
+ {
+ get
+ {
+ HeaderMagic.IsMagicValid.CheckThrowing();
+ return (HeaderMagic.Magic == MachHeaderMagicType.BigEndian32Bit ||
+ HeaderMagic.Magic == MachHeaderMagicType.BigEndian64Bit);
+ }
+ }
+
+ public bool Is64Bit
+ {
+ get
+ {
+ HeaderMagic.IsMagicValid.CheckThrowing();
+ return (HeaderMagic.Magic == MachHeaderMagicType.LittleEndian64Bit ||
+ HeaderMagic.Magic == MachHeaderMagicType.BigEndian64Bit);
+ }
+ }
+
+ public ulong PreferredVMBaseAddress
+ {
+ get
+ {
+ MachSegment first = Segments.Where(s => s.LoadCommand.SegName.ToString() == "__TEXT").FirstOrDefault();
+ return first != null ? _position - first.LoadCommand.VMAddress : 0;
+ }
+ }
+
+ public ulong LoadAddress
+ {
+ get
+ {
+ if (_dataSourceIsVirtualAddressSpace)
+ {
+ return _position;
+ }
+ else
+ {
+ return PreferredVMBaseAddress;
+ }
+ }
+ }
+
+ private Reader CreateDataSourceReader()
+ {
+ return new Reader(_reader.DataSource, new LayoutManager().AddMachTypes(IsBigEndian, Is64Bit));
+ }
+
+ private Reader CreateVirtualReader()
+ {
+ if (_dataSourceIsVirtualAddressSpace)
+ {
+ return DataSourceReader;
+ }
+ else
+ {
+ return DataSourceReader.WithAddressSpace(new MachVirtualAddressSpace(Segments));
+ }
+ }
+
+ private Reader CreatePhysicalReader()
+ {
+ if (!_dataSourceIsVirtualAddressSpace)
+ {
+ return DataSourceReader;
+ }
+ else
+ {
+ return DataSourceReader.WithAddressSpace(new MachPhysicalAddressSpace(_reader.DataSource, PreferredVMBaseAddress, Segments));
+ }
+ }
+
+ private Tuple<MachLoadCommand, ulong>[] ReadLoadCommands()
+ {
+ Header.IsNumberCommandsReasonable.CheckThrowing();
+ ulong position = _position + DataSourceReader.SizeOf<MachHeader>();
+ //TODO: do this more cleanly
+ if (Is64Bit)
+ {
+ position += 4; // the 64-bit header has an extra padding field that aligns the
+ // load commands at an 8-byte boundary
+ }
+ List<Tuple<MachLoadCommand, ulong>> cmds = new();
+ for (uint i = 0; i < Header.NumberCommands; i++)
+ {
+ MachLoadCommand cmd = DataSourceReader.Read<MachLoadCommand>(position);
+ cmd.IsCmdSizeReasonable.CheckThrowing();
+ cmds.Add(new Tuple<MachLoadCommand, ulong>(cmd, position));
+ position += cmd.CommandSize;
+ }
+
+ return cmds.ToArray();
+ }
+
+ private byte[] ReadUuid()
+ {
+ IsAtLeastOneUuidLoadCommand.CheckThrowing();
+ IsAtMostOneUuidLoadCommand.CheckThrowing();
+ Tuple<MachLoadCommand, ulong> cmdAndPos = _loadCommands.Value.Where(c => c.Item1.Command == LoadCommandType.Uuid).First();
+ MachUuidLoadCommand uuidCmd = DataSourceReader.Read<MachUuidLoadCommand>(cmdAndPos.Item2);
+ uuidCmd.IsCommandSizeValid.CheckThrowing();
+ return uuidCmd.Uuid;
+ }
+
+ private MachSegment[] ReadSegments()
+ {
+ List<MachSegment> segs = new();
+ foreach (Tuple<MachLoadCommand, ulong> cmdAndPos in _loadCommands.Value)
+ {
+ LoadCommandType segType = Is64Bit ? LoadCommandType.Segment64 : LoadCommandType.Segment;
+ if (cmdAndPos.Item1.Command != segType)
+ {
+ continue;
+ }
+ MachSegment seg = new(DataSourceReader, cmdAndPos.Item2, _dataSourceIsVirtualAddressSpace);
+ segs.Add(seg);
+ }
+
+ return segs.ToArray();
+ }
+
+ private MachSymtab ReadSymtab()
+ {
+ IsAtLeastOneSymtabLoadCommand.CheckThrowing();
+ IsAtMostOneSymtabLoadCommand.CheckThrowing();
+ ulong symtabPosition = 0;
+ ulong dysymtabPosition = 0;
+ foreach (Tuple<MachLoadCommand, ulong> cmdAndPos in _loadCommands.Value)
+ {
+ switch (cmdAndPos.Item1.Command)
+ {
+ case LoadCommandType.Symtab:
+ symtabPosition = cmdAndPos.Item2;
+ break;
+ case LoadCommandType.DySymtab:
+ dysymtabPosition = cmdAndPos.Item2;
+ break;
+ }
+ }
+ if (symtabPosition == 0 || dysymtabPosition == 0)
+ {
+ return null;
+ }
+ return new MachSymtab(DataSourceReader, symtabPosition, dysymtabPosition, PhysicalAddressReader);
+ }
+
+ #region Validation Rules
+ public ValidationRule IsAtMostOneUuidLoadCommand
+ {
+ get
+ {
+ return new ValidationRule("Mach load command sequence has too many uuid elements",
+ () => _loadCommands.Value.Count(c => c.Item1.Command == LoadCommandType.Uuid) <= 1);
+ }
+ }
+ public ValidationRule IsAtLeastOneUuidLoadCommand
+ {
+ get
+ {
+ return new ValidationRule("Mach load command sequence has no uuid elements",
+ () => _loadCommands.Value.Any(c => c.Item1.Command == LoadCommandType.Uuid));
+ }
+ }
+ public ValidationRule IsAtMostOneSymtabLoadCommand
+ {
+ get
+ {
+ return new ValidationRule("Mach load command sequence has too many symtab elements",
+ () => _loadCommands.Value.Count(c => c.Item1.Command == LoadCommandType.Symtab) <= 1);
+ }
+ }
+ public ValidationRule IsAtLeastOneSymtabLoadCommand
+ {
+ get
+ {
+ return new ValidationRule("Mach load command sequence has no symtab elements",
+ () => _loadCommands.Value.Any(c => c.Item1.Command == LoadCommandType.Symtab));
+ }
+ }
+ public ValidationRule IsAtLeastOneSegmentAtFileOffsetZero
+ {
+ get
+ {
+ return new ValidationRule("Mach load command sequence has no segments which contain file offset zero",
+ () => Segments.Where(s => s.LoadCommand.FileOffset == 0 &&
+ s.LoadCommand.FileSize != 0).Any());
+ }
+ }
+ #endregion
+ }
+
+ public class MachSegment
+ {
+ private readonly Reader _dataSourceReader;
+ private readonly ulong _position;
+ private readonly bool _readerIsVirtualAddressSpace;
+ private readonly Lazy<MachSegmentLoadCommand> _loadCommand;
+ private readonly Lazy<MachSection[]> _sections;
+ private readonly Lazy<Reader> _physicalContents;
+ private readonly Lazy<Reader> _virtualContents;
+
+ public MachSegment(Reader machReader, ulong position, bool readerIsVirtualAddressSpace = false)
+ {
+ _dataSourceReader = machReader;
+ _position = position;
+ _readerIsVirtualAddressSpace = readerIsVirtualAddressSpace;
+ _loadCommand = new Lazy<MachSegmentLoadCommand>(() => _dataSourceReader.Read<MachSegmentLoadCommand>(_position));
+ _sections = new Lazy<MachSection[]>(ReadSections);
+ _physicalContents = new Lazy<Reader>(CreatePhysicalSegmentAddressSpace);
+ _virtualContents = new Lazy<Reader>(CreateVirtualSegmentAddressSpace);
+ }
+
+ public MachSegmentLoadCommand LoadCommand { get { return _loadCommand.Value; } }
+ public IEnumerable<MachSection> Sections { get { return _sections.Value; } }
+ public Reader PhysicalContents { get { return _physicalContents.Value; } }
+ public Reader VirtualContents { get { return _virtualContents.Value; } }
+
+ private MachSection[] ReadSections()
+ {
+ ulong sectionStartOffset = _position + _dataSourceReader.SizeOf<MachSegmentLoadCommand>();
+ return _dataSourceReader.ReadArray<MachSection>(sectionStartOffset, _loadCommand.Value.CountSections);
+ }
+
+ private Reader CreatePhysicalSegmentAddressSpace()
+ {
+ if (!_readerIsVirtualAddressSpace)
+ {
+ return _dataSourceReader.WithRelativeAddressSpace(LoadCommand.FileOffset, LoadCommand.FileSize, 0);
+ }
+ else
+ {
+ return _dataSourceReader.WithRelativeAddressSpace(LoadCommand.VMAddress, LoadCommand.FileSize,
+ (long)(LoadCommand.FileOffset - LoadCommand.VMAddress));
+ }
+ }
+
+ private Reader CreateVirtualSegmentAddressSpace()
+ {
+ if (_readerIsVirtualAddressSpace)
+ {
+ return _dataSourceReader.WithRelativeAddressSpace(LoadCommand.VMAddress, LoadCommand.VMSize, 0);
+ }
+ else
+ {
+ return _dataSourceReader.WithAddressSpace(
+ new PiecewiseAddressSpace(
+ new PiecewiseAddressSpaceRange()
+ {
+ AddressSpace = new RelativeAddressSpace(_dataSourceReader.DataSource, LoadCommand.FileOffset, LoadCommand.FileSize,
+ (long)(LoadCommand.VMAddress - LoadCommand.FileOffset)),
+ Start = LoadCommand.VMAddress,
+ Length = LoadCommand.FileSize
+ },
+ new PiecewiseAddressSpaceRange()
+ {
+ AddressSpace = new ZeroAddressSpace(LoadCommand.VMAddress + LoadCommand.VMSize),
+ Start = LoadCommand.VMAddress + LoadCommand.FileSize,
+ Length = LoadCommand.VMSize - LoadCommand.FileSize
+ }));
+ }
+ }
+ }
+
+ public class MachVirtualAddressSpace : PiecewiseAddressSpace
+ {
+ public MachVirtualAddressSpace(IEnumerable<MachSegment> segments) : base(segments.Select(s => ToRange(s)).ToArray())
+ {
+ }
+
+ private static PiecewiseAddressSpaceRange ToRange(MachSegment segment)
+ {
+ return new PiecewiseAddressSpaceRange()
+ {
+ AddressSpace = segment.VirtualContents.DataSource,
+ Start = segment.LoadCommand.VMAddress,
+ Length = segment.LoadCommand.VMSize
+ };
+ }
+ }
+
+ public class MachPhysicalAddressSpace : PiecewiseAddressSpace
+ {
+ public MachPhysicalAddressSpace(IAddressSpace virtualAddressSpace, ulong preferredVMBaseAddress, IEnumerable<MachSegment> segments) :
+ base(segments.Select(s => ToRange(virtualAddressSpace, preferredVMBaseAddress, s)).ToArray())
+ {
+ }
+
+ private static PiecewiseAddressSpaceRange ToRange(IAddressSpace virtualAddressSpace, ulong preferredVMBaseAddress, MachSegment segment)
+ {
+ ulong actualSegmentLoadAddress = preferredVMBaseAddress + segment.LoadCommand.VMAddress - segment.LoadCommand.FileOffset;
+ return new PiecewiseAddressSpaceRange()
+ {
+ AddressSpace = new RelativeAddressSpace(virtualAddressSpace, actualSegmentLoadAddress, segment.LoadCommand.FileSize),
+ Start = segment.LoadCommand.FileOffset,
+ Length = segment.LoadCommand.FileSize
+ };
+ }
+ }
+
+ public class MachSymbol
+ {
+ public string Name;
+ public ulong Value { get { return Raw.Value; } }
+ public NList Raw;
+
+ public override string ToString()
+ {
+ return Name + "@0x" + Value.ToString("x");
+ }
+ }
+
+ public class MachSymtab
+ {
+ private readonly Reader _machReader;
+ private readonly Reader _physicalAddressSpace;
+ private readonly Lazy<MachSymtabLoadCommand> _symtabLoadCommand;
+ private readonly Lazy<MachDySymtabLoadCommand> _dysymtabLoadCommand;
+ private readonly Lazy<MachSymbol[]> _symbols;
+ private readonly Lazy<Reader> _stringReader;
+ private readonly Lazy<NList[]> _symbolTable;
+
+ public MachSymtab(Reader machReader, ulong symtabPosition, ulong dysymtabPosition, Reader physicalAddressSpace)
+ {
+ _machReader = machReader;
+ _physicalAddressSpace = physicalAddressSpace;
+ _symtabLoadCommand = new Lazy<MachSymtabLoadCommand>(() => _machReader.Read<MachSymtabLoadCommand>(symtabPosition));
+ _dysymtabLoadCommand = new Lazy<MachDySymtabLoadCommand>(() => _machReader.Read<MachDySymtabLoadCommand>(dysymtabPosition));
+ _stringReader = new Lazy<Reader>(GetStringReader);
+ _symbolTable = new Lazy<NList[]>(ReadSymbolTable);
+ _symbols = new Lazy<MachSymbol[]>(ReadSymbols);
+ }
+
+ public IEnumerable<MachSymbol> Symbols { get { return _symbols.Value; } }
+
+ public bool TryLookupSymbol(string symbol, out ulong offset)
+ {
+ if (symbol is null)
+ {
+ throw new ArgumentNullException(nameof(symbol));
+ }
+ MachSymtabLoadCommand symtabLoadCommand = _symtabLoadCommand.Value;
+ MachDySymtabLoadCommand dysymtabLoadCommand = _dysymtabLoadCommand.Value;
+
+ // First, search just the "external" export symbols
+ if (TryLookupSymbol(dysymtabLoadCommand.IExtDefSym, dysymtabLoadCommand.NextDefSym, symbol, out offset))
+ {
+ return true;
+ }
+
+ // If not found in external symbols, search all of them
+ if (TryLookupSymbol(0, symtabLoadCommand.SymCount, symbol, out offset))
+ {
+ return true;
+ }
+
+ offset = 0;
+ return false;
+ }
+
+ private bool TryLookupSymbol(uint start, uint nsyms, string symbol, out ulong offset)
+ {
+ NList[] symTable = _symbolTable.Value;
+ if (symTable is not null)
+ {
+ for (uint i = 0; i < nsyms && start + i < symTable.Length; i++)
+ {
+ string name = _stringReader.Value.Read<string>(symTable[start + i].StringIndex);
+ if (name.Length > 0)
+ {
+ // Skip the leading underscores to match Linux externs
+ if (name[0] == '_')
+ {
+ name = name.Substring(1);
+ }
+ if (name == symbol)
+ {
+ offset = symTable[start + i].Value;
+ return true;
+ }
+ }
+ }
+ }
+ offset = 0;
+ return false;
+ }
+
+ private MachSymbol[] ReadSymbols()
+ {
+ Reader stringReader = _stringReader.Value;
+ return _symbolTable.Value?.Select(n => new MachSymbol() { Name = stringReader.Read<string>(n.StringIndex), Raw = n }).ToArray();
+ }
+
+ private Reader GetStringReader()
+ {
+ MachSymtabLoadCommand symtabLoadCommand = _symtabLoadCommand.Value;
+ return _physicalAddressSpace.WithRelativeAddressSpace(symtabLoadCommand.StringOffset, symtabLoadCommand.StringSize);
+ }
+
+ private NList[] ReadSymbolTable()
+ {
+ MachSymtabLoadCommand symtabLoadCommand = _symtabLoadCommand.Value;
+ if (symtabLoadCommand.IsNSymsReasonable.Check() && symtabLoadCommand.SymOffset > 0)
+ {
+ return _physicalAddressSpace.ReadArray<NList>(symtabLoadCommand.SymOffset, symtabLoadCommand.SymCount);
+ }
+ return null;
+ }
+ }
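+
+ // Usage sketch (illustrative only): opening a Mach-O file and resolving a
+ // symbol to a virtual address, mirroring what MachDyld does for
+ // dyld_all_image_infos. The Uuid property similarly exposes the LC_UUID
+ // payload.
+ internal static class MachOFileSample
+ {
+ internal static ulong? FindSymbol(IAddressSpace dataSource, string symbol)
+ {
+ MachOFile machO = new(dataSource);
+ if (machO.IsValid() && machO.Symtab != null &&
+ machO.Symtab.TryLookupSymbol(symbol, out ulong offset))
+ {
+ return offset + machO.PreferredVMBaseAddress;
+ }
+ return null;
+ }
+ }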
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Linq;
+using System.Text;
+
+namespace Microsoft.FileFormats.MachO
+{
+ public static class MachLayoutManagerExtensions
+ {
+ public static LayoutManager AddMachTypes(this LayoutManager layoutManager, bool isBigEndian, bool is64Bit)
+ {
+ layoutManager
+ .AddPrimitives(isBigEndian)
+ .AddSizeT(is64Bit ? 8 : 4)
+ .AddEnumTypes()
+ .AddNullTerminatedString()
+ .AddTStructTypes();
+ return layoutManager;
+ }
+ }
+
+ public enum MachHeaderMagicType : uint
+ {
+ LittleEndian64Bit = 0xfeedfacf,
+ LittleEndian32Bit = 0xfeedface,
+ BigEndian64Bit = 0xcffaedfe,
+ BigEndian32Bit = 0xcefaedfe
+ }
+
+ public class MachHeaderMagic : TStruct
+ {
+ public MachHeaderMagicType Magic;
+ #region Validation Rules
+ public ValidationRule IsMagicValid
+ {
+ get
+ {
+ return new ValidationRule("Invalid MachO Header Magic", () =>
+ {
+ return Magic == MachHeaderMagicType.LittleEndian32Bit ||
+ Magic == MachHeaderMagicType.LittleEndian64Bit;
+ });
+ }
+ }
+ #endregion
+ }
+
+ public enum MachHeaderFileType : uint
+ {
+ Object = 1,
+ Execute = 2,
+ FvmLib = 3,
+ Core = 4,
+ Preload = 5,
+ Dylib = 6,
+ Dylinker = 7,
+ Bundle = 8,
+ DylibStub = 9,
+ Dsym = 10,
+ KextBundle = 11
+ }
+
+ public class MachHeader : MachHeaderMagic
+ {
+ public uint CpuType;
+ public uint CpuSubType;
+ public MachHeaderFileType FileType;
+ public uint NumberCommands;
+ public uint SizeOfCommands;
+ public uint Flags;
+
+ #region Validation Rules
+ public ValidationRule IsFileTypeValid
+ {
+ get
+ {
+ return new ValidationRule("Mach Header FileType is invalid",
+ () => Enum.IsDefined(typeof(MachHeaderFileType), FileType));
+ }
+ }
+
+ public ValidationRule IsNumberCommandsReasonable
+ {
+ get
+ {
+ return new ValidationRule("Mach Header NumberCommands is unreasonable",
+ () => NumberCommands <= 20000);
+ }
+ }
+ #endregion
+ }
+
+ public enum LoadCommandType
+ {
+ Segment = 1,
+ Symtab = 2,
+ Thread = 4,
+ DySymtab = 11,
+ Segment64 = 25,
+ Uuid = 27,
+ }
+
+ public class MachLoadCommand : TStruct
+ {
+ public LoadCommandType Command;
+ public uint CommandSize;
+
+ public override string ToString()
+ {
+ return "LoadCommand {" + Command + ", 0x" + CommandSize.ToString("x") + "}";
+ }
+
+ #region Validation Rules
+ public ValidationRule IsCmdSizeReasonable
+ {
+ get
+ {
+ return new ValidationRule("Mach Load Command Size is unreasonable",
+ () => CommandSize < 0x1000);
+ }
+ }
+
+ public ValidationRule IsCommandRecognized
+ {
+ get
+ {
+ return new ValidationRule("Mach Load Command is not recognized",
+ () => Enum.IsDefined(typeof(LoadCommandType), Command));
+ }
+ }
+ #endregion
+ }
+
+ public class MachFixedLengthString16 : TStruct
+ {
+ [ArraySize(16)]
+ public byte[] Bytes;
+
+ public override string ToString()
+ {
+ try
+ {
+ return Encoding.UTF8.GetString(Bytes.TakeWhile(b => b != 0).ToArray());
+ }
+ catch (FormatException)
+ {
+ throw new BadInputFormatException("Bytes could not be parsed with UTF8 encoding");
+ }
+ }
+ }
+
+ public class MachSegmentLoadCommand : MachLoadCommand
+ {
+ public MachFixedLengthString16 SegName;
+ public SizeT VMAddress;
+ public SizeT VMSize;
+ public SizeT FileOffset;
+ public SizeT FileSize;
+ public uint MaxProt;
+ public uint InitProt;
+ public uint CountSections;
+ public uint Flags;
+
+ #region Validation Rules
+ private ValidationRule IsCommandValid
+ {
+ get
+ {
+ return new ValidationRule("Mach Segment Command has invalid Command field",
+ () => Command == LoadCommandType.Segment || Command == LoadCommandType.Segment64);
+ }
+ }
+ #endregion
+ }
+
+ public class MachSection : TStruct
+ {
+ public MachFixedLengthString16 SectionName;
+ public MachFixedLengthString16 SegmentName;
+ public SizeT Address;
+ public SizeT Size;
+ public uint Offset;
+ public uint Align;
+ public uint RelativeOffset;
+ public uint CountRelocs;
+ public uint Flags;
+ public uint Reserved1;
+ public uint Reserved2;
+ }
+
+ public class MachUuidLoadCommand : MachLoadCommand
+ {
+ [ArraySize(16)]
+ public byte[] Uuid;
+
+ #region Validation Rules
+ public ValidationRule IsCommandValid
+ {
+ get
+ {
+ return new ValidationRule("Mach UUID LoadCommand has invalid command id",
+ () => Command == LoadCommandType.Uuid);
+ }
+ }
+
+ public ValidationRule IsCommandSizeValid
+ {
+ get
+ {
+ return new ValidationRule("Mach UUID LoadCommand has invalid size",
+ () => CommandSize == 24);
+ }
+ }
+ #endregion
+ }
+
+ public class MachSymtabLoadCommand : MachLoadCommand
+ {
+ public uint SymOffset;
+ public uint SymCount;
+ public uint StringOffset;
+ public uint StringSize;
+
+ #region Validation Rules
+ public ValidationRule IsCommandValid
+ {
+ get
+ {
+ return new ValidationRule("Mach Symtab LoadCommand has invalid command id",
+ () => Command == LoadCommandType.Symtab);
+ }
+ }
+
+ public ValidationRule IsCommandSizeValid
+ {
+ get
+ {
+ return new ValidationRule("Mach Symtab LoadCommand has invalid size",
+ () => CommandSize == 24);
+ }
+ }
+
+ public ValidationRule IsNSymsReasonable
+ {
+ get
+ {
+ return new ValidationRule("Mach symtab LoadCommand has unreasonable SymCount",
+ () => SymCount <= 0x100000);
+ }
+ }
+ #endregion
+ }
+
+ public class MachDySymtabLoadCommand : MachLoadCommand
+ {
+ public uint ILocalSym;
+ public uint NLocalSym;
+ public uint IExtDefSym;
+ public uint NextDefSym;
+ public uint IUndefSym;
+ public uint NUndefSym;
+ public uint ToCoff;
+ public uint NToc;
+ public uint ModTabOff;
+ public uint MModTab;
+ public uint ExtrefSymOff;
+ public uint NextrefSyms;
+ public uint IndirectSymOff;
+ public uint NindirectSyms;
+ public uint ExtrelOff;
+ public uint Nextrel;
+ public uint LocrelOff;
+ public uint NLocrel;
+ }
+
+ public class NList : TStruct
+ {
+ public uint StringIndex;
+ public byte Type;
+ public byte Section;
+ public ushort Desc;
+ public SizeT Value;
+ }
+
+ public class DyldImageAllInfosVersion : TStruct
+ {
+ public uint Version;
+ }
+
+ public class DyldImageAllInfosV2 : DyldImageAllInfosVersion
+ {
+ public uint InfoArrayCount;
+ public SizeT InfoArray;
+ public SizeT Notification;
+ public SizeT Undetermined; // there are some fields here but I haven't determined their size and purpose
+ public SizeT ImageLoadAddress;
+ }
+
+ public class DyldImageInfo : TStruct
+ {
+ public SizeT Address;
+ public SizeT PathAddress;
+ public SizeT ModDate;
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats
+{
+ /// <summary>
+ /// Creates an address space that reads from a byte array in memory.
+ /// </summary>
+ public sealed class MemoryBufferAddressSpace : IAddressSpace
+ {
+ public MemoryBufferAddressSpace(IEnumerable<byte> bytes)
+ {
+ _bytes = bytes.ToArray();
+ Length = (ulong)_bytes.Length;
+ }
+
+ /// <summary>
+ /// The upper bound (non-inclusive) of readable addresses
+ /// </summary>
+ public ulong Length { get; private set; }
+
+ /// <summary>
+ /// Reads a range of bytes from the address space
+ /// </summary>
+ /// <param name="position">The position in the address space to begin reading from</param>
+ /// <param name="buffer">The buffer that will receive the bytes that are read</param>
+ /// <param name="bufferOffset">The offset in the output buffer to begin writing the bytes</param>
+ /// <param name="count">The number of bytes to read into the buffer</param>
+ /// <returns>The number of bytes read</returns>
+ public uint Read(ulong position, byte[] buffer, uint bufferOffset, uint count)
+ {
+ if (position >= Length || position + count > Length)
+ {
+ throw new BadInputFormatException("Unexpected end of data: Expected " + count + " bytes.");
+ }
+ Array.Copy(_bytes, (int)position, buffer, (int)bufferOffset, (int)count);
+ return count;
+ }
+
+ private readonly byte[] _bytes;
+ }
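+
+ // Usage sketch (illustrative only): wrapping an in-memory copy of a file so
+ // that the format readers in this library can parse it.
+ internal static class MemoryBufferSample
+ {
+ internal static IAddressSpace FromFile(string path)
+ {
+ return new MemoryBufferAddressSpace(System.IO.File.ReadAllBytes(path));
+ }
+ }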
+}
--- /dev/null
+<Project Sdk="Microsoft.NET.Sdk">
+ <PropertyGroup>
+ <TargetFrameworks>net462;netstandard2.0</TargetFrameworks>
+ <NoWarn>;1591;1701</NoWarn>
+ <IsPackable>true</IsPackable>
+ <Description>File format readers</Description>
+ <PackageReleaseNotes>$(Description)</PackageReleaseNotes>
+ <PackageTags>File formats</PackageTags>
+ <IncludeSymbols>true</IncludeSymbols>
+ <IsShippingAssembly>true</IsShippingAssembly>
+ <!-- Preserve dotnet/symstore versioning scheme. -->
+ <PreReleaseVersionLabel />
+ <VersionPrefix>1.0.0</VersionPrefix>
+ </PropertyGroup>
+</Project>
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Collections.ObjectModel;
+using System.Diagnostics;
+using System.Linq;
+
+namespace Microsoft.FileFormats.Minidump
+{
+ /// <summary>
+ /// A class which represents a Minidump (Microsoft "crash dump").
+ /// </summary>
+ public class Minidump
+ {
+ private readonly ulong _position;
+ private readonly Reader _dataSourceReader;
+ private readonly MinidumpHeader _header;
+ private readonly MinidumpDirectory[] _directory;
+ private readonly MinidumpSystemInfo _systemInfo;
+ private readonly int _moduleListStream = -1;
+ private readonly Lazy<List<MinidumpLoadedImage>> _loadedImages;
+ private readonly Lazy<List<MinidumpSegment>> _memoryRanges;
+ private readonly Lazy<Reader> _virtualAddressReader;
+
+ /// <summary>
+ /// Returns true if the given address space is a minidump.
+ /// </summary>
+ /// <param name="addressSpace">The address space to check.</param>
+ /// <param name="position">The position of the minidump.</param>
+ /// <returns>True if the address space is a minidump, false otherwise.</returns>
+ public static bool IsValid(IAddressSpace addressSpace, ulong position = 0)
+ {
+ Reader headerReader = new(addressSpace);
+ return headerReader.TryRead(position, out MinidumpHeader header) && header.IsSignatureValid.Check();
+ }
+
+ /// <summary>
+ /// Constructor. This constructor will throw exceptions if the file is not a minidump or contains corrupted data
+ /// which interferes with parsing it.
+ /// </summary>
+ /// <param name="dataSource">The memory which backs this object.</param>
+ /// <param name="position">The offset within addressSpace this minidump is located at.</param>
+ public Minidump(IAddressSpace dataSource, ulong position = 0)
+ {
+ _position = position;
+
+ Reader headerReader = new(dataSource);
+ _header = headerReader.Read<MinidumpHeader>(_position);
+ _header.IsSignatureValid.CheckThrowing();
+
+ int systemIndex = -1;
+ _directory = new MinidumpDirectory[_header.NumberOfStreams];
+ ulong streamPos = _position + _header.StreamDirectoryRva;
+ for (int i = 0; i < _directory.Length; i++)
+ {
+ _directory[i] = headerReader.Read<MinidumpDirectory>(ref streamPos);
+
+ MinidumpStreamType streamType = _directory[i].StreamType;
+ if (streamType == MinidumpStreamType.SystemInfoStream)
+ {
+ Debug.Assert(systemIndex == -1);
+ systemIndex = i;
+ }
+ else if (streamType == MinidumpStreamType.ModuleListStream)
+ {
+ Debug.Assert(_moduleListStream == -1);
+ _moduleListStream = i;
+ }
+ }
+
+ if (systemIndex == -1)
+ {
+ throw new BadInputFormatException("Minidump does not contain a MINIDUMP_SYSTEM_INFO stream");
+ }
+ _systemInfo = headerReader.Read<MinidumpSystemInfo>(_position + _directory[systemIndex].Rva);
+
+ _dataSourceReader = new Reader(dataSource, new LayoutManager().AddCrashDumpTypes(false, Is64Bit));
+ _loadedImages = new Lazy<List<MinidumpLoadedImage>>(CreateLoadedImageList);
+ _memoryRanges = new Lazy<List<MinidumpSegment>>(CreateSegmentList);
+ _virtualAddressReader = new Lazy<Reader>(CreateVirtualAddressReader);
+ }
+
+ /// <summary>
+ /// Returns the architecture of the target process.
+ /// </summary>
+ public ProcessorArchitecture Architecture { get { return _systemInfo.ProcessorArchitecture; } }
+
+ /// <summary>
+ /// A raw data reader for the underlying minidump file itself.
+ /// </summary>
+ public Reader DataSourceReader { get { return _dataSourceReader; } }
+
+ /// <summary>
+        /// A raw data reader for the memory in the virtual address space of this minidump.
+ /// </summary>
+ public Reader VirtualAddressReader { get { return _virtualAddressReader.Value; } }
+
+ /// <summary>
+ /// A collection of loaded images in the minidump. This does NOT contain unloaded modules.
+ /// </summary>
+ public ReadOnlyCollection<MinidumpLoadedImage> LoadedImages { get { return _loadedImages.Value.AsReadOnly(); } }
+
+ /// <summary>
+        /// A collection of all the memory segments in the minidump.
+ /// </summary>
+ public ReadOnlyCollection<MinidumpSegment> Segments { get { return _memoryRanges.Value.AsReadOnly(); } }
+
+ /// <summary>
+        /// Returns true if the original process represented by this minidump was a 64-bit process.
+ /// </summary>
+ public bool Is64Bit
+ {
+ get
+ {
+ ProcessorArchitecture arch = _systemInfo.ProcessorArchitecture;
+ return arch == ProcessorArchitecture.Alpha64 || arch == ProcessorArchitecture.Amd64 || arch == ProcessorArchitecture.Ia64;
+ }
+ }
+
+ private Reader CreateVirtualAddressReader()
+ {
+ return _dataSourceReader.WithAddressSpace(new MinidumpVirtualAddressSpace(Segments, _dataSourceReader.DataSource));
+ }
+
+ private List<MinidumpLoadedImage> CreateLoadedImageList()
+ {
+ if (_moduleListStream == -1)
+ {
+                throw new BadInputFormatException("Minidump does not contain a ModuleListStream in its directory.");
+ }
+ MinidumpModule[] modules = _dataSourceReader.ReadCountedArray<MinidumpModule>(_position + _directory[_moduleListStream].Rva);
+ return new List<MinidumpLoadedImage>(modules.Select(module => new MinidumpLoadedImage(this, module)));
+ }
+
+ private List<MinidumpSegment> CreateSegmentList()
+ {
+ List<MinidumpSegment> ranges = new();
+
+ foreach (MinidumpDirectory item in _directory)
+ {
+ if (item.StreamType == MinidumpStreamType.MemoryListStream)
+ {
+ MinidumpMemoryDescriptor[] memoryRegions = _dataSourceReader.ReadCountedArray<MinidumpMemoryDescriptor>(_position + item.Rva);
+
+ foreach (MinidumpMemoryDescriptor region in memoryRegions)
+ {
+ MinidumpSegment range = new(region);
+ ranges.Add(range);
+ }
+
+ }
+ else if (item.StreamType == MinidumpStreamType.Memory64ListStream)
+ {
+ ulong position = item.Rva;
+ ulong count = _dataSourceReader.Read<ulong>(ref position);
+ ulong rva = _dataSourceReader.Read<ulong>(ref position);
+
+ MinidumpMemoryDescriptor64[] memoryRegions = _dataSourceReader.ReadArray<MinidumpMemoryDescriptor64>(position + _position, checked((uint)count));
+ foreach (MinidumpMemoryDescriptor64 region in memoryRegions)
+ {
+ MinidumpSegment range = new(region, rva);
+ ranges.Add(range);
+
+ rva += region.DataSize;
+ }
+ }
+ }
+
+ ranges.Sort((MinidumpSegment a, MinidumpSegment b) => a.VirtualAddress.CompareTo(b.VirtualAddress));
+ return ranges;
+ }
+ }
+}
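+
+// Illustrative usage sketch, not part of this change. Assumes StreamAddressSpace,
+// the library's Stream-backed IAddressSpace; any IAddressSpace over a dump file
+// works, and the path is hypothetical:
+//
+//     using FileStream stream = File.OpenRead("core.dmp");
+//     IAddressSpace dump = new StreamAddressSpace(stream);
+//     if (Minidump.IsValid(dump))
+//     {
+//         Minidump minidump = new(dump);
+//         foreach (MinidumpLoadedImage image in minidump.LoadedImages)
+//         {
+//             Console.WriteLine($"{image.ModuleName} @ 0x{image.BaseAddress:x}");
+//         }
+//     }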
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Text;
+using Microsoft.FileFormats.PE;
+
+namespace Microsoft.FileFormats.Minidump
+{
+ public class MinidumpLoadedImage
+ {
+ private readonly MinidumpModule _module;
+ private readonly Lazy<PEFile> _peFile;
+ private readonly Lazy<string> _moduleName;
+
+ /// <summary>
+ /// The minidump containing this loaded image.
+ /// </summary>
+ public Minidump Minidump { get; private set; }
+
+ /// <summary>
+        /// The base address in the minidump's virtual address space at which this image is mapped.
+ /// </summary>
+ public ulong BaseAddress { get { return _module.Baseofimage; } }
+
+ /// <summary>
+ /// The checksum of this image.
+ /// </summary>
+ public uint CheckSum { get { return _module.CheckSum; } }
+
+ /// <summary>
+ /// The TimeDateStamp of this image, as baked into the PE header. This value is used
+        /// for symbol server requests to obtain a PE image.
+ /// </summary>
+ public uint TimeDateStamp { get { return _module.TimeDateStamp; } }
+
+ /// <summary>
+ /// The compile time size of this PE image as it is baked into the PE header. This
+        /// value is used for symbol server requests to obtain a PE image.
+        /// </summary>
+        public uint ImageSize { get { return _module.SizeOfImage; } }
+
+ /// <summary>
+        /// The full name of this module (including the path it was originally loaded from on disk).
+ /// </summary>
+ public string ModuleName { get { return _moduleName.Value; } }
+
+ /// <summary>
+ /// A PEFile representing this image.
+ /// </summary>
+ public PEFile Image { get { return _peFile.Value; } }
+
+ public uint Major { get { return _module.VersionInfo.FileVersionMS >> 16; } }
+ public uint Minor { get { return _module.VersionInfo.FileVersionMS & 0xffff; } }
+ public uint Revision { get { return _module.VersionInfo.FileVersionLS >> 16; } }
+ public uint Patch { get { return _module.VersionInfo.FileVersionLS & 0xffff; } }
+
+ internal MinidumpLoadedImage(Minidump minidump, MinidumpModule module)
+ {
+ Minidump = minidump;
+ _module = module;
+ _peFile = new Lazy<PEFile>(CreatePEFile);
+ _moduleName = new Lazy<string>(GetModuleName);
+ }
+
+ private PEFile CreatePEFile()
+ {
+ return new PEFile(new RelativeAddressSpace(Minidump.VirtualAddressReader.DataSource, BaseAddress, Minidump.VirtualAddressReader.Length), true);
+ }
+
+ private string GetModuleName()
+ {
+ return Minidump.DataSourceReader.ReadCountedString(_module.ModuleNameRva, Encoding.Unicode);
+ }
+ }
+}
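+
+// Illustrative sketch, not part of this change: TimeDateStamp and ImageSize are
+// the two inputs to the conventional SSQP-style symbol server key for a PE file;
+// the real key construction lives with the symbol store code, this is only the
+// core of the idea:
+//
+//     string keyCore = $"{image.TimeDateStamp:x8}{image.ImageSize:x}";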
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+namespace Microsoft.FileFormats.Minidump
+{
+ /// <summary>
+ /// Represents a segment of memory in the minidump's virtual address space.
+ /// </summary>
+ public class MinidumpSegment
+ {
+ /// <summary>
+ /// The physical location in the minidump file where this memory segment resides.
+ /// </summary>
+ public ulong FileOffset { get; private set; }
+
+ /// <summary>
+ /// The base address of this chunk of virtual memory in the original process.
+ /// </summary>
+ public ulong VirtualAddress { get; private set; }
+
+ /// <summary>
+ /// The size of this chunk of memory. Note that this is both the size of the physical
+ /// memory in the minidump as well as the virtual memory in the original process.
+ /// </summary>
+ public ulong Size { get; private set; }
+
+ /// <summary>
+ /// Returns whether the given address is contained in this region of virtual memory.
+ /// </summary>
+ /// <param name="address">A virtual address in the original process's address space.</param>
+ /// <returns>True if this segment contains the address, false otherwise.</returns>
+ public bool Contains(ulong address)
+ {
+ return VirtualAddress <= address && address < VirtualAddress + Size;
+ }
+
+ internal MinidumpSegment(MinidumpMemoryDescriptor region)
+ {
+ FileOffset = region.Memory.Rva;
+ Size = region.Memory.DataSize;
+ VirtualAddress = region.StartOfMemoryRange;
+ }
+
+ internal MinidumpSegment(MinidumpMemoryDescriptor64 region, ulong rva)
+ {
+ FileOffset = rva;
+ Size = region.DataSize;
+ VirtualAddress = region.StartOfMemoryRange;
+ }
+ }
+}
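+
+// Illustrative sketch, not part of this change: an address inside a segment maps
+// to the dump file at a fixed delta, which is exactly how
+// MinidumpVirtualAddressSpace (later in this change) resolves reads:
+//
+//     if (segment.Contains(address))
+//     {
+//         ulong fileOffset = segment.FileOffset + (address - segment.VirtualAddress);
+//     }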
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats.Minidump
+{
+ public static class CrashDumpLayoutManagerExtensions
+ {
+ public static LayoutManager AddCrashDumpTypes(this LayoutManager layouts, bool isBigEndian, bool is64Bit)
+ {
+ return layouts
+ .AddPrimitives(isBigEndian)
+ .AddEnumTypes()
+ .AddSizeT(is64Bit ? 8 : 4)
+ .AddPointerTypes()
+ .AddNullTerminatedString()
+ .AddTStructTypes();
+ }
+ }
+
+#pragma warning disable 0649
+#pragma warning disable 0169
+
+ internal sealed class MinidumpHeader : TStruct
+ {
+        public const int MinidumpVersion = 0x504d444d; // "MDMP" (little-endian); despite the name, this is the signature value
+
+ public uint Signature;
+ public uint Version;
+ public uint NumberOfStreams;
+ public uint StreamDirectoryRva;
+ public uint CheckSum;
+ public uint TimeDateStamp;
+ public ulong Flags;
+
+ public ValidationRule IsSignatureValid
+ {
+ get
+ {
+ return new ValidationRule("Invalid minidump header signature", () =>
+ {
+ return Signature == MinidumpVersion;
+ });
+ }
+ }
+ }
+
+ internal sealed class MinidumpDirectory : TStruct
+ {
+ public MinidumpStreamType StreamType;
+ public uint DataSize;
+ public uint Rva;
+ }
+
+ internal enum MinidumpStreamType
+ {
+ UnusedStream = 0,
+ ReservedStream0 = 1,
+ ReservedStream1 = 2,
+ ThreadListStream = 3,
+ ModuleListStream = 4,
+ MemoryListStream = 5,
+ ExceptionStream = 6,
+ SystemInfoStream = 7,
+ ThreadExListStream = 8,
+ Memory64ListStream = 9,
+ CommentStreamA = 10,
+ CommentStreamW = 11,
+ HandleDataStream = 12,
+ FunctionTableStream = 13,
+ UnloadedModuleListStream = 14,
+ MiscInfoStream = 15,
+ MemoryInfoListStream = 16,
+ ThreadInfoListStream = 17,
+ LastReservedStream = 0xffff,
+ }
+
+ internal sealed class MinidumpSystemInfo : TStruct
+ {
+ public ProcessorArchitecture ProcessorArchitecture;
+ public ushort ProcessorLevel;
+ public ushort ProcessorRevision;
+ public byte NumberOfProcessors;
+ public byte ProductType;
+ public uint MajorVersion;
+ public uint MinorVersion;
+ public uint BuildNumber;
+ public uint PlatformId;
+ public uint CSDVersionRva;
+ }
+
+ public enum ProcessorArchitecture : ushort
+ {
+ Intel = 0,
+ Mips = 1,
+ Alpha = 2,
+ Ppc = 3,
+ Shx = 4,
+ Arm = 5,
+ Ia64 = 6,
+ Alpha64 = 7,
+ Msil = 8,
+ Amd64 = 9,
+ Ia32OnWin64 = 10,
+ }
+
+ internal sealed class FixedFileInfo : TStruct
+ {
+ public uint Signature; /* e.g. 0xfeef04bd */
+ public uint StrucVersion; /* e.g. 0x00000042 = "0.42" */
+ public uint FileVersionMS; /* e.g. 0x00030075 = "3.75" */
+ public uint FileVersionLS; /* e.g. 0x00000031 = "0.31" */
+ public uint ProductVersionMS; /* e.g. 0x00030010 = "3.10" */
+ public uint ProductVersionLS; /* e.g. 0x00000031 = "0.31" */
+ public uint FileFlagsMask; /* = 0x3F for version "0.42" */
+ public uint FileFlags; /* e.g. VFF_DEBUG | VFF_PRERELEASE */
+ public uint FileOS; /* e.g. VOS_DOS_WINDOWS16 */
+ public uint FileType; /* e.g. VFT_DRIVER */
+ public uint FileSubtype; /* e.g. VFT2_DRV_KEYBOARD */
+
+ // Timestamps would be useful, but they're generally missing (0).
+ public uint FileDateMS; /* e.g. 0 */
+ public uint FileDateLS; /* e.g. 0 */
+ }
+
+ internal sealed class MinidumpLocationDescriptor : TStruct
+ {
+ public uint DataSize;
+ public uint Rva;
+ }
+
+ [Pack(4)]
+ internal sealed class MinidumpModule : TStruct
+ {
+ public ulong Baseofimage;
+ public uint SizeOfImage;
+ public uint CheckSum;
+ public uint TimeDateStamp;
+ public uint ModuleNameRva;
+ public FixedFileInfo VersionInfo;
+ public MinidumpLocationDescriptor CvRecord;
+ public MinidumpLocationDescriptor MiscRecord;
+#pragma warning disable CA1823 // Avoid unused private fields
+ private ulong _reserved0;
+ private ulong _reserved1;
+#pragma warning restore CA1823 // Avoid unused private fields
+ }
+
+ internal sealed class MinidumpMemoryDescriptor : TStruct
+ {
+ public ulong StartOfMemoryRange;
+ public MinidumpLocationDescriptor Memory;
+ }
+
+ internal sealed class MinidumpMemoryDescriptor64 : TStruct
+ {
+ public ulong StartOfMemoryRange;
+ public ulong DataSize;
+ }
+}
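+
+// Illustrative sketch, not part of this change: reading the raw header types
+// above with the composed layout manager ("dump" is any IAddressSpace over a
+// minidump file):
+//
+//     Reader reader = new(dump, new LayoutManager().AddCrashDumpTypes(isBigEndian: false, is64Bit: true));
+//     MinidumpHeader header = reader.Read<MinidumpHeader>(0);
+//     header.IsSignatureValid.CheckThrowing();   // throws if the MDMP signature is missing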
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Collections.ObjectModel;
+using System.Diagnostics;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats.Minidump
+{
+ public class MinidumpVirtualAddressSpace : IAddressSpace
+ {
+ private readonly IAddressSpace _addressSpace;
+ private readonly ReadOnlyCollection<MinidumpSegment> _segments;
+ private readonly ulong _length;
+
+ public ulong Length
+ {
+ get
+ {
+ return _length;
+ }
+ }
+
+ public MinidumpVirtualAddressSpace(ReadOnlyCollection<MinidumpSegment> segments, IAddressSpace addressSpace)
+ {
+ _addressSpace = addressSpace;
+ _segments = segments;
+ MinidumpSegment last = segments.Last();
+ _length = last.VirtualAddress + last.Size;
+ }
+
+ public uint Read(ulong position, byte[] buffer, uint bufferOffset, uint count)
+ {
+ if (count == 0)
+ {
+ return 0;
+ }
+ MinidumpSegment seg = FindSegment(position);
+ if (seg == null)
+ {
+ return 0;
+ }
+ // TODO: What if they read past the end of the segment?
+ Debug.Assert(position >= seg.VirtualAddress);
+ ulong offset = position - seg.VirtualAddress + seg.FileOffset;
+ return _addressSpace.Read(offset, buffer, bufferOffset, count);
+ }
+
+ private MinidumpSegment FindSegment(ulong position)
+ {
+ int min = 0;
+ int max = _segments.Count - 1;
+
+ while (min <= max)
+ {
+ int mid = (min + max) / 2;
+ MinidumpSegment current = _segments[mid];
+
+ if (position < current.VirtualAddress)
+ {
+ max = mid - 1;
+ }
+ else if (position >= current.VirtualAddress + current.Size)
+ {
+ min = mid + 1;
+ }
+ else
+ {
+ Debug.Assert(current.Contains(position));
+ return current;
+ }
+ }
+
+ return null;
+ }
+ }
+}
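+
+// Illustrative sketch, not part of this change: reading target-process memory by
+// virtual address instead of by file offset:
+//
+//     Reader vaReader = minidump.VirtualAddressReader;
+//     uint value = vaReader.Read<uint>(someVirtualAddress);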
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Text;
+
+namespace Microsoft.FileFormats.Minidump
+{
+ internal static class MinidumpReaderExtensions
+ {
+ public static string ReadCountedString(this Reader self, ulong position, Encoding encoding)
+ {
+ uint elementCount = self.Read<uint>(ref position);
+ byte[] buffer = self.Read(position, elementCount);
+ return encoding.GetString(buffer);
+ }
+
+ public static T[] ReadCountedArray<T>(this Reader self, ulong position)
+ {
+ uint elementCount = self.Read<uint>(ref position);
+ return (T[])self.LayoutManager.GetArrayLayout<T[]>(elementCount).Read(self.DataSource, position);
+ }
+ }
+}
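+
+// Illustrative sketch, not part of this change: ReadCountedString matches the
+// MINIDUMP_STRING layout, a 4-byte byte count followed by the buffer. With
+// Encoding.Unicode, a module name "ab" is stored as:
+//
+//     04 00 00 00  61 00 62 00
+//     \_ count _/  \_  "ab" _/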
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats
+{
+ /// <summary>
+ /// Parses a string object from a standard null-terminated byte sequence.
+ /// </summary>
+ /// <remarks>
+ /// Currently this type only supports ASCII or UTF8 encoding.
+ /// </remarks>
+ public class NullTerminatedStringLayout : ILayout
+ {
+ private Encoding _encoding;
+
+ /// <summary>
+ /// Create a new NullTerminatedStringLayout
+ /// </summary>
+        /// <param name="encoding">The encoding used to parse the string characters. Currently only ASCII or UTF8 is supported.</param>
+ public NullTerminatedStringLayout(Encoding encoding)
+ {
+ // Make sure we are scanning something where a single 0 byte means end of string.
+ // Although UTF8 does have code points that encode with more than one byte,
+ // byte 0 is never used except to encode code point 0.
+ if (encoding != Encoding.UTF8 && encoding != Encoding.ASCII)
+ {
+                throw new NotSupportedException("NullTerminatedStringLayout: Only UTF8 or ASCII encodings are supported for now");
+ }
+ _encoding = encoding;
+
+            // We could implement this for multi-byte encodings, but there hasn't been a
+            // need yet. If you do change it, make sure to adjust NaturalAlignment too.
+ }
+
+ public IEnumerable<IField> Fields { get { return Array.Empty<IField>(); } }
+ public uint NaturalAlignment { get { return 1U; } }
+
+ public bool IsFixedSize { get { return false; } }
+
+ public uint Size
+ {
+ get
+ {
+ throw new InvalidOperationException("Size is invalid for variable sized layouts");
+ }
+ }
+
+ public uint SizeAsBaseType
+ {
+ get
+ {
+                throw new InvalidOperationException("SizeAsBaseType is invalid for variable sized layouts");
+ }
+ }
+
+ public Type Type { get { return typeof(string); } }
+
+ public object Read(IAddressSpace dataSource, ulong position)
+ {
+ return Read(dataSource, position, out uint _);
+ }
+
+ public object Read(IAddressSpace dataSource, ulong position, out uint bytesRead)
+ {
+ List<byte> stringBytes = new();
+ uint offset = 0;
+ for (; ; offset++)
+ {
+ byte[] nextByte = dataSource.Read(position + offset, 1);
+ if (nextByte[0] == 0)
+ {
+ break;
+ }
+ else
+ {
+ stringBytes.Add(nextByte[0]);
+ }
+ }
+ bytesRead = offset + 1;
+ return _encoding.GetString(stringBytes.ToArray(), 0, stringBytes.Count);
+ }
+ }
+
+ public static partial class LayoutManagerExtensions
+ {
+ /// <summary>
+ /// Add support for parsing null terminated strings as System.String
+ /// </summary>
+ public static LayoutManager AddNullTerminatedString(this LayoutManager layouts)
+ {
+ return AddNullTerminatedString(layouts, Encoding.UTF8);
+ }
+
+ /// <summary>
+ /// Add support for parsing null terminated strings as System.String
+ /// </summary>
+ /// <param name="layouts">The layout manager that will hold the new layout</param>
+ /// <param name="encoding">The encoding used to parse string characters. Currently only UTF8 and ASCII are supported</param>
+ public static LayoutManager AddNullTerminatedString(this LayoutManager layouts, Encoding encoding)
+ {
+ layouts.AddLayout(new NullTerminatedStringLayout(encoding));
+ return layouts;
+ }
+ }
+}
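+
+// Illustrative sketch, not part of this change ("space" is any IAddressSpace
+// whose bytes at offset 0 are 0x68 0x69 0x00, i.e. "hi\0"):
+//
+//     Reader reader = new(space, new LayoutManager().AddPrimitives(false).AddNullTerminatedString());
+//     string s = reader.Read<string>(0);   // "hi"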
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+
+namespace Microsoft.FileFormats.PDB
+{
+ public class PDBFile : IDisposable
+ {
+ private readonly Reader _reader;
+ private readonly Lazy<PDBFileHeader> _header;
+ private readonly Lazy<Reader[]> _streams;
+ private readonly Lazy<PDBNameStream> _nameStream;
+ private readonly Lazy<DbiStream> _dbiStream;
+
+ public PDBFile(IAddressSpace dataSource)
+ {
+ _reader = new Reader(dataSource);
+ _header = new Lazy<PDBFileHeader>(() => _reader.Read<PDBFileHeader>(0));
+ _streams = new Lazy<Reader[]>(ReadDirectory);
+            _nameStream = new Lazy<PDBNameStream>(() => new PDBNameStream(Streams[1]));   // MSF stream 1 is the PDB info stream
+            _dbiStream = new Lazy<DbiStream>(() => new DbiStream(Streams[3]));            // MSF stream 3 is the DBI stream
+ }
+
+ public PDBFileHeader Header { get { return _header.Value; } }
+ public IList<Reader> Streams { get { return _streams.Value; } }
+ public PDBNameStream NameStream { get { return _nameStream.Value; } }
+ public DbiStream DbiStream { get { return _dbiStream.Value; } }
+ public uint Age { get { return NameStream.Header.Age; } }
+ public uint DbiAge { get { return DbiStream.Header.Age; } }
+ public Guid Signature { get { return new Guid(NameStream.Header.Guid); } }
+
+ public void Dispose()
+ {
+ if (_reader.DataSource is IDisposable disposable)
+ {
+ disposable.Dispose();
+ }
+ }
+
+ public bool IsValid()
+ {
+            if (_reader.Length > _reader.SizeOf<PDBFileHeader>())
+            {
+ return Header.IsMagicValid.Check();
+ }
+ return false;
+ }
+
+ private Reader[] ReadDirectory()
+ {
+ Header.IsMagicValid.CheckThrowing();
+ uint secondLevelPageCount = ToPageCount(Header.DirectorySize);
+ ulong pageIndicesOffset = _reader.SizeOf<PDBFileHeader>();
+ PDBPagedAddressSpace secondLevelPageList = CreatePagedAddressSpace(_reader.DataSource, pageIndicesOffset, secondLevelPageCount * sizeof(uint));
+ PDBPagedAddressSpace directoryContent = CreatePagedAddressSpace(secondLevelPageList, 0, Header.DirectorySize);
+
+ Reader directoryReader = new(directoryContent);
+ ulong position = 0;
+ uint countStreams = directoryReader.Read<uint>(ref position);
+ uint[] streamSizes = directoryReader.ReadArray<uint>(ref position, countStreams);
+ Reader[] streams = new Reader[countStreams];
+ for (uint i = 0; i < streamSizes.Length; i++)
+ {
+ streams[i] = new Reader(CreatePagedAddressSpace(directoryContent, position, streamSizes[i]));
+ position += ToPageCount(streamSizes[i]) * sizeof(uint);
+ }
+ return streams;
+ }
+
+ private PDBPagedAddressSpace CreatePagedAddressSpace(IAddressSpace indicesData, ulong offset, uint length)
+ {
+ uint[] indices = new Reader(indicesData).ReadArray<uint>(offset, ToPageCount(length));
+ return new PDBPagedAddressSpace(_reader.DataSource, indices, Header.PageSize, length);
+ }
+
+ private uint ToPageCount(uint size)
+ {
+ return unchecked((Header.PageSize + size - 1) / Header.PageSize);
+ }
+ }
+
+ /// <summary>
+ /// Defines a virtual address paged address space that maps to an underlying physical
+ /// paged address space with a different set of page Indices.
+ /// </summary>
+ /// <remarks>
+ /// A paged address space is an address space where any address A can be converted
+ /// to a page index and a page offset. A = index*page_size + offset.
+ ///
+ /// This paged address space maps each virtual address to a physical address by
+    /// remapping each virtual page to a potentially different physical page. If V is
+ /// the virtual page index then pageIndices[V] is the physical page index.
+ ///
+ /// For example if pageSize is 0x100 and pageIndices is { 0x7, 0x9 } then
+ /// virtual address 0x156 is:
+ /// virtual page index 0x1, virtual offset 0x56
+ /// physical page index 0x9, physical offset 0x56
+ /// physical address is 0x956
+ /// </remarks>
+ internal sealed class PDBPagedAddressSpace : IAddressSpace
+ {
+ private readonly IAddressSpace _physicalAddresses;
+ private readonly uint[] _pageIndices;
+ private readonly uint _pageSize;
+
+ public PDBPagedAddressSpace(IAddressSpace physicalAddresses, uint[] pageIndices, uint pageSize, ulong length)
+ {
+ _physicalAddresses = physicalAddresses;
+ _pageIndices = pageIndices;
+ _pageSize = pageSize;
+ Length = length;
+ }
+
+ public ulong Length { get; private set; }
+
+ public uint Read(ulong position, byte[] buffer, uint bufferOffset, uint count)
+ {
+ if (position + count > Length)
+ {
+ throw new BadInputFormatException("Unexpected end of data: Expected " + count + " bytes.");
+ }
+
+ uint bytesRead = 0;
+ while (bytesRead != count)
+ {
+ uint virtualPageOffset;
+ ulong physicalPosition = GetPhysicalAddress(position, out virtualPageOffset);
+ uint pageBytesToRead = Math.Min(_pageSize - virtualPageOffset, count - bytesRead);
+ uint pageBytesRead = _physicalAddresses.Read(physicalPosition, buffer, bufferOffset + bytesRead, pageBytesToRead);
+ bytesRead += pageBytesRead;
+ position += pageBytesRead;
+ if (pageBytesToRead != pageBytesRead)
+ {
+ break;
+ }
+ }
+ return bytesRead;
+ }
+
+ private ulong GetPhysicalAddress(ulong virtualAddress, out uint virtualOffset)
+ {
+ uint virtualPageIndex = (uint)(virtualAddress / _pageSize);
+ virtualOffset = (uint)(virtualAddress - (virtualPageIndex * _pageSize));
+ uint physicalPageIndex = _pageIndices[(int)virtualPageIndex];
+ return (ulong)physicalPageIndex * _pageSize + virtualOffset;
+ }
+ }
+
+ public class PDBNameStream
+ {
+ private readonly Reader _streamReader;
+ private readonly Lazy<NameIndexStreamHeader> _header;
+
+ public PDBNameStream(Reader streamReader)
+ {
+ _streamReader = streamReader;
+ _header = new Lazy<NameIndexStreamHeader>(() => _streamReader.Read<NameIndexStreamHeader>(0));
+ }
+
+ public NameIndexStreamHeader Header { get { return _header.Value; } }
+ }
+
+ public class DbiStream
+ {
+ private readonly Reader _streamReader;
+ private readonly Lazy<DbiStreamHeader> _header;
+
+ public DbiStream(Reader streamReader)
+ {
+ _streamReader = streamReader;
+ _header = new Lazy<DbiStreamHeader>(() => _streamReader.Read<DbiStreamHeader>(0));
+ }
+
+ public bool IsValid()
+ {
+            if (_streamReader.Length >= _streamReader.SizeOf<DbiStreamHeader>())
+            {
+ return _header.Value.IsHeaderValid.Check();
+ }
+ return false;
+ }
+
+ public DbiStreamHeader Header { get { _header.Value.IsHeaderValid.CheckThrowing(); return _header.Value; } }
+ }
+}
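+
+// Illustrative usage sketch, not part of this change: Signature and Age are the
+// inputs to the conventional PDB symbol server key. The path is hypothetical:
+//
+//     using FileStream stream = File.OpenRead("example.pdb");
+//     PDBFile pdb = new(new StreamAddressSpace(stream));
+//     if (pdb.IsValid())
+//     {
+//         Console.WriteLine($"{pdb.Signature:N}{pdb.Age:x}");
+//     }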
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats.PDB
+{
+ public class PDBFileHeader : TStruct
+ {
+ private static byte[] ExpectedMagic
+ {
+ get
+ {
+ return new byte[]
+ {
+ 0x4D, 0x69, 0x63, 0x72, 0x6F, 0x73, 0x6F, 0x66, // "Microsof"
+ 0x74, 0x20, 0x43, 0x2F, 0x43, 0x2B, 0x2B, 0x20, // "t C/C++ "
+ 0x4D, 0x53, 0x46, 0x20, 0x37, 0x2E, 0x30, 0x30, // "MSF 7.00"
+                    0x0D, 0x0A, 0x1A, 0x44, 0x53, 0x00, 0x00, 0x00  // "\r\n", 0x1A, "DS", three nul bytes
+ };
+ }
+ }
+
+ [ArraySize(32)]
+ public byte[] Magic;
+ public uint PageSize;
+ public uint FreePageMap;
+ public uint PagesUsed;
+ public uint DirectorySize;
+ public uint Reserved;
+
+ #region Validation Rules
+ public ValidationRule IsMagicValid
+ {
+ get { return new ValidationRule("PDB header magic is invalid", () => Magic.SequenceEqual(ExpectedMagic)); }
+ }
+ #endregion
+ }
+
+ public class NameIndexStreamHeader : TStruct
+ {
+ public uint Version;
+ public uint Signature;
+ public uint Age;
+ [ArraySize(16)]
+ public byte[] Guid;
+ public uint CountStringBytes;
+ }
+
+ public class DbiStreamHeader : TStruct
+ {
+ private const uint CurrentSignature = uint.MaxValue;
+ private const uint CurrentVersion = 19990903; // DBIImpvV70
+
+ public uint Signature;
+ public uint Version;
+ public uint Age;
+
+ // This is not the complete DBI header, but it is enough to get the Age.
+
+ #region Validation Rules
+ public ValidationRule IsHeaderValid
+ {
+ get { return new ValidationRule("DBI header is invalid", () => Signature == CurrentSignature && Version == CurrentVersion); }
+ }
+ #endregion
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Collections.ObjectModel;
+using System.Linq;
+
+namespace Microsoft.FileFormats.PE
+{
+ /// <summary>
+    /// A very basic PE reader that can extract a few useful pieces of information.
+ /// </summary>
+ public class PEFile : IDisposable
+ {
+ // PE file
+ private readonly bool _isDataSourceVirtualAddressSpace;
+ private readonly Reader _headerReader;
+ private readonly Lazy<ushort> _dosHeaderMagic;
+ private readonly Lazy<ImageFileHeader> _fileHeader;
+ private readonly Lazy<uint> _peHeaderOffset;
+ private readonly Lazy<uint> _peSignature;
+ private readonly Lazy<ImageOptionalHeaderMagic> _optionalHeaderMagic;
+ private readonly Lazy<Reader> _fileReader;
+ private readonly Lazy<ImageOptionalHeader> _optionalHeader;
+ private readonly Lazy<List<ImageDataDirectory>> _imageDataDirectory;
+ private readonly Lazy<IEnumerable<PEPdbRecord>> _pdb;
+ private readonly Lazy<List<ImageSectionHeader>> _segments;
+ private readonly Lazy<VsFixedFileInfo> _vsFixedFileInfo;
+ private readonly Lazy<IEnumerable<PdbChecksum>> _pdbChecksum;
+ private readonly Lazy<IEnumerable<PEPerfMapRecord>> _perfMapsV1;
+ private readonly Lazy<Reader> _virtualAddressReader;
+ private readonly Lazy<ImageExportDirectory> _exportDirectory;
+
+ private const ushort ExpectedDosHeaderMagic = 0x5A4D; // MZ
+ private const int PESignatureOffsetLocation = 0x3C;
+ private const uint ExpectedPESignature = 0x00004550; // PE00
+        private const int ImageDataDirectoryCount = 15; // entries 0 (Export) through 14 (ComDescriptor); the reserved 16th entry is not read
+
+ public const uint ChecksumLength = 4;
+ public const uint CertDirectoryLength = 8;
+ public const int CertDirectoryIndex = 4;
+
+ public PEFile(IAddressSpace dataSource, bool isDataSourceVirtualAddressSpace = false)
+ {
+ _isDataSourceVirtualAddressSpace = isDataSourceVirtualAddressSpace;
+ _headerReader = new Reader(dataSource);
+ _dosHeaderMagic = new Lazy<ushort>(() => _headerReader.Read<ushort>(0));
+ _peHeaderOffset = new Lazy<uint>(ReadPEHeaderOffset);
+ _peSignature = new Lazy<uint>(() => _headerReader.Read<uint>(PEHeaderOffset));
+ _fileHeader = new Lazy<ImageFileHeader>(ReadFileHeader);
+ _optionalHeaderMagic = new Lazy<ImageOptionalHeaderMagic>(ReadOptionalHeaderMagic);
+ _fileReader = new Lazy<Reader>(CreateFileReader);
+ _optionalHeader = new Lazy<ImageOptionalHeader>(ReadOptionalHeader);
+ _imageDataDirectory = new Lazy<List<ImageDataDirectory>>(ReadImageDataDirectory);
+ _pdb = new Lazy<IEnumerable<PEPdbRecord>>(ReadPdbInfo);
+ _segments = new Lazy<List<ImageSectionHeader>>(ReadSectionHeaders);
+ _vsFixedFileInfo = new Lazy<VsFixedFileInfo>(ReadVersionResource);
+ _pdbChecksum = new Lazy<IEnumerable<PdbChecksum>>(ReadPdbChecksum);
+ _perfMapsV1 = new Lazy<IEnumerable<PEPerfMapRecord>>(ReadPerfMapV1Entries);
+ _virtualAddressReader = new Lazy<Reader>(CreateVirtualAddressReader);
+ _exportDirectory = new Lazy<ImageExportDirectory>(ReadExportDirectory);
+ }
+
+ public ushort DosHeaderMagic { get { return _dosHeaderMagic.Value; } }
+ public uint PEHeaderOffset { get { return _peHeaderOffset.Value; } }
+ public uint PESignature { get { return _peSignature.Value; } }
+ public ImageFileHeader FileHeader { get { return _fileHeader.Value; } }
+ public uint Timestamp { get { return FileHeader.TimeDateStamp; } }
+ public ImageOptionalHeaderMagic OptionalHeaderMagic { get { return _optionalHeaderMagic.Value; } }
+ public Reader FileReader { get { return _fileReader.Value; } }
+ public ImageOptionalHeader OptionalHeader { get { return _optionalHeader.Value; } }
+ public uint SizeOfImage { get { return OptionalHeader.SizeOfImage; } }
+ public ReadOnlyCollection<ImageDataDirectory> ImageDataDirectory { get { return _imageDataDirectory.Value.AsReadOnly(); } }
+ public IEnumerable<PEPdbRecord> Pdbs { get { return _pdb.Value; } }
+ public Reader RelativeVirtualAddressReader { get { return _virtualAddressReader.Value; } }
+ public ReadOnlyCollection<ImageSectionHeader> Segments { get { return _segments.Value.AsReadOnly(); } }
+ public VsFixedFileInfo VersionInfo { get { return _vsFixedFileInfo.Value; } }
+ public IEnumerable<PEPerfMapRecord> PerfMapsV1 { get { return _perfMapsV1.Value; } }
+ public IEnumerable<PdbChecksum> PdbChecksums { get { return _pdbChecksum.Value; } }
+
+ public void Dispose()
+ {
+ if (_headerReader.DataSource is IDisposable disposable)
+ {
+ disposable.Dispose();
+ }
+ }
+
+ public bool IsValid()
+ {
+ if (_headerReader.Length > sizeof(ushort))
+ {
+ try
+ {
+ if (HasValidDosSignature.Check())
+ {
+ if (_headerReader.Length > PESignatureOffsetLocation)
+ {
+ return HasValidPESignature.Check();
+ }
+ }
+ }
+ catch (Exception ex) when (ex is InvalidVirtualAddressException || ex is BadInputFormatException)
+ {
+ }
+ }
+ return false;
+ }
+
+ public bool IsILImage { get { return ComDataDirectory.VirtualAddress != 0; } }
+
+ /// <summary>
+ /// The COM data directory. In practice this is the metadata of an IL image.
+ /// </summary>
+ public ImageDataDirectory ComDataDirectory { get { return ImageDataDirectory[(int)ImageDirectoryEntry.ComDescriptor]; } }
+
+ /// <summary>
+ /// Returns the address of a module export symbol if found
+ /// </summary>
+ /// <param name="symbolName">symbol name (without the module name prepended)</param>
+ /// <param name="offset">symbol offset returned</param>
+ /// <returns>true if found</returns>
+ public bool TryGetExportSymbol(string symbolName, out ulong offset)
+ {
+ try
+ {
+ ImageExportDirectory exportDirectory = _exportDirectory.Value;
+ if (exportDirectory is not null)
+ {
+ for (int nameIndex = 0; nameIndex < exportDirectory.NumberOfNames; nameIndex++)
+ {
+ uint namePointerRVA = RelativeVirtualAddressReader.Read<uint>((ulong)(exportDirectory.AddressOfNames + (sizeof(uint) * nameIndex)));
+ if (namePointerRVA != 0)
+ {
+ string name = RelativeVirtualAddressReader.Read<string>(namePointerRVA);
+ if (name == symbolName)
+ {
+ ushort ordinalForNamedExport = RelativeVirtualAddressReader.Read<ushort>((ulong)(exportDirectory.AddressOfNameOrdinals + (sizeof(ushort) * nameIndex)));
+ offset = RelativeVirtualAddressReader.Read<uint>((ulong)(exportDirectory.AddressOfFunctions + (sizeof(uint) * ordinalForNamedExport)));
+ return true;
+ }
+ }
+ }
+ }
+ }
+ catch (Exception ex) when (ex is InvalidVirtualAddressException || ex is BadInputFormatException)
+ {
+ }
+ offset = 0;
+ return false;
+ }
+
+ private ImageExportDirectory ReadExportDirectory()
+ {
+ if (IsValid())
+ {
+ ImageDataDirectory exportTableDirectory = ImageDataDirectory[(int)ImageDirectoryEntry.Export];
+ if (exportTableDirectory is not null)
+ {
+ return RelativeVirtualAddressReader.Read<ImageExportDirectory>(exportTableDirectory.VirtualAddress);
+ }
+ }
+ return null;
+ }
+
+ private uint ReadPEHeaderOffset()
+ {
+ HasValidDosSignature.CheckThrowing();
+ return _headerReader.Read<uint>(PESignatureOffsetLocation);
+ }
+
+ private uint PEOptionalHeaderOffset
+ {
+ get { return _headerReader.SizeOf<ImageFileHeader>() + PEHeaderOffset + 0x4; }
+ }
+
+ public uint PEChecksumOffset
+ {
+ get { return PEOptionalHeaderOffset + 0x40; }
+ }
+
+ public uint CertificateTableOffset
+ {
+ get { return PEOptionalHeaderOffset + FileReader.SizeOf<ImageOptionalHeader>() + 0x20; }
+ }
+
+ private ImageFileHeader ReadFileHeader()
+ {
+ HasValidPESignature.CheckThrowing();
+ return _headerReader.Read<ImageFileHeader>(PEHeaderOffset + 0x4);
+ }
+
+ private ImageOptionalHeaderMagic ReadOptionalHeaderMagic()
+ {
+ ulong offset = PEOptionalHeaderOffset;
+ return _headerReader.Read<ImageOptionalHeaderMagic>(offset);
+ }
+
+ private Reader CreateFileReader()
+ {
+ OptionalHeaderMagic.IsMagicValid.CheckThrowing();
+ bool is64Bit = OptionalHeaderMagic.Magic == ImageMagic.Magic64;
+ return new Reader(_headerReader.DataSource, new LayoutManager().AddPETypes(is64Bit));
+ }
+
+ private ImageOptionalHeader ReadOptionalHeader()
+ {
+ ulong offset = PEOptionalHeaderOffset;
+ return FileReader.Read<ImageOptionalHeader>(offset);
+ }
+
+ private List<ImageDataDirectory> ReadImageDataDirectory()
+ {
+ ulong offset = PEOptionalHeaderOffset + FileReader.SizeOf<ImageOptionalHeader>();
+
+ ImageDataDirectory[] result = _headerReader.ReadArray<ImageDataDirectory>(offset, ImageDataDirectoryCount);
+ return new List<ImageDataDirectory>(result);
+ }
+
+ private List<ImageSectionHeader> ReadSectionHeaders()
+ {
+ ulong offset = PEOptionalHeaderOffset + FileHeader.SizeOfOptionalHeader;
+ List<ImageSectionHeader> result = new(_headerReader.ReadArray<ImageSectionHeader>(offset, FileHeader.NumberOfSections));
+ return result;
+ }
+
+ private IEnumerable<PEPdbRecord> ReadPdbInfo()
+ {
+ ImageDataDirectory imageDebugDirectory = ImageDataDirectory[(int)ImageDirectoryEntry.Debug];
+ uint count = imageDebugDirectory.Size / FileReader.SizeOf<ImageDebugDirectory>();
+ ImageDebugDirectory[] debugDirectories = RelativeVirtualAddressReader.ReadArray<ImageDebugDirectory>(imageDebugDirectory.VirtualAddress, count);
+
+ foreach (ImageDebugDirectory directory in debugDirectories)
+ {
+ if (directory.Type == ImageDebugType.Codeview)
+ {
+ ulong position = directory.AddressOfRawData;
+ CvInfoPdb70 pdb = RelativeVirtualAddressReader.Read<CvInfoPdb70>(ref position);
+ if (pdb.CvSignature == CvInfoPdb70.PDB70CvSignature)
+ {
+ bool isPortablePDB = directory.MinorVersion == ImageDebugDirectory.PortablePDBMinorVersion;
+ string fileName = RelativeVirtualAddressReader.Read<string>(position);
+ yield return new PEPdbRecord(isPortablePDB, fileName, new Guid(pdb.Signature), pdb.Age);
+ }
+ }
+ }
+ }
+
+ private IEnumerable<PdbChecksum> ReadPdbChecksum()
+ {
+ ImageDataDirectory imageDebugDirectory = ImageDataDirectory[(int)ImageDirectoryEntry.Debug];
+ uint count = imageDebugDirectory.Size / FileReader.SizeOf<ImageDebugDirectory>();
+ ImageDebugDirectory[] debugDirectories = RelativeVirtualAddressReader.ReadArray<ImageDebugDirectory>(imageDebugDirectory.VirtualAddress, count);
+
+ foreach (ImageDebugDirectory directory in debugDirectories)
+ {
+ if (directory.Type == ImageDebugType.PdbChecksum)
+ {
+ uint sizeOfData = directory.SizeOfData;
+ ulong position = directory.AddressOfRawData;
+ string algorithmName = RelativeVirtualAddressReader.Read<string>(position);
+ uint algorithmLength = (uint)algorithmName.Length;
+ uint length = sizeOfData - algorithmLength - 1; // -1 for null terminator
+ byte[] checksum = RelativeVirtualAddressReader.ReadArray<byte>(position + algorithmLength + 1 /* +1 for null terminator */, length);
+ yield return new PdbChecksum(algorithmName, checksum);
+ }
+ }
+ }
+
+ private IEnumerable<PEPerfMapRecord> ReadPerfMapV1Entries()
+ {
+ ImageDataDirectory imageDebugDirectory = ImageDataDirectory[(int)ImageDirectoryEntry.Debug];
+ uint count = imageDebugDirectory.Size / FileReader.SizeOf<ImageDebugDirectory>();
+ ImageDebugDirectory[] debugDirectories = RelativeVirtualAddressReader.ReadArray<ImageDebugDirectory>(imageDebugDirectory.VirtualAddress, count);
+
+ foreach (ImageDebugDirectory directory in debugDirectories)
+ {
+ if (directory.Type == ImageDebugType.PerfMap && directory.MajorVersion == 1 && directory.MinorVersion == 0)
+ {
+ ulong position = directory.AddressOfRawData;
+ PerfMapIdV1 perfmapEntryHeader = RelativeVirtualAddressReader.Read<PerfMapIdV1>(ref position);
+ if (perfmapEntryHeader.Magic == PerfMapIdV1.PerfMapEntryMagic)
+ {
+ string fileName = RelativeVirtualAddressReader.Read<string>(position);
+ yield return new PEPerfMapRecord(fileName, perfmapEntryHeader.Signature, perfmapEntryHeader.Version);
+ }
+ }
+ }
+ }
+
+ private const uint VersionResourceType = 16;
+ private const uint VersionResourceName = 1;
+ private const uint VersionResourceLanguage = 0x409;
+
+ private VsFixedFileInfo ReadVersionResource()
+ {
+ ImageResourceDataEntry dataEntry = GetResourceDataEntry(VersionResourceType, VersionResourceName, VersionResourceLanguage);
+ // If the version resource can't be found under the 0x409 language, try as language "neutral" (0)
+ dataEntry ??= GetResourceDataEntry(VersionResourceType, VersionResourceName, 0);
+ if (dataEntry != null)
+ {
+ VsVersionInfo info = RelativeVirtualAddressReader.Read<VsVersionInfo>(dataEntry.OffsetToData);
+ if (info.Value.Signature == VsFixedFileInfo.FixedFileInfoSignature)
+ {
+ return info.Value;
+ }
+ }
+ return null;
+ }
+
+ private ImageResourceDataEntry GetResourceDataEntry(uint type, uint name, uint language)
+ {
+ uint resourceSectionRva = ImageDataDirectory[(int)ImageDirectoryEntry.Resource].VirtualAddress;
+ ImageResourceDirectory resourceDirectory = RelativeVirtualAddressReader.Read<ImageResourceDirectory>(resourceSectionRva);
+
+ if (GetNextLevelResourceEntryRva(resourceDirectory, type, resourceSectionRva, out uint nameTableRva))
+ {
+ if (GetNextLevelResourceEntryRva(resourceDirectory, name, resourceSectionRva + nameTableRva, out uint langTableRva))
+ {
+ if (GetNextLevelResourceEntryRva(resourceDirectory, language, resourceSectionRva + langTableRva, out uint resourceDataEntryRva))
+ {
+ return RelativeVirtualAddressReader.Read<ImageResourceDataEntry>(resourceSectionRva + resourceDataEntryRva);
+ }
+ }
+ }
+ return null;
+ }
+
+ private bool GetNextLevelResourceEntryRva(ImageResourceDirectory resourceDirectory, uint id, uint rva, out uint nextLevelRva)
+ {
+ ushort numNameEntries = resourceDirectory.NumberOfNamedEntries;
+ ushort numIDEntries = resourceDirectory.NumberOfIdEntries;
+
+ if (numNameEntries == ushort.MaxValue)
+ {
+ numNameEntries = 0;
+ }
+
+ if (numIDEntries == ushort.MaxValue)
+ {
+ numIDEntries = 0;
+ }
+
+ uint directorySize = RelativeVirtualAddressReader.SizeOf<ImageResourceDirectory>();
+ uint entrySize = RelativeVirtualAddressReader.SizeOf<ImageResourceDirectoryEntry>();
+
+ for (ushort i = numNameEntries; i < numNameEntries + numIDEntries; i++)
+ {
+ ImageResourceDirectoryEntry entry = RelativeVirtualAddressReader.Read<ImageResourceDirectoryEntry>(rva + directorySize + (i * entrySize));
+ if (entry.Id == id)
+ {
+ nextLevelRva = entry.OffsetToData & 0x7FFFFFFF;
+ return true;
+ }
+ }
+
+ nextLevelRva = 0;
+ return false;
+ }
+
+ private Reader CreateVirtualAddressReader()
+ {
+ if (_isDataSourceVirtualAddressSpace)
+ {
+ return _fileReader.Value;
+ }
+ else
+ {
+ return _fileReader.Value.WithAddressSpace(new PEAddressSpace(_headerReader.DataSource, 0, Segments));
+ }
+ }
+
+ #region Validation Rules
+ public ValidationRule HasValidDosSignature
+ {
+ get
+ {
+ return new ValidationRule("PE file does not have valid DOS header", () =>
+ DosHeaderMagic == ExpectedDosHeaderMagic);
+ }
+ }
+
+ public ValidationRule HasValidPESignature
+ {
+ get
+ {
+ return new ValidationRule("PE file does not have a valid PE signature", () =>
+ PESignature == ExpectedPESignature);
+ }
+ }
+ #endregion
+ }
+
+ public class PEAddressSpace : IAddressSpace
+ {
+ private Lazy<ulong> _length;
+ private ReadOnlyCollection<ImageSectionHeader> _segments;
+ private ulong _baseAddress;
+ private IAddressSpace _addressSpace;
+
+ public ulong Length
+ {
+ get
+ {
+ return _length.Value;
+ }
+ }
+
+ public PEAddressSpace(IAddressSpace addressSpace, ulong baseAddress, ReadOnlyCollection<ImageSectionHeader> segments)
+ {
+ _length = new Lazy<ulong>(GetLength);
+ _segments = segments;
+ _baseAddress = baseAddress;
+ _addressSpace = addressSpace;
+ }
+
+ public uint Read(ulong position, byte[] buffer, uint bufferOffset, uint count)
+ {
+ ImageSectionHeader segment = _segments.Where(header => header.VirtualAddress <= position && position <= header.VirtualAddress + header.VirtualSize).FirstOrDefault();
+ if (segment == null)
+ {
+ return 0;
+ }
+ ulong offset = _baseAddress + position - segment.VirtualAddress + segment.PointerToRawData;
+ uint result = _addressSpace.Read(offset, buffer, bufferOffset, count);
+ return result;
+ }
+
+ private ulong GetLength()
+ {
+ return _segments.Max(seg => seg.VirtualAddress + seg.VirtualSize);
+ }
+ }
+}
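+
+// Illustrative usage sketch, not part of this change. The path is hypothetical:
+//
+//     using FileStream stream = File.OpenRead("example.dll");
+//     PEFile pe = new(new StreamAddressSpace(stream));
+//     if (pe.IsValid())
+//     {
+//         Console.WriteLine($"timestamp=0x{pe.Timestamp:x8} size=0x{pe.SizeOfImage:x}");
+//         foreach (PEPdbRecord pdb in pe.Pdbs)
+//         {
+//             Console.WriteLine($"{pdb.Path} {pdb.Signature:N} age={pdb.Age}");
+//         }
+//     }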
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+using System;
+
+namespace Microsoft.FileFormats.PE
+{
+ public sealed class PEPdbRecord
+ {
+ public bool IsPortablePDB { get; private set; }
+ public string Path { get; private set; }
+ public Guid Signature { get; private set; }
+ public int Age { get; private set; }
+
+ public PEPdbRecord(bool isPortablePDB, string path, Guid sig, int age)
+ {
+ IsPortablePDB = isPortablePDB;
+ Path = path;
+ Signature = sig;
+ Age = age;
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+using System;
+
+namespace Microsoft.FileFormats.PE
+{
+ public sealed class PEPerfMapRecord
+ {
+ public string Path { get; private set; }
+ public byte[] Signature { get; private set; }
+ public uint Version { get; private set; }
+
+ public PEPerfMapRecord(string path, byte[] sig, uint version)
+ {
+ Path = path;
+ Signature = sig;
+ Version = version;
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Linq;
+
+namespace Microsoft.FileFormats.PE
+{
+ public static class LayoutManagerExtensions
+ {
+ public static LayoutManager AddPETypes(this LayoutManager layouts, bool is64Bit)
+ {
+ return layouts
+ .AddPrimitives(false)
+ .AddEnumTypes()
+ .AddSizeT(is64Bit ? 8 : 4)
+ .AddNullTerminatedString()
+ .AddTStructTypes(is64Bit ? new string[] { "PE32+" } : new string[] { "PE32" });
+ }
+ }
+
+ /// <summary>
+ /// IMAGE_NT_OPTIONAL_HDR32_MAGIC/IMAGE_NT_OPTIONAL_HDR64_MAGIC values
+ /// </summary>
+ public enum ImageMagic : ushort
+ {
+ Magic32 = 0x10b,
+ Magic64 = 0x20b
+ }
+
+ /// <summary>
+    /// The Magic field at the start of IMAGE_OPTIONAL_HEADER
+ /// </summary>
+ public class ImageOptionalHeaderMagic : TStruct
+ {
+ public ImageMagic Magic;
+
+ #region Validation Rules
+ public ValidationRule IsMagicValid
+ {
+ get
+ {
+ return new ValidationRule("PE Optional Header has invalid magic field", () => Enum.IsDefined(typeof(ImageMagic), Magic));
+ }
+ }
+ #endregion
+ }
+
+ /// <summary>
+ /// IMAGE_OPTIONAL_HEADER
+ /// </summary>
+ public class ImageOptionalHeader : ImageOptionalHeaderMagic
+ {
+ // Standard fields
+ public byte MajorLinkerVersion;
+ public byte MinorLinkerVersion;
+ public uint SizeOfCode;
+ public uint SizeOfInitializedData;
+ public uint SizeOfUninitializedData;
+ public uint RVAOfEntryPoint;
+ public uint BaseOfCode;
+ [If("PE32")]
+ public uint BaseOfData;
+
+ // NT additional fields
+ public SizeT ImageBase;
+ public uint SectionAlignment;
+ public uint FileAlignment;
+ public ushort MajorOperatingSystemVersion;
+ public ushort MinorOperatingSystemVersion;
+ public ushort MajorImageVersion;
+ public ushort MinorImageVersion;
+ public ushort MajorSubsystemVersion;
+ public ushort MinorSubsystemVersion;
+ public uint Win32VersionValue;
+ public uint SizeOfImage;
+ public uint SizeOfHeaders;
+ public uint CheckSum;
+ public ushort Subsystem;
+ public ushort DllCharacteristics;
+ public SizeT SizeOfStackReserve;
+ public SizeT SizeOfStackCommit;
+ public SizeT SizeOfHeapReserve;
+ public SizeT SizeOfHeapCommit;
+ public uint LoaderFlags;
+ public uint NumberOfRvaAndSizes;
+ }
+
+ /// <summary>
+ /// IMAGE_FILE_MACHINE_* values for ImageFileHeader.Machine
+ /// </summary>
+ public enum ImageFileMachine
+ {
+ Unknown = 0,
+ Amd64 = 0x8664, // AMD64 (K8)
+ I386 = 0x014c, // Intel 386.
+ Arm = 0x01c0, // ARM Little-Endian
+ Thumb = 0x01c2,
+ ArmNT = 0x01c4, // ARM Thumb-2 Little-Endian
+ Arm64 = 0xAA64
+ }
+
+ /// <summary>
+ /// Characteristics (IMAGE_FILE)
+ /// </summary>
+ [Flags]
+ public enum ImageFile : ushort
+ {
+ RelocsStripped = 0x0001,
+ ExecutableImage = 0x0002,
+ LargeAddressAware = 0x0020,
+ System = 0x1000,
+ Dll = 0x2000,
+ }
+
+ /// <summary>
+ /// IMAGE_FILE_HEADER struct
+ /// </summary>
+ public class ImageFileHeader : TStruct
+ {
+ public ushort Machine;
+ public ushort NumberOfSections;
+ public uint TimeDateStamp;
+ public uint PointerToSymbolTable;
+ public uint NumberOfSymbols;
+ public ushort SizeOfOptionalHeader;
+ public ushort Characteristics;
+ }
+
+ #region Section Header
+
+ /// <summary>
+ /// IMAGE_SECTION_HEADER
+ /// </summary>
+ public class ImageSectionHeader : TStruct
+ {
+ [ArraySize(8)]
+ public byte[] Name;
+ public uint VirtualSize;
+ public uint VirtualAddress;
+ public uint SizeOfRawData;
+ public uint PointerToRawData;
+ public uint PointerToRelocations;
+ public uint PointerToLinenumbers;
+ public ushort NumberOfRelocations;
+ public ushort NumberOfLinenumbers;
+ public uint Characteristics;
+ }
+
+ #endregion
+
+ #region Directories
+
+ /// <summary>
+ /// IMAGE_DIRECTORY_ENTRY_* defines
+ /// </summary>
+ public enum ImageDirectoryEntry
+ {
+ Export = 0,
+ Import = 1,
+ Resource = 2,
+ Exception = 3,
+ Certificates = 4,
+ BaseRelocation = 5,
+ Debug = 6,
+ Architecture = 7,
+ GlobalPointers = 8,
+ ThreadStorage = 9,
+ LoadConfiguration = 10,
+ BoundImport = 11,
+ ImportAddress = 12,
+ DelayImport = 13,
+ ComDescriptor = 14
+ }
+
+ /// <summary>
+ /// IMAGE_DATA_DIRECTORY struct
+ /// </summary>
+ public class ImageDataDirectory : TStruct
+ {
+ public uint VirtualAddress;
+ public uint Size;
+ }
+
+ #endregion
+
+ #region Debug Directory
+
+ /// <summary>
+ /// IMAGE_DEBUG_TYPE_* defines
+ /// </summary>
+ public enum ImageDebugType
+ {
+ Unknown = 0,
+ Coff = 1,
+ Codeview = 2,
+ Fpo = 3,
+ Misc = 4,
+ Bbt = 10,
+ Reproducible = 16,
+ EmbeddedPortablePdb = 17,
+ PdbChecksum = 19,
+ PerfMap = 21
+ };
+
+ /// <summary>
+ /// IMAGE_DEBUG_DIRECTORY struct
+ /// </summary>
+ public class ImageDebugDirectory : TStruct
+ {
+ public const ushort PortablePDBMinorVersion = 0x504d;
+
+ public uint Characteristics;
+ public uint TimeDateStamp;
+ public ushort MajorVersion;
+ public ushort MinorVersion;
+ public ImageDebugType Type;
+ public uint SizeOfData;
+ public uint AddressOfRawData;
+ public uint PointerToRawData;
+ };
+
+ public class CvInfoPdb70 : TStruct
+ {
+ public const int PDB70CvSignature = 0x53445352; // RSDS in ascii
+
+ public int CvSignature;
+ [ArraySize(16)]
+ public byte[] Signature;
+ public int Age;
+ }
+
+ public class PerfMapIdV1 : TStruct
+ {
+ public const int PerfMapEntryMagic = 0x4D523252; // R2RM in ascii
+
+ public int Magic;
+
+ [ArraySize(16)]
+ public byte[] Signature;
+ public uint Version;
+ }
+
+ #endregion
+
+ #region Resource Directory
+
+ /// <summary>
+ /// IMAGE_RESOURCE_DIRECTORY struct
+ /// </summary>
+ public class ImageResourceDirectory : TStruct
+ {
+ public uint Characteristics;
+ public uint TimeDateStamp;
+ public ushort MajorVersion;
+ public ushort MinorVersion;
+ public ushort NumberOfNamedEntries;
+ public ushort NumberOfIdEntries;
+ };
+
+ /// <summary>
+ /// IMAGE_RESOURCE_DIRECTORY_ENTRY for the resources by id
+ /// </summary>
+ public class ImageResourceDirectoryEntry : TStruct
+ {
+        // Resource id or name offset. Named entries/resources are currently not supported.
+ public uint Id;
+
+ // High bit 0. Address of a Resource Data entry (a leaf).
+ // High bit 1. The lower 31 bits are the address of another resource directory table (the next level down).
+ public uint OffsetToData;
+ }
+
+ /// <summary>
+ /// IMAGE_RESOURCE_DATA_ENTRY struct
+ /// </summary>
+ public class ImageResourceDataEntry : TStruct
+ {
+ public uint OffsetToData;
+ public uint Size;
+ public uint CodePage;
+ public uint Reserved;
+ }
+
+ /// <summary>
+ /// VS_FIXEDFILEINFO.FileFlags
+ /// </summary>
+ [Flags]
+ public enum FileInfoFlags : uint
+ {
+ Debug = 0x00000001,
+ SpecialBuild = 0x00000020,
+ }
+
+ /// <summary>
+ /// VS_FIXEDFILEINFO struct
+ /// </summary>
+ public class VsFixedFileInfo : TStruct
+ {
+ public const uint FixedFileInfoSignature = 0xFEEF04BD;
+
+ public uint Signature; // e.g. 0xfeef04bd
+ public uint StrucVersion; // e.g. 0x00000042 = "0.42"
+ public ushort FileVersionMinor;
+ public ushort FileVersionMajor;
+ public ushort FileVersionRevision;
+ public ushort FileVersionBuild;
+ public ushort ProductVersionMinor;
+ public ushort ProductVersionMajor;
+ public ushort ProductVersionRevision;
+ public ushort ProductVersionBuild;
+ public uint FileFlagsMask; // = 0x3F for version "0.42"
+ public FileInfoFlags FileFlags;
+ public uint FileOS; // e.g. VOS_DOS_WINDOWS16
+ public uint FileType; // e.g. VFT_DRIVER
+ public uint FileSubtype; // e.g. VFT2_DRV_KEYBOARD
+ public uint FileDateMS; // e.g. 0
+ public uint FileDateLS; // e.g. 0
+ }
+
+ /// <summary>
+ /// VS_VERSIONINFO struct
+ /// </summary>
+ public class VsVersionInfo : TStruct
+ {
+ public ushort Length;
+ public ushort ValueLength;
+ public ushort Type;
+ [ArraySize(16)]
+ public char[] Key;
+ public ushort Padding1;
+ public VsFixedFileInfo Value;
+ }
+
+ #endregion
+
+ #region IMAGE_EXPORT_DIRECTORY
+
+ /// <summary>
+ /// IMAGE_EXPORT_DIRECTORY struct
+ /// </summary>
+ public class ImageExportDirectory : TStruct
+ {
+ public uint Characteristics;
+ public uint TimeDateStamp;
+ public ushort MajorVersion;
+ public ushort MinorVersion;
+ public uint Name;
+ public uint Base;
+ public uint NumberOfFunctions;
+ public uint NumberOfNames;
+ public uint AddressOfFunctions; // RVA from base of image
+ public uint AddressOfNames; // RVA from base of image
+ public uint AddressOfNameOrdinals; // RVA from base of image
+ };
+
+ #endregion
+
+ /// <summary>
+ /// Pair of a checksum algorithm name (ex: "SHA256") and the bytes of the checksum.
+ /// </summary>
+ public class PdbChecksum : TStruct
+ {
+ public PdbChecksum(string algorithmName, byte[] checksum)
+ {
+ AlgorithmName = algorithmName;
+ Checksum = checksum;
+ }
+
+ public string AlgorithmName { get; }
+ public byte[] Checksum { get; }
+
+ public override string ToString()
+ {
+ return $"{AlgorithmName}:{ToHexString(Checksum)}";
+ }
+
+ /// <summary>
+ /// Convert an array of bytes to a lower case hex string.
+ /// </summary>
+ /// <param name="bytes">array of bytes</param>
+ /// <returns>hex string</returns>
+ public static string ToHexString(byte[] bytes)
+ {
+ if (bytes == null)
+ {
+ throw new ArgumentNullException(nameof(bytes));
+ }
+ return string.Concat(bytes.Select(b => b.ToString("x2")));
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.IO;
+using System.Text;
+
+namespace Microsoft.FileFormats.PerfMap
+{
+ public sealed class PerfMapFile
+ {
+ // See format of the perfmap file at https://github.com/dotnet/runtime/blob/main/docs/design/coreclr/botr/r2r-perfmap-format.md
+ private const int PerfMapV1SigLength = 16;
+
+ private const int PerfMapV1HeaderRecordCount = 5;
+
+ public const int MaxKnownPerfMapVersion = 1;
+
+ private const int HeaderRecordPseudoLength = 0;
+
+ private enum PerfMapPseudoRVAToken : uint
+ {
+ OutputSignature = 0xFFFFFFFF,
+ FormatVersion = 0xFFFFFFFE,
+ TargetOS = 0xFFFFFFFD,
+ TargetArchitecture = 0xFFFFFFFC,
+ TargetABI = 0xFFFFFFFB,
+ }
+
+ public enum PerfMapArchitectureToken : uint
+ {
+ Unknown = 0,
+ ARM = 1,
+ ARM64 = 2,
+ X64 = 3,
+ X86 = 4,
+ }
+
+ public enum PerfMapOSToken : uint
+ {
+ Unknown = 0,
+ Windows = 1,
+ Linux = 2,
+ OSX = 3,
+ FreeBSD = 4,
+ NetBSD = 5,
+ SunOS = 6,
+ }
+
+ public enum PerfMapAbiToken : uint
+ {
+ Unknown = 0,
+ Default = 1,
+ Armel = 2,
+ }
+
+ private readonly Stream _stream;
+ private readonly Lazy<PerfMapHeader> _header;
+
+ public PerfMapHeader Header { get => _header.Value; }
+
+ public bool IsValid => Header is not null;
+
+ public IEnumerable<PerfMapRecord> PerfRecords
+ {
+ get
+ {
+ ThrowIfInvalid();
+
+ if (Header.Version > MaxKnownPerfMapVersion)
+ {
+ throw new NotImplementedException($"Format version {Header.Version} unknown. Max known format is {MaxKnownPerfMapVersion}");
+ }
+ using StreamReader reader = new(_stream, Encoding.UTF8, false, 1024, leaveOpen: true);
+
+ // Skip over the header.
+                // For now this is V1; the header length will need to become a lookup based on the version.
+ for (int i = 0; i < PerfMapV1HeaderRecordCount; ++i)
+ {
+ _ = reader.ReadLine();
+ }
+ while (true)
+ {
+ PerfMapFile.PerfMapRecord cur = ReadRecord(reader);
+ if (cur is null)
+ {
+ yield break;
+ }
+ yield return cur;
+ }
+ }
+ }
+
+ private void ThrowIfInvalid()
+ {
+ if (!IsValid)
+ {
+ throw new BadInputFormatException("The PerfMap is not valid");
+ }
+ }
+
+ public PerfMapFile(Stream stream)
+ {
+ System.Diagnostics.Debug.Assert(stream.CanSeek);
+ _stream = stream;
+ _header = new Lazy<PerfMapHeader>(ReadHeader, System.Threading.LazyThreadSafetyMode.ExecutionAndPublication);
+ }
+
+ private PerfMapHeader ReadHeader()
+ {
+ static bool IsValidHeaderRecord(PerfMapPseudoRVAToken expectedToken, PerfMapRecord record)
+ => record is not null && (uint)expectedToken == record.Rva
+ && record.Length == HeaderRecordPseudoLength;
+
+ long prevPosition = _stream.Position;
+ try
+ {
+ _stream.Position = 0;
+ // Headers don't need much of a buffer.
+ using StreamReader reader = new(_stream, Encoding.UTF8, false, 256, leaveOpen: true);
+
+ PerfMapRecord sigRecord = ReadRecord(reader);
+ if (!IsValidHeaderRecord(PerfMapPseudoRVAToken.OutputSignature, sigRecord) ||
+ !Helpers.TryConvertHexStringToBytes(sigRecord.Name, out byte[] sigBytes) ||
+ sigBytes?.Length != PerfMapV1SigLength)
+ {
+ return null;
+ }
+ PerfMapRecord versionRecord = ReadRecord(reader);
+ if (!IsValidHeaderRecord(PerfMapPseudoRVAToken.FormatVersion, versionRecord) ||
+ !uint.TryParse(versionRecord.Name, out uint version))
+ {
+ return null;
+ }
+ PerfMapRecord osRecord = ReadRecord(reader);
+ if (!IsValidHeaderRecord(PerfMapPseudoRVAToken.TargetOS, osRecord) ||
+ !uint.TryParse(osRecord.Name, out uint os))
+ {
+ return null;
+ }
+ PerfMapRecord archRecord = ReadRecord(reader);
+ if (!IsValidHeaderRecord(PerfMapPseudoRVAToken.TargetArchitecture, archRecord) ||
+ !uint.TryParse(archRecord.Name, out uint arch))
+ {
+ return null;
+ }
+ PerfMapRecord abiRecord = ReadRecord(reader);
+ if (!IsValidHeaderRecord(PerfMapPseudoRVAToken.TargetABI, abiRecord) ||
+ !uint.TryParse(abiRecord.Name, out uint abi))
+ {
+ return null;
+ }
+ return new PerfMapHeader(sigBytes, version, os, arch, abi);
+                // Extend this as new format revisions are added.
+                // We don't return null for a header version above the max known: header versions are backwards compatible and the version is not needed for indexing.
+ }
+ catch (Exception ex) when (ex is BadInputFormatException || ex is EndOfStreamException)
+            {
+            }
+ finally
+ {
+ _stream.Position = prevPosition;
+ }
+ return null;
+ }
+
+ private static PerfMapRecord ReadRecord(StreamReader reader)
+ {
+ string[] segments = reader.ReadLine()?.Split();
+
+ if (segments is null)
+ {
+ return null;
+ }
+ if (segments.Length != 3)
+ {
+ throw new BadInputFormatException("Entry on perfmap record doesn't have 3 segments.");
+ }
+ if (!uint.TryParse(segments[0], NumberStyles.HexNumber, CultureInfo.InvariantCulture, out uint rva))
+ {
+ throw new BadInputFormatException("Record's RVA is not a valid hex unsigned int.");
+ }
+ if (!ushort.TryParse(segments[1], NumberStyles.HexNumber, CultureInfo.InvariantCulture, out ushort length))
+ {
+ throw new BadInputFormatException("Record's Length is not a valid hex unsigned int.");
+ }
+ return new PerfMapRecord(rva, length, segments[2]);
+ }
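+
+        // Worked example (hypothetical values): the line
+        //   "0000A520 1C Module.Method()"
+        // splits into three whitespace-separated segments and parses to
+        // Rva = 0xA520, Length = 0x1C, Name = "Module.Method()".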
+
+ public sealed class PerfMapRecord
+ {
+ public PerfMapRecord(uint rva, ushort length, string entryName)
+ {
+ Rva = rva;
+ Length = length;
+ Name = entryName;
+ }
+
+ public uint Rva { get; }
+ public ushort Length { get; }
+ public string Name { get; }
+ }
+
+ public sealed class PerfMapHeader
+ {
+ public PerfMapHeader(byte[] signature, uint version, uint operatingSystem, uint architecture, uint abi)
+ {
+ Signature = signature;
+ Version = version;
+ OperatingSystem = (PerfMapOSToken) operatingSystem;
+ Architecture = (PerfMapArchitectureToken) architecture;
+ Abi = (PerfMapAbiToken) abi;
+ }
+
+ public byte[] Signature { get; }
+ public uint Version { get; }
+ public PerfMapOSToken OperatingSystem { get; }
+ public PerfMapArchitectureToken Architecture { get; }
+ public PerfMapAbiToken Abi { get; }
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Linq;
+using System.Reflection;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats
+{
+ /// <summary>
+ /// A pointer layout that can create pointers from integral storage types
+ /// </summary>
+ public class PointerLayout : LayoutBase
+ {
+ protected readonly ILayout _storageLayout;
+ private readonly LayoutManager _layoutManager;
+ private readonly Type _targetType;
+ private ILayout _targetLayout;
+
+ public ILayout TargetLayout => _targetLayout ??= _layoutManager.GetLayout(_targetType);
+
+ public PointerLayout(LayoutManager layoutManager, Type pointerType, ILayout storageLayout, Type targetType) :
+ base(pointerType, storageLayout.Size, storageLayout.NaturalAlignment)
+ {
+ _layoutManager = layoutManager;
+ _storageLayout = storageLayout;
+ _targetType = targetType;
+ }
+ }
+
+ /// <summary>
+ /// A pointer layout that can create pointers from the System.UInt64 storage type
+ /// </summary>
+ public class UInt64PointerLayout : PointerLayout
+ {
+ public UInt64PointerLayout(LayoutManager layoutManager, Type pointerType, ILayout storageLayout, Type targetType) :
+ base(layoutManager, pointerType, storageLayout, targetType)
+ {
+ if (storageLayout.Type != typeof(ulong))
+ {
+ throw new ArgumentException("storageLayout must have System.UInt64 type");
+ }
+ }
+
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+ ulong value = (ulong)_storageLayout.Read(dataSource, position);
+ Pointer p = (Pointer)Activator.CreateInstance(Type);
+ p.Init(TargetLayout, value);
+ return p;
+ }
+ }
+
+ /// <summary>
+ /// A pointer layout that can create pointers from the System.UInt32 storage type
+ /// </summary>
+ public class UInt32PointerLayout : PointerLayout
+ {
+ public UInt32PointerLayout(LayoutManager layoutManager, Type pointerType, ILayout storageLayout, Type targetType) :
+ base(layoutManager, pointerType, storageLayout, targetType)
+ {
+ if (storageLayout.Type != typeof(uint))
+ {
+ throw new ArgumentException("storageLayout must have System.UInt32 type");
+ }
+ }
+
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+ ulong value = (uint)_storageLayout.Read(dataSource, position);
+ Pointer p = (Pointer)Activator.CreateInstance(Type);
+ p.Init(TargetLayout, value);
+ return p;
+ }
+ }
+
+ /// <summary>
+ /// A pointer layout that can create pointers from the SizeT storage type
+ /// </summary>
+ public class SizeTPointerLayout : PointerLayout
+ {
+ public SizeTPointerLayout(LayoutManager layoutManager, Type pointerType, ILayout storageLayout, Type targetType) :
+ base(layoutManager, pointerType, storageLayout, targetType)
+ {
+ if (storageLayout.Type != typeof(SizeT))
+ {
+ throw new ArgumentException("storageLayout must have SizeT type");
+ }
+ }
+
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+ ulong value = (SizeT)_storageLayout.Read(dataSource, position);
+ Pointer p = (Pointer)Activator.CreateInstance(Type);
+ p.Init(TargetLayout, value);
+ return p;
+ }
+ }
+
+ public class Pointer
+ {
+ public ulong Value;
+ public bool IsNull
+ {
+ get { return Value == 0; }
+ }
+
+ public override string ToString()
+ {
+ return "0x" + Value.ToString("x");
+ }
+
+ public static implicit operator ulong (Pointer instance)
+ {
+ return instance.Value;
+ }
+
+ internal void Init(ILayout targetLayout, ulong value)
+ {
+ _targetLayout = targetLayout;
+ Value = value;
+ }
+
+ protected ILayout _targetLayout;
+ }
+
+ /// <summary>
+ /// A pointer that can be dereferenced to produce another object
+ /// </summary>
+ /// <typeparam name="TargetType">The type of object that is produced by dereferencing the pointer</typeparam>
+ /// <typeparam name="StorageType">The type that determines how the pointer's underlying address value is parsed</typeparam>
+ public class Pointer<TargetType, StorageType> : Pointer
+ {
+ /// <summary>
+ /// Read an object of _TargetType_ from the _addressSpace_
+ /// </summary>
+ public TargetType Dereference(IAddressSpace addressSpace)
+ {
+ return Element(addressSpace, 0);
+ }
+
+ /// <summary>
+ /// Read the array element _index_ from an array in _addressSpace_
+ /// </summary>
+ public TargetType Element(IAddressSpace addressSpace, uint index)
+ {
+ if (Value != 0)
+ {
+ return (TargetType)_targetLayout.Read(addressSpace, Value + index * _targetLayout.Size);
+ }
+ return default;
+ }
+ }
+
+ public static partial class LayoutManagerExtensions
+ {
+ /// <summary>
+ /// Adds support for reading types derived from Pointer<,>
+ /// </summary>
+ public static LayoutManager AddPointerTypes(this LayoutManager layouts)
+ {
+ layouts.AddLayoutProvider(GetPointerLayout);
+ return layouts;
+ }
+
+ private static ILayout GetPointerLayout(Type pointerType, LayoutManager layoutManager)
+ {
+ if (!typeof(Pointer).GetTypeInfo().IsAssignableFrom(pointerType))
+ {
+ return null;
+ }
+ Type curPointerType = pointerType;
+ TypeInfo genericPointerTypeInfo = null;
+ while (curPointerType != typeof(Pointer))
+ {
+ TypeInfo curPointerTypeInfo = curPointerType.GetTypeInfo();
+ if (curPointerTypeInfo.IsGenericType && curPointerTypeInfo.GetGenericTypeDefinition() == typeof(Pointer<,>))
+ {
+ genericPointerTypeInfo = curPointerTypeInfo;
+ break;
+ }
+ curPointerType = curPointerTypeInfo.BaseType;
+ }
+ if (genericPointerTypeInfo == null)
+ {
+ throw new LayoutException("Pointer types must be derived from Pointer<,,>");
+ }
+ Type targetType = genericPointerTypeInfo.GetGenericArguments()[0];
+ Type storageType = genericPointerTypeInfo.GetGenericArguments()[1];
+ ILayout storageLayout = layoutManager.GetLayout(storageType);
+
+            // Unfortunately storageLayout.Read returns a boxed object that can't be
+            // cast to a ulong without first being unboxed. These three Pointer layout
+            // types are identical other than unboxing to a different type. Generics
+            // don't work here: there is no constraint that ensures the type parameter
+            // defines a conversion operator to ulong. Specifying a Func<object,ulong>
+            // parameter would work, but I opted to write each class separately so that
+            // we don't pay the cost of an extra delegate invocation for each pointer
+            // read. It may be premature optimization, but the complexity should at
+            // least be contained within this file.
+
+ if (storageLayout.Type == typeof(SizeT))
+ {
+ return new SizeTPointerLayout(layoutManager, pointerType, storageLayout, targetType);
+ }
+ else if (storageLayout.Type == typeof(ulong))
+ {
+ return new UInt64PointerLayout(layoutManager, pointerType, storageLayout, targetType);
+ }
+ else if (storageLayout.Type == typeof(uint))
+ {
+ return new UInt32PointerLayout(layoutManager, pointerType, storageLayout, targetType);
+ }
+ else
+ {
+ throw new LayoutException("Pointer types must have a storage type of SizeT, ulong, or uint");
+ }
+ }
+ }
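+
+    // Minimal usage sketch (illustrative types, not part of the library): a
+    // pointer parsed from SizeT storage that dereferences to a uint. Assumes a
+    // LayoutManager with primitives, SizeT, and pointer support registered.
+    internal sealed class ExampleUIntPointer : Pointer<uint, SizeT> { }
+
+    internal static class PointerUsageExample
+    {
+        internal static uint ReadTarget(IAddressSpace data)
+        {
+            LayoutManager layouts = new LayoutManager()
+                .AddPrimitives()
+                .AddSizeT(8) // 64-bit target: SizeT parses as 8 bytes
+                .AddPointerTypes();
+            // Parse the pointer at offset 0, then read the uint it points to.
+            ExampleUIntPointer p = (ExampleUIntPointer)layouts.GetLayout<ExampleUIntPointer>().Read(data, 0);
+            return p.IsNull ? 0u : p.Dereference(data);
+        }
+    }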
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats
+{
+ public abstract class PrimitiveTypeLayout : LayoutBase
+ {
+ public PrimitiveTypeLayout(Type type, bool isBigEndian, uint size) : base(type, size)
+ {
+ IsBigEndian = isBigEndian;
+ }
+ public bool IsBigEndian { get; private set; }
+ }
+
+ /// <summary>
+    /// TypeParser for System.Boolean
+ /// </summary>
+ public class BoolLayout : PrimitiveTypeLayout
+ {
+ public BoolLayout(Type type, bool isBigEndian) : base(type, isBigEndian, 1) { }
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+ byte[] buffer = dataSource.Read(position, 1);
+ return buffer[0] != 0;
+ }
+ }
+
+ /// <summary>
+ /// TypeParser for System.Int8
+ /// </summary>
+ public class Int8Layout : PrimitiveTypeLayout
+ {
+ public Int8Layout(bool isBigEndian) : this(typeof(sbyte), isBigEndian) { }
+ public Int8Layout(Type type, bool isBigEndian) : base(type, isBigEndian, 1) { }
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+ byte[] buffer = dataSource.Read(position, 1);
+ return unchecked((sbyte)buffer[0]);
+ }
+ }
+
+ /// <summary>
+ /// TypeParser for System.UInt8
+ /// </summary>
+ public class UInt8Layout : PrimitiveTypeLayout
+ {
+ public UInt8Layout(bool isBigEndian) : this(typeof(byte), isBigEndian) { }
+ public UInt8Layout(Type type, bool isBigEndian) : base(type, isBigEndian, 1) { }
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+ byte[] buffer = dataSource.Read(position, 1);
+ return buffer[0];
+ }
+ }
+
+ /// <summary>
+ /// TypeParser for System.Char.
+ /// </summary>
+ public class CharLayout : PrimitiveTypeLayout
+ {
+ public CharLayout(bool isBigEndian) : this(typeof(char), isBigEndian) { }
+ public CharLayout(Type type, bool isBigEndian) : base(type, isBigEndian, 2) { }
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+ byte[] buffer = dataSource.Read(position, 2);
+ if (IsBigEndian == BitConverter.IsLittleEndian)
+ {
+ Array.Reverse(buffer);
+ }
+ return BitConverter.ToChar(buffer, 0);
+ }
+ }
+
+ /// <summary>
+ /// TypeParser for System.Int16
+ /// </summary>
+ public class Int16Layout : PrimitiveTypeLayout
+ {
+ public Int16Layout(bool isBigEndian) : this(typeof(short), isBigEndian) { }
+ public Int16Layout(Type type, bool isBigEndian) : base(type, isBigEndian, 2) { }
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+ byte[] buffer = dataSource.Read(position, 2);
+ if (IsBigEndian == BitConverter.IsLittleEndian)
+ {
+ Array.Reverse(buffer);
+ }
+ return BitConverter.ToInt16(buffer, 0);
+ }
+ }
+
+ /// <summary>
+ /// TypeParser for System.UInt16
+ /// </summary>
+ public class UInt16Layout : PrimitiveTypeLayout
+ {
+ public UInt16Layout(bool isBigEndian) : this(typeof(ushort), isBigEndian) { }
+ public UInt16Layout(Type type, bool isBigEndian) : base(type, isBigEndian, 2) { }
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+ byte[] buffer = dataSource.Read(position, 2);
+ if (IsBigEndian == BitConverter.IsLittleEndian)
+ {
+ Array.Reverse(buffer);
+ }
+ return BitConverter.ToUInt16(buffer, 0);
+ }
+ }
+
+ /// <summary>
+ /// TypeParser for System.Int32
+ /// </summary>
+ public class Int32Layout : PrimitiveTypeLayout
+ {
+ public Int32Layout(bool isBigEndian) : this(typeof(int), isBigEndian) { }
+ public Int32Layout(Type type, bool isBigEndian) : base(type, isBigEndian, 4) { }
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+ byte[] buffer = dataSource.Read(position, 4);
+ if (IsBigEndian == BitConverter.IsLittleEndian)
+ {
+ Array.Reverse(buffer);
+ }
+ return BitConverter.ToInt32(buffer, 0);
+ }
+ }
+
+ /// <summary>
+ /// TypeParser for System.UInt32
+ /// </summary>
+ public class UInt32Layout : PrimitiveTypeLayout
+ {
+ public UInt32Layout(bool isBigEndian) : this(typeof(uint), isBigEndian) { }
+ public UInt32Layout(Type type, bool isBigEndian) : base(type, isBigEndian, 4) { }
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+ byte[] buffer = dataSource.Read(position, 4);
+ if (IsBigEndian == BitConverter.IsLittleEndian)
+ {
+ Array.Reverse(buffer);
+ }
+ return BitConverter.ToUInt32(buffer, 0);
+ }
+ }
+
+ /// <summary>
+ /// TypeParser for System.Single
+ /// </summary>
+ public class SingleLayout : PrimitiveTypeLayout
+ {
+ public SingleLayout(bool isBigEndian) : this(typeof(float), isBigEndian) { }
+ public SingleLayout(Type type, bool isBigEndian) : base(type, isBigEndian, 4) { }
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+ byte[] buffer = dataSource.Read(position, 4);
+ if (IsBigEndian == BitConverter.IsLittleEndian)
+ {
+ byte temp = buffer[0];
+ buffer[0] = buffer[3];
+ buffer[3] = temp;
+ temp = buffer[1];
+ buffer[1] = buffer[2];
+ buffer[2] = temp;
+ }
+ return BitConverter.ToSingle(buffer, 0);
+ }
+ }
+
+ /// <summary>
+ /// TypeParser for System.Int64
+ /// </summary>
+ public class Int64Layout : PrimitiveTypeLayout
+ {
+ public Int64Layout(bool isBigEndian) : this(typeof(long), isBigEndian) { }
+ public Int64Layout(Type type, bool isBigEndian) : base(type, isBigEndian, 8) { }
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+ byte[] buffer = dataSource.Read(position, 8);
+ if (IsBigEndian == BitConverter.IsLittleEndian)
+ {
+ Array.Reverse(buffer);
+ }
+ return BitConverter.ToInt64(buffer, 0);
+ }
+ }
+
+ /// <summary>
+ /// TypeParser for System.UInt64
+ /// </summary>
+ public class UInt64Layout : PrimitiveTypeLayout
+ {
+ public UInt64Layout(bool isBigEndian) : this(typeof(ulong), isBigEndian) { }
+ public UInt64Layout(Type type, bool isBigEndian) : base(type, isBigEndian, 8) { }
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+ byte[] buffer = dataSource.Read(position, 8);
+ if (IsBigEndian == BitConverter.IsLittleEndian)
+ {
+ Array.Reverse(buffer);
+ }
+ return BitConverter.ToUInt64(buffer, 0);
+ }
+ }
+
+ /// <summary>
+ /// TypeParser for System.Double
+ /// </summary>
+ public class DoubleLayout : PrimitiveTypeLayout
+ {
+ public DoubleLayout(bool isBigEndian) : this(typeof(double), isBigEndian) { }
+ public DoubleLayout(Type type, bool isBigEndian) : base(type, isBigEndian, 8) { }
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+            byte[] buffer = dataSource.Read(position, 8);
+ if (IsBigEndian == BitConverter.IsLittleEndian)
+ {
+ byte temp = buffer[0];
+ buffer[0] = buffer[7];
+ buffer[7] = temp;
+ temp = buffer[1];
+ buffer[1] = buffer[6];
+ buffer[6] = temp;
+ temp = buffer[2];
+ buffer[2] = buffer[5];
+ buffer[5] = temp;
+ temp = buffer[3];
+ buffer[3] = buffer[4];
+ buffer[4] = temp;
+ }
+ return BitConverter.ToDouble(buffer, 0);
+ }
+ }
+
+ public static partial class LayoutManagerExtensions
+ {
+ /// <summary>
+        /// Adds support for reading bool, sbyte, byte, char, short, ushort, int, uint, long, ulong, float, and double
+ /// </summary>
+ /// <param name="layouts">The layout manager that will hold the new layout</param>
+ /// <param name="isBigEndian">True if the primitives should be read in big endian byte order, otherwise little endian</param>
+ public static LayoutManager AddPrimitives(this LayoutManager layouts, bool isBigEndian = false)
+ {
+ layouts.AddLayout(new BoolLayout(typeof(bool), isBigEndian));
+ layouts.AddLayout(new Int8Layout(typeof(sbyte), isBigEndian));
+ layouts.AddLayout(new UInt8Layout(typeof(byte), isBigEndian));
+ layouts.AddLayout(new CharLayout(typeof(char), isBigEndian));
+ layouts.AddLayout(new Int16Layout(typeof(short), isBigEndian));
+ layouts.AddLayout(new UInt16Layout(typeof(ushort), isBigEndian));
+ layouts.AddLayout(new Int32Layout(typeof(int), isBigEndian));
+ layouts.AddLayout(new UInt32Layout(typeof(uint), isBigEndian));
+ layouts.AddLayout(new Int64Layout(typeof(long), isBigEndian));
+ layouts.AddLayout(new UInt64Layout(typeof(ulong), isBigEndian));
+ layouts.AddLayout(new SingleLayout(typeof(float), isBigEndian));
+ layouts.AddLayout(new DoubleLayout(typeof(double), isBigEndian));
+ return layouts;
+ }
+ }
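+
+    // Minimal sketch: reading a big-endian uint through a layout, using the
+    // StreamAddressSpace type defined in this library. The same four bytes read
+    // back as 0x78563412 under the default little-endian layouts.
+    internal static class PrimitiveReadExample
+    {
+        internal static uint ReadBigEndianUInt32()
+        {
+            byte[] bytes = { 0x12, 0x34, 0x56, 0x78 };
+            using StreamAddressSpace space = new(new System.IO.MemoryStream(bytes));
+            LayoutManager layouts = new LayoutManager().AddPrimitives(isBigEndian: true);
+            return (uint)layouts.GetLayout<uint>().Read(space, 0); // 0x12345678
+        }
+    }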
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+
+namespace Microsoft.FileFormats
+{
+ public class Reader
+ {
+ public Reader(IAddressSpace dataSource, bool isBigEndian = false) :
+ this(dataSource, new LayoutManager().AddPrimitives(isBigEndian).AddEnumTypes().AddTStructTypes())
+ { }
+
+ public Reader(IAddressSpace dataSource, LayoutManager layoutManager)
+ {
+ DataSource = dataSource;
+ LayoutManager = layoutManager;
+ }
+
+ public LayoutManager LayoutManager { get; private set; }
+ public IAddressSpace DataSource { get; private set; }
+
+
+ public T[] ReadArray<T>(ulong position, uint elementCount)
+ {
+ return (T[])LayoutManager.GetArrayLayout<T[]>(elementCount).Read(DataSource, position);
+ }
+
+ public T[] ReadArray<T>(ref ulong position, uint elementCount)
+ {
+ uint bytesRead;
+ T[] ret = (T[])LayoutManager.GetArrayLayout<T[]>(elementCount).Read(DataSource, position, out bytesRead);
+ position += bytesRead;
+ return ret;
+ }
+
+ public bool TryRead<T>(ulong position, out T value)
+ {
+            if (DataSource.Length >= position + SizeOf<T>())
+ {
+ value = Read<T>(position);
+ return true;
+ }
+ value = default(T);
+ return false;
+ }
+
+ public T Read<T>(ulong position)
+ {
+ return (T)LayoutManager.GetLayout<T>().Read(DataSource, position);
+ }
+
+ public T Read<T>(ref ulong position)
+ {
+ uint bytesRead;
+ T ret = (T)LayoutManager.GetLayout<T>().Read(DataSource, position, out bytesRead);
+ position += bytesRead;
+ return ret;
+ }
+
+ public uint Read(ulong position, byte[] buffer, uint bufferOffset, uint count)
+ {
+ return DataSource.Read(position, buffer, bufferOffset, count);
+ }
+
+ public byte[] Read(ulong position, uint count)
+ {
+ return DataSource.Read(position, count);
+ }
+
+ public byte[] Read(ref ulong position, uint count)
+ {
+ byte[] ret = DataSource.Read(position, count);
+ position += count;
+ return ret;
+ }
+
+ public ulong Length { get { return DataSource.Length; } }
+
+ public uint SizeOf<T>()
+ {
+ return LayoutManager.GetLayout<T>().Size;
+ }
+
+ public Reader WithRelativeAddressSpace(ulong startOffset, ulong length)
+ {
+ return WithAddressSpace(new RelativeAddressSpace(DataSource, startOffset, length));
+ }
+
+ public Reader WithRelativeAddressSpace(ulong startOffset, ulong length, long baseToRelativeShift)
+ {
+ return WithAddressSpace(new RelativeAddressSpace(DataSource, startOffset, length, baseToRelativeShift));
+ }
+
+ public Reader WithAddressSpace(IAddressSpace addressSpace)
+ {
+ return new Reader(addressSpace, LayoutManager);
+ }
+ }
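+
+    // Usage sketch (hypothetical data layout): sequential parsing with the
+    // ref-position overloads, which advance the cursor by the bytes consumed.
+    // Assumes the data source begins with a uint count followed by that many
+    // ushort elements.
+    internal static class ReaderUsageExample
+    {
+        internal static ushort[] ReadCountedArray(IAddressSpace data)
+        {
+            Reader reader = new(data); // little-endian primitives by default
+            ulong position = 0;
+            uint count = reader.Read<uint>(ref position); // advances position by 4
+            return reader.ReadArray<ushort>(ref position, count); // advances by 2 * count
+        }
+    }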
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats
+{
+ /// <summary>
+ /// An integral type that can be configured to parse as a 4 byte uint or 8 byte ulong
+ /// </summary>
+ public struct SizeT
+ {
+ private ulong _value;
+ internal SizeT(ulong value)
+ {
+ _value = value;
+ }
+
+ public static implicit operator ulong (SizeT instance)
+ {
+ return instance._value;
+ }
+
+ public static explicit operator long (SizeT instance)
+ {
+ return (long)instance._value;
+ }
+
+ public static explicit operator uint (SizeT instance)
+ {
+ return (uint)instance._value;
+ }
+
+ public override string ToString()
+ {
+ return "0x" + _value.ToString("x");
+ }
+ }
+
+ public class UInt64SizeTLayout : LayoutBase
+ {
+ private ILayout _storageLayout;
+ public UInt64SizeTLayout(ILayout storageLayout) : base(typeof(SizeT), storageLayout.Size)
+ {
+ if (storageLayout.Type != typeof(ulong))
+ {
+ throw new ArgumentException("storageLayout must be for the System.UInt64 type");
+ }
+ _storageLayout = storageLayout;
+ }
+
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+ return new SizeT((ulong)_storageLayout.Read(dataSource, position));
+ }
+ }
+
+ public class UInt32SizeTLayout : LayoutBase
+ {
+ private ILayout _storageLayout;
+ public UInt32SizeTLayout(ILayout storageLayout) : base(typeof(SizeT), storageLayout.Size)
+ {
+ if (storageLayout.Type != typeof(uint))
+ {
+ throw new ArgumentException("storageLayout must be for the System.UInt32 type");
+ }
+ _storageLayout = storageLayout;
+ }
+
+ public override object Read(IAddressSpace dataSource, ulong position)
+ {
+ return new SizeT((uint)_storageLayout.Read(dataSource, position));
+ }
+ }
+
+ public static partial class LayoutManagerExtensions
+ {
+ /// <summary>
+ /// Adds support for parsing the SizeT type
+ /// </summary>
+ /// <param name="size">The number of bytes that should be parsed for SizeT, either 4 or 8</param>
+ /// <param name="layouts">The layout manager that will hold the new layout</param>
+ /// <remarks>
+        /// SizeT reuses the existing parsing logic for either uint or ulong depending on size. The LayoutManager
+        /// is expected to already have the relevant type's layout defined before calling this method.
+ /// </remarks>
+ public static LayoutManager AddSizeT(this LayoutManager layouts, int size)
+ {
+ if (size == 4)
+ {
+ layouts.AddLayout(new UInt32SizeTLayout(layouts.GetLayout<uint>()));
+ }
+ else if (size == 8)
+ {
+ layouts.AddLayout(new UInt64SizeTLayout(layouts.GetLayout<ulong>()));
+ }
+ else
+ {
+ throw new ArgumentException("Size must be 4 or 8");
+ }
+ return layouts;
+ }
+ }
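+
+    // Sketch of the registration order the remarks above require: the uint or
+    // ulong layout must already exist before AddSizeT can reuse it.
+    internal static class SizeTRegistrationExample
+    {
+        internal static LayoutManager For32BitTarget() =>
+            new LayoutManager().AddPrimitives().AddSizeT(4); // SizeT parses as 4 bytes
+
+        internal static LayoutManager For64BitTarget() =>
+            new LayoutManager().AddPrimitives().AddSizeT(8); // SizeT parses as 8 bytes
+    }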
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats
+{
+ /// <summary>
+ /// Creates an address space that reads from a stream.
+ /// </summary>
+ public sealed class StreamAddressSpace : IAddressSpace, IDisposable
+ {
+ private Stream _stream;
+
+ public StreamAddressSpace(Stream stream)
+ {
+ System.Diagnostics.Debug.Assert(stream.CanSeek);
+ _stream = stream;
+ Length = (ulong)stream.Length;
+ }
+
+ /// <summary>
+ /// The upper bound (non-inclusive) of readable addresses
+ /// </summary>
+ public ulong Length { get; private set; }
+
+ /// <summary>
+ /// Reads a range of bytes from the address space
+ /// </summary>
+ /// <param name="position">The position in the address space to begin reading from</param>
+ /// <param name="buffer">The buffer that will receive the bytes that are read</param>
+ /// <param name="bufferOffset">The offset in the output buffer to begin writing the bytes</param>
+ /// <param name="count">The number of bytes to read into the buffer</param>
+ /// <returns>The number of bytes read</returns>
+ public uint Read(ulong position, byte[] buffer, uint bufferOffset, uint count)
+ {
+ if (_stream is null)
+ {
+ throw new ObjectDisposedException(nameof(_stream), "StreamAddressSpace instance has been disposed");
+ }
+ if (position + count > Length)
+ {
+ throw new BadInputFormatException("Unexpected end of data: Expected " + count + " bytes.");
+ }
+ _stream.Position = (long)position;
+ return (uint)_stream.Read(buffer, (int)bufferOffset, (int)count);
+ }
+
+ public void Dispose()
+ {
+ _stream?.Dispose();
+ _stream = null;
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Reflection;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats
+{
+ /// <summary>
+ /// A TStruct acts as a target-independent description of a structured sequence of bytes in
+ /// the input being parsed.
+ /// </summary>
+ /// <remarks>
+ /// TStructs primarily declare public instance fields. The type of each field must be a type
+ /// for which a ILayout exists within the LayoutManager used to read it.
+ ///
+ /// Although a LayoutManager isn't required to have any particular types, it is often configured to support
+ /// at least these types:
+ ///
+ /// Byte/SByte/[U]Int[16/32/64] - One of the fixed-size integral types. (In particular, *not* IntPtr)
+    /// Enum            - Any enum whose underlying type is one of the fixed-size integral types.
+ /// TStruct - Another TStruct. Note that this describes a *nested* struct, not a pointer
+ /// to another struct.
+ ///
+ /// Binding to a LayoutManager object:
+ /// --------------------------
+ /// A TStruct is not actually parsable until a LayoutManager produces an ILayout for it. The LayoutManager
+ /// provides the extra information (e.g. pointer size) that permit the parser to compute the final
+ /// offsets and size of the TStruct fields.
+ ///
+ /// Non-instance fields:
+ /// --------------------
+ /// TStructs can contain methods, static or private fields and even nested classes if convenient. The parsing code
+ /// ignores them.
+ ///
+ /// </remarks>
+ public abstract class TStruct
+ {
+ }
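+
+    // Illustrative declaration (hypothetical format, not a real file header):
+    // public instance fields are parsed in declaration order at the offsets the
+    // layout rules above compute, so this parses a uint at offset 0 followed by
+    // a ushort at offset 4, e.g. via new Reader(space).Read<ExampleHeader>(0).
+    internal sealed class ExampleHeader : TStruct
+    {
+        public uint Magic;
+        public ushort Version;
+    }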
+
+ [AttributeUsage(AttributeTargets.Class)]
+ public class PackAttribute : Attribute
+ {
+ public uint Pack { get; private set; }
+
+ public PackAttribute(uint pack)
+ {
+ Pack = pack;
+ }
+ }
+
+ /// <summary>
+ /// TLayouts expose one of these for each field that's mapped to the input
+ /// </summary>
+ public sealed class TField : IField
+ {
+ public TField(FieldInfo fieldInfo, ILayout layout, uint offset)
+ {
+ FieldInfo = fieldInfo;
+ Layout = layout;
+ Offset = offset;
+ }
+
+ public FieldInfo FieldInfo { get; private set; }
+ public uint Offset { get; private set; }
+ public uint Size { get { return Layout.Size; } }
+ public uint NaturalAlignment { get { return Layout.NaturalAlignment; } }
+ public ILayout Layout { get; private set; }
+ public ILayout DeclaringLayout { get; set; }
+ public string Name { get { return FieldInfo.Name; } }
+
+ public object GetValue(TStruct tStruct)
+ {
+ return FieldInfo.GetValue(tStruct);
+ }
+
+ public override string ToString()
+ {
+ return DeclaringLayout.Type.FullName + "." + FieldInfo.Name + " [+0x" + Offset.ToString("x") + "]";
+ }
+
+ public void SetValue(TStruct tStruct, object newValue)
+ {
+ FieldInfo.SetValue(tStruct, newValue);
+ }
+ }
+
+ /// <summary>
+ /// The layout for a TStruct derived type
+ /// </summary>
+ public class TLayout : LayoutBase
+ {
+ public TLayout(Type type, uint size, uint naturalAlignment, uint sizeAsBaseType, IField[] fields) :
+ base(type, size, naturalAlignment, sizeAsBaseType, fields)
+ { }
+
+ public override object Read(IAddressSpace dataTarget, ulong position)
+ {
+ TStruct blank = (TStruct)Activator.CreateInstance(Type);
+ foreach (IField field in Fields)
+ {
+ object fieldValue = field.Layout.Read(dataTarget, position + field.Offset);
+ field.SetValue(blank, fieldValue);
+ }
+ return blank;
+ }
+ }
+
+ public static partial class LayoutManagerExtensions
+ {
+ /// <summary>
+ /// Adds support for parsing types derived from TStruct. All the field types used within the TStruct types
+ /// must also have layouts available from the LayoutManager.
+ /// </summary>
+ public static LayoutManager AddTStructTypes(this LayoutManager layouts)
+ {
+ return AddTStructTypes(layouts, null);
+ }
+
+ /// <summary>
+ /// Adds support for parsing types derived from TStruct. All the field types used within the TStruct types
+ /// must also have layouts available from the LayoutManager.
+ /// </summary>
+ /// <param name="layouts">The layout manager that will hold the new layout</param>
+ /// <param name="enabledDefines">
+        /// The set of defines that can be used to enable optional fields decorated with the IfAttribute
+ /// </param>
+ public static LayoutManager AddTStructTypes(this LayoutManager layouts, IEnumerable<string> enabledDefines)
+ {
+ return AddReflectionTypes(layouts, enabledDefines, typeof(TStruct));
+ }
+
+ /// <summary>
+ /// Adds support for parsing types derived from _requiredBaseType_ by using reflection to interpret their fields.
+ /// All field types used within these types must also have layouts available from the LayoutManager.
+ /// </summary>
+ /// <param name="layouts"></param>
+ /// <param name="enabledDefines">
+        /// The set of defines that can be used to enable optional fields decorated with the IfAttribute
+ /// </param>
+ /// <param name="requiredBaseType"></param>
+ public static LayoutManager AddReflectionTypes(this LayoutManager layouts, IEnumerable<string> enabledDefines, Type requiredBaseType)
+ {
+ return layouts.AddReflectionTypes(enabledDefines, typeFilter: (type) => requiredBaseType.GetTypeInfo().IsAssignableFrom(type));
+ }
+
+ /// <summary>
+ /// Adds support for parsing types filtered by typeFilter from by using reflection to interpret their fields.
+ /// All field types used within these types must also have layouts available from the LayoutManager.
+ /// </summary>
+ /// <param name="layouts"></param>
+ /// <param name="enabledDefines">
+        /// The set of defines that can be used to enable optional fields decorated with the IfAttribute
+ /// </param>
+ /// <param name="typeFilter">return true if reflection should be used to layout the type</param>
+ public static LayoutManager AddReflectionTypes(this LayoutManager layouts, IEnumerable<string> enabledDefines, Func<Type, bool> typeFilter)
+ {
+ layouts.AddLayoutProvider((type, layoutManager) =>
+ {
+ if (!typeFilter(type))
+ {
+ return null;
+ }
+ return GetTStructLayout(type, layoutManager, enabledDefines);
+ });
+ return layouts;
+ }
+
+ private static ILayout GetTStructLayout(Type tStructType, LayoutManager layoutManager, IEnumerable<string> enabledDefines)
+ {
+ enabledDefines ??= Array.Empty<string>();
+
+ TypeInfo typeInfo = tStructType.GetTypeInfo();
+
+ PackAttribute pack = typeInfo.GetCustomAttributes().Where(attr => attr is PackAttribute).Cast<PackAttribute>().SingleOrDefault();
+
+ FieldInfo[] reflectionFields = typeInfo.GetFields(BindingFlags.DeclaredOnly | BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic);
+ reflectionFields = reflectionFields.OrderBy(f => f.MetadataToken).ToArray();
+ reflectionFields = reflectionFields.Where(f => !f.DeclaringType.Equals(typeof(TStruct))).ToArray();
+ reflectionFields = reflectionFields.Where(f => IsFieldIncludedInDefines(f, enabledDefines)).ToArray();
+ TField[] tFields = new TField[reflectionFields.Length];
+
+ uint alignCeiling = pack?.Pack ?? 8;
+ uint biggestAlignmentSoFar = 1;
+ uint curOffset = 0;
+
+ ILayout parentLayout = null;
+ Type baseType = typeInfo.BaseType;
+ if (!baseType.Equals(typeof(TStruct)))
+ {
+ // Treat base type as first member.
+ parentLayout = layoutManager.GetLayout(baseType);
+ uint align = Math.Min(parentLayout.NaturalAlignment, alignCeiling);
+ biggestAlignmentSoFar = Math.Max(biggestAlignmentSoFar, align);
+ curOffset += parentLayout.SizeAsBaseType;
+ }
+
+ // build the field list
+ for (int i = 0; i < reflectionFields.Length; i++)
+ {
+ ILayout fieldLayout = GetFieldLayout(reflectionFields[i], layoutManager);
+ uint fieldSize = fieldLayout.Size;
+ uint align = fieldLayout.NaturalAlignment;
+ align = Math.Min(align, alignCeiling);
+ biggestAlignmentSoFar = Math.Max(biggestAlignmentSoFar, align);
+ curOffset = AlignUp(curOffset, align);
+ tFields[i] = new TField(reflectionFields[i], fieldLayout, curOffset);
+ curOffset += fieldSize;
+ }
+ curOffset = AlignUp(curOffset, biggestAlignmentSoFar);
+
+ uint sizeAsBaseType = curOffset;
+ if (curOffset == 0)
+ {
+ curOffset = 1; // As with C++, zero-length struct not allowed (except as parent of another struct).
+ }
+ IField[] totalFields;
+ if (parentLayout != null)
+ {
+ totalFields = parentLayout.Fields.Concat(tFields).ToArray();
+ }
+ else
+ {
+ totalFields = tFields;
+ }
+ TLayout layout = new(tStructType, curOffset, biggestAlignmentSoFar, sizeAsBaseType, totalFields);
+ foreach (TField field in tFields)
+ {
+ field.DeclaringLayout = layout;
+ }
+ return layout;
+ }
+
+ private static bool IsFieldIncludedInDefines(FieldInfo fieldInfo, IEnumerable<string> enabledDefines)
+ {
+ IEnumerable<IfAttribute> attrs = fieldInfo.GetCustomAttributes<IfAttribute>();
+ foreach (IfAttribute attr in attrs)
+ {
+ if (!enabledDefines.Contains(attr.DefineName))
+ {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private static ILayout GetFieldLayout(FieldInfo fieldInfo, LayoutManager layoutManager)
+ {
+ ILayout fieldLayout;
+ Type fieldType = fieldInfo.FieldType;
+ if (fieldType.IsArray)
+ {
+ ArraySizeAttribute ca = (ArraySizeAttribute)fieldInfo.GetCustomAttributes(typeof(ArraySizeAttribute)).FirstOrDefault();
+ if (ca == null)
+ {
+ throw new LayoutException("Array typed fields must use an ArraySize attribute to indicate their size");
+ }
+ fieldLayout = layoutManager.GetArrayLayout(fieldType, ca.NumElements);
+ }
+ else
+ {
+ fieldLayout = layoutManager.GetLayout(fieldType);
+ }
+
+ if (!fieldLayout.IsFixedSize)
+ {
+ throw new LayoutException(fieldInfo.Name + " is not a fixed size field. Only fixed size fields are supported in structures");
+ }
+
+ return fieldLayout;
+ }
+
+ private static uint AlignUp(uint p, uint align)
+ {
+ uint remainder = (p % align);
+ if (remainder != 0)
+ {
+ p += (align - remainder);
+ }
+ return p;
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace Microsoft.FileFormats
+{
+ public class ValidationRule
+ {
+ private Func<bool> _checkFunc;
+ private ValidationRule[] _prereqs;
+
+ public ValidationRule(string errorMessage, Func<bool> checkFunc) : this(errorMessage, checkFunc, null) { }
+
+ public ValidationRule(string errorMessage, Func<bool> checkFunc, params ValidationRule[] prerequisiteValidations)
+ {
+ ErrorMessage = errorMessage;
+ _checkFunc = checkFunc;
+ _prereqs = prerequisiteValidations;
+ }
+
+ public string ErrorMessage { get; private set; }
+
+ public bool CheckPrerequisites()
+ {
+ return _prereqs == null || _prereqs.All(v => v.Check());
+ }
+
+ public bool Check()
+ {
+ return CheckPrerequisites() && _checkFunc();
+ }
+
+ public void CheckThrowing()
+ {
+ if (!Check())
+ {
+ throw new BadInputFormatException(ErrorMessage);
+ }
+ }
+ }
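+
+    // Usage sketch (hypothetical checks): prerequisite rules run before the
+    // dependent rule's own check, so "bad magic" is only reported when the
+    // file is big enough to contain a magic number at all.
+    //
+    //   ValidationRule hasHeader = new("file too small for header", () => reader.Length >= 64);
+    //   ValidationRule magicOk = new("bad magic", () => reader.Read<uint>(0) == expectedMagic, hasHeader);
+    //   magicOk.CheckThrowing();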
+}
--- /dev/null
+<Project Sdk="Microsoft.NET.Sdk">
+ <PropertyGroup>
+ <TargetFrameworks>netstandard2.0</TargetFrameworks>
+ <NoWarn>$(NoWarn);CA1852</NoWarn>
+ <IsPackable>true</IsPackable>
+ <IsShipping>false</IsShipping>
+ </PropertyGroup>
+
+ <ItemGroup>
+ <PackageReference Include="System.Text.Json" Version="8.0.3" />
+ </ItemGroup>
+
+ <ItemGroup>
+ <ProjectReference Include="$(MSBuildThisFileDirectory)..\Microsoft.SymbolStore\Microsoft.SymbolStore.csproj" />
+ </ItemGroup>
+</Project>
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text.Json;
+using System.Text.Json.Serialization;
+using Microsoft.SymbolStore;
+using Microsoft.SymbolStore.KeyGenerators;
+
+namespace Microsoft.SymbolManifestGenerator
+{
+ public static class SymbolManifestGenerator
+ {
+ private const int Private = 340;
+
+ public static void GenerateManifest(ITracer tracer, DirectoryInfo dir, string manifestFileName)
+ {
+ ManifestDataV1 manifestData = new();
+
+ FileInfo[] allFiles = dir.GetFiles("*", SearchOption.AllDirectories);
+ foreach (FileInfo file in allFiles)
+ {
+ using FileStream fileStream = file.OpenRead();
+ SymbolStoreFile symbolStoreFile = new(fileStream, file.FullName);
+ FileKeyGenerator generator = new(tracer, symbolStoreFile);
+ if (!generator.IsValid())
+ {
+ tracer.Information($"Could not generate a valid FileKeyGenerator from file '{file.FullName}'. Skipping.");
+ continue;
+ }
+
+ foreach (SymbolStoreKey clrKey in generator.GetKeys(KeyTypeFlags.ClrKeys))
+ {
+ FileInfo specialFile = ResolveClrKeyToUniqueFileFromAllFiles(allFiles, clrKey);
+ if (specialFile == null)
+ {
+ tracer.Information($"Known special file '{clrKey.FullPathName}' for runtime module '{file.FullName}' does not exist in directory '{file.DirectoryName}'. Skipping.");
+ continue;
+ }
+
+ string basedirRelativePath = specialFile.FullName.Replace(dir.FullName, string.Empty).TrimStart(Path.DirectorySeparatorChar);
+ string fileHash = CalculateSHA512(specialFile);
+
+ ManifestDataEntry manifestDataEntry = new()
+ {
+ BasedirRelativePath = basedirRelativePath,
+ SymbolKey = clrKey.Index,
+ Sha512 = fileHash,
+ DebugInformationLevel = Private,
+ LegacyDebugInformationLevel = Private
+ };
+
+ manifestData.Entries.Add(manifestDataEntry);
+ }
+ }
+
+ JsonSerializerOptions serializeOptions = new()
+ {
+ PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
+ WriteIndented = true
+ };
+ string manifestDataContent = JsonSerializer.Serialize(manifestData, serializeOptions);
+ File.WriteAllText(manifestFileName, manifestDataContent);
+ }
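+
+        // Usage sketch (hypothetical paths; any ITracer implementation works):
+        //
+        //   SymbolManifestGenerator.GenerateManifest(
+        //       tracer,
+        //       new DirectoryInfo("/path/to/runtime/layout"),
+        //       "symbolmanifest.json");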
+
+        // Special files associated with a particular runtime module are not guaranteed to be in the same directory as the runtime module.
+        // As such, the directory for which a manifest is being generated must contain at most one file with the same name as the special file.
+ private static FileInfo ResolveClrKeyToUniqueFileFromAllFiles(FileInfo[] allFiles, SymbolStoreKey clrKey)
+ {
+ string clrKeyFileName = Path.GetFileName(clrKey.FullPathName);
+
+ FileInfo matchingSymbolFileOnDisk = allFiles.SingleOrDefault(file => FileHasClrKeyFileName(file, clrKeyFileName));
+
+ return matchingSymbolFileOnDisk;
+
+ static bool FileHasClrKeyFileName(FileInfo file, string clrKeyFileName)
+ {
+ return file.Name.Equals(clrKeyFileName, StringComparison.OrdinalIgnoreCase);
+ }
+ }
+
+ private static string CalculateSHA512(FileInfo file)
+ {
+ using FileStream fileReadStream = file.OpenRead();
+ using System.Security.Cryptography.SHA512 sha = System.Security.Cryptography.SHA512.Create();
+ byte[] hashValueBytes = sha.ComputeHash(fileReadStream);
+ return BitConverter.ToString(hashValueBytes).Replace("-", "");
+ }
+
+ private class ManifestFileVersion
+ {
+ public string Version { get; set; }
+ }
+
+ private class ManifestDataV1 : ManifestFileVersion
+ {
+ public List<ManifestDataEntry> Entries { get; set; }
+
+ public ManifestDataV1()
+ {
+ Version = "1";
+ Entries = new List<ManifestDataEntry>();
+ }
+ }
+
+ private class ManifestDataEntry
+ {
+ public string BasedirRelativePath { get; set; }
+ public string SymbolKey { get; set; }
+ public string Sha512 { get; set; }
+ public int DebugInformationLevel { get; set; }
+ [JsonPropertyName("DebugInformationLevel")]
+ public int LegacyDebugInformationLevel { get; set; }
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Security.Cryptography;
+using System.Text;
+using Microsoft.FileFormats.PE;
+
+namespace Microsoft.SymbolStore
+{
+ internal sealed class ChecksumValidator
+ {
+ private const string pdbStreamName = "#Pdb";
+ private const uint pdbIdSize = 20;
+
+ internal static void Validate(ITracer tracer, Stream pdbStream, IEnumerable<PdbChecksum> pdbChecksums)
+ {
+ uint offset = 0;
+
+ byte[] bytes = new byte[pdbStream.Length];
+ byte[] pdbId = new byte[pdbIdSize];
+ if (pdbStream.Read(bytes, offset: 0, count: bytes.Length) != bytes.Length)
+ {
+ throw new InvalidChecksumException("Unexpected stream length");
+ }
+
+ try
+ {
+ offset = GetPdbStreamOffset(pdbStream);
+ }
+ catch (Exception ex)
+ {
+ tracer.Error(ex.Message);
+ throw;
+ }
+
+ // Make a copy of the pdb Id
+ Array.Copy(bytes, offset, pdbId, 0, pdbIdSize);
+
+ // Zero out the pdb Id
+ for (int i = 0; i < pdbIdSize; i++)
+ {
+ bytes[i + offset] = 0;
+ }
+
+ bool algorithmNameKnown = false;
+ foreach (PdbChecksum checksum in pdbChecksums)
+ {
+ tracer.Information($"Testing checksum: {checksum}");
+
+ HashAlgorithm algorithm = HashAlgorithm.Create(checksum.AlgorithmName);
+ if (algorithm != null)
+ {
+ algorithmNameKnown = true;
+ byte[] hash = algorithm.ComputeHash(bytes);
+ if (hash.SequenceEqual(checksum.Checksum))
+ {
+ // If any of the checksums are OK, we're good
+ tracer.Information($"Found checksum match {checksum}");
+ // Restore the pdb Id
+ Array.Copy(pdbId, 0, bytes, offset, pdbIdSize);
+                        // Restore the stream position
+ pdbStream.Seek(0, SeekOrigin.Begin);
+
+ return;
+ }
+ }
+ }
+
+ if (!algorithmNameKnown)
+ {
+ string algorithmNames = string.Join(" ", pdbChecksums.Select(c => c.AlgorithmName));
+ throw new InvalidChecksumException($"Unknown hash algorithm: {algorithmNames}");
+ }
+
+ throw new InvalidChecksumException("PDB checksum mismatch");
+ }
+
+ private static uint GetPdbStreamOffset(Stream pdbStream)
+ {
+ pdbStream.Position = 0;
+ using (BinaryReader reader = new(pdbStream, Encoding.UTF8, leaveOpen: true))
+ {
+ pdbStream.Seek(4 + // Signature
+ 2 + // Version Major
+ 2 + // Version Minor
+                               4,  // Reserved
+ SeekOrigin.Begin);
+
+ // skip the version string
+ uint versionStringSize = reader.ReadUInt32();
+
+ pdbStream.Seek(versionStringSize, SeekOrigin.Current);
+
+ // storage header
+ pdbStream.Seek(2, SeekOrigin.Current);
+
+ // read the stream headers
+ ushort streamCount = reader.ReadUInt16();
+ uint streamOffset;
+ string streamName;
+
+ for (int i = 0; i < streamCount; i++)
+ {
+ streamOffset = reader.ReadUInt32();
+ // stream size
+ pdbStream.Seek(4, SeekOrigin.Current);
+ streamName = reader.ReadNullTerminatedString();
+
+ if (streamName == pdbStreamName)
+ {
+ // We found it!
+ return streamOffset;
+ }
+
+                    // stream headers are aligned on a four byte boundary
+ if (pdbStream.Position % 4 != 0)
+ {
+ pdbStream.Seek(4 - pdbStream.Position % 4, SeekOrigin.Current);
+ }
+ }
+ }
+
+ throw new ArgumentException("We have a file with a metadata pdb signature but no pdb stream");
+ }
+ }
+
+ public static class BinaryReaderExtensions
+ {
+ public static string ReadNullTerminatedString(this BinaryReader stream)
+ {
+ StringBuilder builder = new();
+ char ch;
+ while ((ch = stream.ReadChar()) != 0)
+ {
+ builder.Append(ch);
+ }
+ return builder.ToString();
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+
+namespace Microsoft.SymbolStore
+{
+ /// <summary>
+ /// A simple trace/logging interface.
+ /// </summary>
+ public interface ITracer
+ {
+ void WriteLine(string message);
+
+ void WriteLine(string format, params object[] arguments);
+
+ void Information(string message);
+
+ void Information(string format, params object[] arguments);
+
+ void Warning(string message);
+
+ void Warning(string format, params object[] arguments);
+
+ void Error(string message);
+
+ void Error(string format, params object[] arguments);
+
+ void Verbose(string message);
+
+ void Verbose(string format, params object[] arguments);
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+
+namespace Microsoft.SymbolStore
+{
+ public class InvalidChecksumException : Exception
+ {
+ public InvalidChecksumException(string message) : base(message)
+ {
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Collections.Generic;
+using System.Linq;
+using Microsoft.FileFormats;
+using Microsoft.FileFormats.ELF;
+using Microsoft.FileFormats.MachO;
+using Microsoft.FileFormats.PE;
+
+namespace Microsoft.SymbolStore.KeyGenerators
+{
+ public class ELFCoreKeyGenerator : KeyGenerator
+ {
+ private readonly ELFCoreFile _core;
+
+ public ELFCoreKeyGenerator(ITracer tracer, SymbolStoreFile file)
+ : base(tracer)
+ {
+ StreamAddressSpace dataSource = new(file.Stream);
+ _core = new ELFCoreFile(dataSource);
+ }
+
+ public override bool IsValid()
+ {
+ return _core.IsValid();
+ }
+
+ public override bool IsDump()
+ {
+ return true;
+ }
+
+ public override IEnumerable<SymbolStoreKey> GetKeys(KeyTypeFlags flags)
+ {
+ if (IsValid())
+ {
+ return _core.LoadedImages
+ .Select((ELFLoadedImage loadedImage) => CreateGenerator(loadedImage))
+ .Where((KeyGenerator generator) => generator != null)
+ .SelectMany((KeyGenerator generator) => generator.GetKeys(flags));
+ }
+ return SymbolStoreKey.EmptyArray;
+ }
+
+ private KeyGenerator CreateGenerator(ELFLoadedImage loadedImage)
+ {
+ try
+ {
+ if (loadedImage.Image.IsValid())
+ {
+ return new ELFFileKeyGenerator(Tracer, loadedImage.Image, loadedImage.Path);
+ }
+ // TODO - mikem 7/1/17 - need to figure out a better way to determine the file vs loaded layout
+ bool layout = loadedImage.Path.StartsWith("/");
+ RelativeAddressSpace reader = new(_core.DataSource, loadedImage.LoadAddress, _core.DataSource.Length);
+ PEFile peFile = new(reader, layout);
+ if (peFile.IsValid())
+ {
+ return new PEFileKeyGenerator(Tracer, peFile, loadedImage.Path);
+ }
+                // Check if this is a MachO module in an ELF 5.0.x MacOS dump
+ MachOFile machOFile = new(reader, 0, true);
+ if (machOFile.IsValid())
+ {
+ return new MachOFileKeyGenerator(Tracer, machOFile, loadedImage.Path);
+ }
+ Tracer.Warning("Unknown ELF core image {0:X16} {1}", loadedImage.LoadAddress, loadedImage.Path);
+ }
+ catch (InvalidVirtualAddressException ex)
+ {
+ Tracer.Error("{0}: {1:X16} {2}", ex.Message, loadedImage.LoadAddress, loadedImage.Path);
+ }
+ return null;
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.IO;
+using System.Linq;
+using Microsoft.FileFormats;
+using Microsoft.FileFormats.ELF;
+
+namespace Microsoft.SymbolStore.KeyGenerators
+{
+ public class ELFFileKeyGenerator : KeyGenerator
+ {
+ private const string IdentityPrefix = "elf-buildid";
+ private const string SymbolPrefix = "elf-buildid-sym";
+ private const string CoreClrPrefix = "elf-buildid-coreclr";
+ private const string CoreClrFileName = "libcoreclr.so";
+
+ /// <summary>
+ /// Symbol file extensions. The first one is the default symbol file extension used by .NET Core.
+ /// </summary>
+ private static readonly string[] s_symbolFileExtensions = { ".dbg", ".debug" };
+
+ /// <summary>
+ /// List of special clr files that are also indexed with libcoreclr.so's key.
+ /// </summary>
+ private static readonly string[] s_specialFiles = new string[] { "libmscordaccore.so", "libmscordbi.so", "mscordaccore.dll", "mscordbi.dll" };
+ private static readonly string[] s_sosSpecialFiles = new string[] { "libsos.so", "SOS.NETCore.dll" };
+
+ private static readonly HashSet<string> s_coreClrSpecialFiles = new(s_specialFiles.Concat(s_sosSpecialFiles));
+ private static readonly HashSet<string> s_dacdbiSpecialFiles = new(s_specialFiles);
+
+ private readonly ELFFile _elfFile;
+ private readonly string _path;
+
+ public ELFFileKeyGenerator(ITracer tracer, ELFFile elfFile, string path)
+ : base(tracer)
+ {
+ _elfFile = elfFile;
+ _path = path;
+ }
+
+ public ELFFileKeyGenerator(ITracer tracer, SymbolStoreFile file)
+ : this(tracer, new ELFFile(new StreamAddressSpace(file.Stream)), file.FileName)
+ {
+ }
+
+ public override bool IsValid()
+ {
+ return _elfFile.IsValid() &&
+ (_elfFile.Header.Type == ELFHeaderType.Executable || _elfFile.Header.Type == ELFHeaderType.Shared || _elfFile.Header.Type == ELFHeaderType.Relocatable);
+ }
+
+ public override IEnumerable<SymbolStoreKey> GetKeys(KeyTypeFlags flags)
+ {
+ if (IsValid())
+ {
+ byte[] buildId = _elfFile.BuildID;
+ if (NormalizeBuildId(ref buildId))
+ {
+ bool symbolFile = false;
+ try
+ {
+ symbolFile = Array.Exists(_elfFile.Sections, section => (section.Name.StartsWith(".debug_info") || section.Name.StartsWith(".zdebug_info")));
+ }
+ catch (Exception ex) when
+ (ex is InvalidVirtualAddressException ||
+ ex is ArgumentOutOfRangeException ||
+ ex is IndexOutOfRangeException ||
+ ex is BadInputFormatException)
+ {
+ // This could occur when trying to read sections for an ELF image grabbed from a core dump
+                        // In that case, fall back to checking the file extension
+ symbolFile = Array.IndexOf(s_symbolFileExtensions, Path.GetExtension(_path)) != -1;
+ }
+
+ string symbolFileName = GetSymbolFileName();
+ foreach (SymbolStoreKey key in GetKeys(flags, _path, buildId, symbolFile, symbolFileName))
+ {
+ yield return key;
+ }
+ if ((flags & KeyTypeFlags.HostKeys) != 0)
+ {
+ if (_elfFile.Header.Type == ELFHeaderType.Executable)
+ {
+ // The host program as itself (usually dotnet)
+ yield return BuildKey(_path, IdentityPrefix, buildId);
+
+ // apphost downloaded as the host program name
+ yield return BuildKey(_path, IdentityPrefix, buildId, "apphost");
+ }
+ }
+ }
+ else
+ {
+ Tracer.Error("Invalid ELF BuildID '{0}' for {1}", buildId == null ? "<null>" : ToHexString(buildId), _path);
+ }
+ }
+ }
+
+ /// <summary>
+ /// Creates the ELF file symbol store keys.
+ /// </summary>
+ /// <param name="flags">type of keys to return</param>
+ /// <param name="path">file name and path</param>
+ /// <param name="buildId">ELF file uuid bytes</param>
+ /// <param name="symbolFile">if true, use the symbol file tag</param>
+ /// <param name="symbolFileName">name of symbol file (from .gnu_debuglink) or null</param>
+ /// <returns>symbol store keys</returns>
+ public static IEnumerable<SymbolStoreKey> GetKeys(KeyTypeFlags flags, string path, byte[] buildId, bool symbolFile, string symbolFileName)
+ {
+ Debug.Assert(path != null);
+ if (NormalizeBuildId(ref buildId))
+ {
+ string fileName = GetFileName(path);
+
+ if ((flags & KeyTypeFlags.IdentityKey) != 0)
+ {
+ if (symbolFile)
+ {
+ yield return BuildKey(path, SymbolPrefix, buildId, "_.debug");
+ }
+ else
+ {
+ bool clrSpecialFile = s_coreClrSpecialFiles.Contains(fileName);
+ yield return BuildKey(path, IdentityPrefix, buildId, clrSpecialFile);
+ }
+ }
+ if (!symbolFile)
+ {
+ // This is a workaround for 5.0 where the ELF file type of dotnet isn't Executable but
+ // Shared. It doesn't work for self-contained apps (apphost renamed to host program).
+ if ((flags & KeyTypeFlags.HostKeys) != 0 && fileName == "dotnet")
+ {
+ yield return BuildKey(path, IdentityPrefix, buildId, clrSpecialFile: false);
+ }
+ if ((flags & KeyTypeFlags.RuntimeKeys) != 0 && fileName == CoreClrFileName)
+ {
+ yield return BuildKey(path, IdentityPrefix, buildId);
+ }
+ if ((flags & KeyTypeFlags.SymbolKey) != 0)
+ {
+ if (string.IsNullOrEmpty(symbolFileName))
+ {
+ symbolFileName = path + s_symbolFileExtensions[0];
+ }
+ yield return BuildKey(symbolFileName, SymbolPrefix, buildId, "_.debug");
+ }
+ if ((flags & (KeyTypeFlags.ClrKeys | KeyTypeFlags.DacDbiKeys)) != 0)
+ {
+ // Creates all the special CLR keys if the path is the coreclr module for this platform
+ if (fileName == CoreClrFileName)
+ {
+ foreach (string specialFileName in (flags & KeyTypeFlags.ClrKeys) != 0 ? s_coreClrSpecialFiles : s_dacdbiSpecialFiles)
+ {
+ yield return BuildKey(specialFileName, CoreClrPrefix, buildId);
+ }
+ }
+ }
+ }
+ }
+ else
+ {
+ Debug.Fail($"Invalid ELF BuildId '{(buildId == null ? "<null>" : ToHexString(buildId))}' for {path}");
+ }
+ }
+
+ private string GetSymbolFileName()
+ {
+ try
+ {
+ ELFSection section = _elfFile.FindSectionByName(".gnu_debuglink");
+ if (section != null)
+ {
+ return section.Contents.Read<string>(0);
+ }
+ }
+ catch (Exception ex) when
+ (ex is InvalidVirtualAddressException ||
+ ex is ArgumentOutOfRangeException ||
+ ex is IndexOutOfRangeException ||
+ ex is BadInputFormatException)
+ {
+ Tracer.Verbose("ELF .gnu_debuglink section in {0}: {1}", _path, ex.Message);
+ }
+ return null;
+ }
+
+ /// <summary>
+        /// Zero-extends build-ids that are between 8 and 20 bytes long (such as 16-byte MD5 or UUID build-ids)
+        /// to the canonical 20-byte length.
+ /// </summary>
+ /// <param name="buildId">Reference to ELF build-id. This build-id must be between 8 and 20 bytes in length.</param>
+ /// <returns>True if the build-id is compliant and could be resized and padded. False otherwise.</returns>
+ private static bool NormalizeBuildId(ref byte[] buildId)
+ {
+ if (buildId == null || buildId.Length > 20 || buildId.Length < 8)
+ {
+ return false;
+ }
+ int oldLength = buildId.Length;
+ Array.Resize(ref buildId, 20);
+ for (int i = oldLength; i < buildId.Length; i++)
+ {
+ buildId[i] = 0;
+ }
+ return true;
+ }
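+
+        // Worked example (hypothetical id): a 16-byte MD5 build-id comes back as
+        // the same 16 bytes followed by four zero bytes, giving the canonical
+        // 20-byte length used by the keys above.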
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Collections.Generic;
+using System.Linq;
+
+namespace Microsoft.SymbolStore.KeyGenerators
+{
+ /// <summary>
+ /// Generates a key for any kind of file (ELF core/MachO core/Minidump,
+ /// ELF/MachO/PE binary, PDB, etc).
+ /// </summary>
+ public class FileKeyGenerator : KeyGenerator
+ {
+ private readonly SymbolStoreFile _file;
+
+ public FileKeyGenerator(ITracer tracer, SymbolStoreFile file)
+ : base(tracer)
+ {
+ _file = file;
+ }
+
+ public override bool IsValid()
+ {
+ return GetGenerators().Any((generator) => generator.IsValid());
+ }
+
+ public override bool IsDump()
+ {
+ return GetGenerators().Any((generator) => generator.IsValid() && generator.IsDump());
+ }
+
+ public override IEnumerable<SymbolStoreKey> GetKeys(KeyTypeFlags flags)
+ {
+ foreach (KeyGenerator generator in GetGenerators())
+ {
+ _file.Stream.Position = 0;
+ if (generator.IsValid())
+ {
+ return generator.GetKeys(flags);
+ }
+ }
+ Tracer.Verbose("Unknown file type: {0}", _file.FileName);
+ return SymbolStoreKey.EmptyArray;
+ }
+
+ private IEnumerable<KeyGenerator> GetGenerators()
+ {
+ if (_file.Stream.Length > 0)
+ {
+ yield return new ELFCoreKeyGenerator(Tracer, _file);
+ yield return new MachCoreKeyGenerator(Tracer, _file);
+ yield return new MinidumpKeyGenerator(Tracer, _file);
+ yield return new ELFFileKeyGenerator(Tracer, _file);
+ yield return new PEFileKeyGenerator(Tracer, _file);
+ yield return new MachOFatHeaderKeyGenerator(Tracer, _file);
+ yield return new MachOFileKeyGenerator(Tracer, _file);
+ yield return new PDBFileKeyGenerator(Tracer, _file);
+ yield return new PortablePDBFileKeyGenerator(Tracer, _file);
+ yield return new PerfMapFileKeyGenerator(Tracer, _file);
+ }
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text;
+using Microsoft.FileFormats.PE;
+
+namespace Microsoft.SymbolStore.KeyGenerators
+{
+ /// <summary>
+ /// Type of keys to generate
+ /// </summary>
+ [Flags]
+ public enum KeyTypeFlags
+ {
+ /// <summary>
+ /// No keys.
+ /// </summary>
+ None = 0x00,
+
+ /// <summary>
+ /// Generate the key of the binary or file itself.
+ /// </summary>
+ IdentityKey = 0x01,
+
+ /// <summary>
+ /// Generate the symbol key of the binary (if one).
+ /// </summary>
+ SymbolKey = 0x02,
+
+ /// <summary>
+ /// Generate the keys for the DAC/SOS modules for a coreclr module.
+ /// </summary>
+ ClrKeys = 0x04,
+
+ /// <summary>
+ /// Forces the key generators to create a Windows Pdb key even when
+ /// the DLL debug record entry is marked as portable. Used when both
+ /// the Portable and Windows PDBs are available on the symbol server.
+ /// </summary>
+ ForceWindowsPdbs = 0x08,
+
+ /// <summary>
+ /// Generate keys for the host program. This includes the exe or main
+ /// module key (usually "dotnet") and an "apphost" symbol index using
+ /// the exe or main module's build id for self-contained apps.
+ /// </summary>
+ HostKeys = 0x10,
+
+ /// <summary>
+ /// Return only the DAC (including any cross-OS DACs) and DBI module
+ /// keys. Does not include any SOS binaries.
+ /// </summary>
+ DacDbiKeys = 0x20,
+
+ /// <summary>
+ /// Include the runtime modules (coreclr.dll, clrjit.dll, clrgc.dll,
+ /// libcoreclr.so, libclrjit.so, libcoreclr.dylib, etc.)
+ /// </summary>
+ RuntimeKeys = 0x40,
+
+ /// <summary>
+ /// Generate the r2r perfmap key of the binary (if one exists).
+ /// </summary>
+ PerfMapKeys = 0x80
+ }
+
+ /// <summary>
+ /// The base class for all the key generators. They can be for individual files
+ /// or a group of file types.
+ /// </summary>
+ public abstract class KeyGenerator
+ {
+ /// <summary>
+ /// Trace/logging source
+ /// </summary>
+ protected readonly ITracer Tracer;
+
+ /// <summary>
+ /// Key generator base class.
+ /// </summary>
+ /// <param name="tracer">logging</param>
+ public KeyGenerator(ITracer tracer)
+ {
+ Tracer = tracer;
+ }
+
+ /// <summary>
+ /// Returns true if the key generator can get keys for this file or binary.
+ /// </summary>
+ public abstract bool IsValid();
+
+ /// <summary>
+ /// Returns true if file is a mini or core dump.
+ /// </summary>
+ public virtual bool IsDump()
+ {
+ return false;
+ }
+
+ /// <summary>
+ /// Returns the symbol store keys for this file or binary.
+ /// </summary>
+ /// <param name="flags">what keys to get</param>
+ public abstract IEnumerable<SymbolStoreKey> GetKeys(KeyTypeFlags flags);
+
+ /// <summary>
+ /// Key building helper for "file_name/string_id/file_name" formats.
+ /// </summary>
+ /// <param name="path">full path of file or binary</param>
+ /// <param name="id">id string</param>
+ /// <param name="clrSpecialFile">if true, the file is one the clr special files</param>
+ /// <param name="pdbChecksums">Checksums of pdb file. May be null.</param>
+ /// <returns>key</returns>
+ protected static SymbolStoreKey BuildKey(string path, string id, bool clrSpecialFile = false, IEnumerable<PdbChecksum> pdbChecksums = null)
+ {
+ string file = GetFileName(path).ToLowerInvariant();
+ return BuildKey(path, null, id, file, clrSpecialFile, pdbChecksums);
+ }
+
+ /// <summary>
+ /// Key building helper for "prefix/string_id/file_name" formats.
+ /// </summary>
+ /// <param name="path">full path of file or binary</param>
+ /// <param name="prefix">optional id prefix</param>
+ /// <param name="id">build id or uuid</param>
+ /// <param name="clrSpecialFile">if true, the file is one the clr special files</param>
+ /// <param name="pdbChecksums">Checksums of pdb file. May be null.</param>
+ /// <returns>key</returns>
+ protected static SymbolStoreKey BuildKey(string path, string prefix, byte[] id, bool clrSpecialFile = false, IEnumerable<PdbChecksum> pdbChecksums = null)
+ {
+ string file = GetFileName(path).ToLowerInvariant();
+ return BuildKey(path, prefix, id, file, clrSpecialFile, pdbChecksums);
+ }
+
+ /// <summary>
+ /// Key building helper for "prefix/byte_sequence_id/file_name" formats.
+ /// </summary>
+ /// <param name="path">full path of file or binary</param>
+ /// <param name="prefix">optional id prefix</param>
+ /// <param name="id">build id or uuid</param>
+ /// <param name="file">file name only</param>
+ /// <param name="clrSpecialFile">if true, the file is one the clr special files</param>
+ /// <param name="pdbChecksums">Checksums of pdb file. May be null.</param>
+ /// <returns>key</returns>
+ protected static SymbolStoreKey BuildKey(string path, string prefix, byte[] id, string file, bool clrSpecialFile = false, IEnumerable<PdbChecksum> pdbChecksums = null)
+ {
+ return BuildKey(path, prefix, ToHexString(id), file, clrSpecialFile, pdbChecksums);
+ }
+
+ /// <summary>
+ /// Key building helper for "prefix/byte_sequence_id/file_name" formats.
+ /// </summary>
+ /// <param name="path">full path of file or binary</param>
+ /// <param name="prefix">optional id prefix</param>
+ /// <param name="id">build id or uuid</param>
+ /// <param name="file">file name only</param>
+ /// <param name="clrSpecialFile">if true, the file is one the clr special files</param>
+ /// <param name="pdbChecksums">Checksums of pdb file. May be null.</param>
+ /// <returns>key</returns>
+ protected static SymbolStoreKey BuildKey(string path, string prefix, string id, string file, bool clrSpecialFile = false, IEnumerable<PdbChecksum> pdbChecksums = null)
+ {
+ StringBuilder key = new();
+ key.Append(file);
+ key.Append('/');
+ if (prefix != null)
+ {
+ key.Append(prefix);
+ key.Append('-');
+ }
+ key.Append(id);
+ key.Append('/');
+ key.Append(file);
+ return new SymbolStoreKey(key.ToString(), path, clrSpecialFile, pdbChecksums);
+ }
+
+ /// <summary>
+ /// Convert an array of bytes to a lower case hex string.
+ /// </summary>
+ /// <param name="bytes">array of bytes</param>
+ /// <returns>hex string</returns>
+ public static string ToHexString(byte[] bytes)
+ {
+ if (bytes == null)
+ {
+ throw new ArgumentNullException(nameof(bytes));
+ }
+ return string.Concat(bytes.Select(b => b.ToString("x2")));
+ }
+
+ /// <summary>
+ /// The back slashes are changed to forward slashes because Path.GetFileName doesn't work
+ /// on Linux/MacOS if there are backslashes. Both back and forward slashes work on Windows.
+ /// </summary>
+ /// <param name="path">possible windows path</param>
+ /// <returns>just the file name</returns>
+ internal static string GetFileName(string path)
+ {
+ return Path.GetFileName(path.Replace('\\', '/'));
+ }
+ }
+}
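All of the BuildKey overloads bottom out in the same "file_name/id/file_name" composition, with the file name lower-cased and an optional "prefix-" in front of the id. A small illustration (ComposeIndex just re-states the StringBuilder logic above; the example values are made up):

```csharp
using System;
using System.Text;

static class KeyFormatExample
{
    // Mirrors BuildKey's index composition; assumes `file` is already lower-cased.
    static string ComposeIndex(string file, string prefix, string id)
    {
        StringBuilder key = new();
        key.Append(file).Append('/');
        if (prefix != null)
        {
            key.Append(prefix).Append('-');
        }
        key.Append(id).Append('/').Append(file);
        return key.ToString();
    }

    static void Main()
    {
        // No prefix, e.g. a PE identity key:
        Console.WriteLine(ComposeIndex("coreclr.dll", null, "5E8F123422000"));
        // coreclr.dll/5E8F123422000/coreclr.dll

        // With a prefix, e.g. a MachO uuid key:
        Console.WriteLine(ComposeIndex("libcoreclr.dylib", "mach-uuid",
            "0123456789abcdef0123456789abcdef"));
        // libcoreclr.dylib/mach-uuid-0123456789abcdef0123456789abcdef/libcoreclr.dylib
    }
}
```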
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Collections.Generic;
+using System.Linq;
+using Microsoft.FileFormats;
+using Microsoft.FileFormats.MachO;
+using Microsoft.FileFormats.PE;
+
+namespace Microsoft.SymbolStore.KeyGenerators
+{
+ public class MachCoreKeyGenerator : KeyGenerator
+ {
+ private readonly MachCore _core;
+
+ public MachCoreKeyGenerator(ITracer tracer, SymbolStoreFile file)
+ : base(tracer)
+ {
+ StreamAddressSpace dataSource = new(file.Stream);
+ _core = new MachCore(dataSource);
+ }
+
+ public override bool IsValid()
+ {
+ return _core.IsValid();
+ }
+
+ public override bool IsDump()
+ {
+ return true;
+ }
+
+ public override IEnumerable<SymbolStoreKey> GetKeys(KeyTypeFlags flags)
+ {
+ if (IsValid())
+ {
+ return _core.LoadedImages
+ .Select((MachLoadedImage loadedImage) => CreateGenerator(loadedImage))
+ .Where((KeyGenerator generator) => generator != null)
+ .SelectMany((KeyGenerator generator) => generator.GetKeys(flags));
+ }
+ return SymbolStoreKey.EmptyArray;
+ }
+
+ private KeyGenerator CreateGenerator(MachLoadedImage loadedImage)
+ {
+ try
+ {
+ if (loadedImage.Image.IsValid())
+ {
+ return new MachOFileKeyGenerator(Tracer, loadedImage.Image, loadedImage.Path);
+ }
+ // TODO - mikem 7/1/17 - need to figure out a better way to determine the file vs loaded layout
+ bool layout = loadedImage.Path.StartsWith("/");
+ IAddressSpace dataSource = _core.VirtualAddressReader.DataSource;
+ PEFile peFile = new(new RelativeAddressSpace(dataSource, loadedImage.LoadAddress, dataSource.Length), layout);
+ if (peFile.IsValid())
+ {
+ return new PEFileKeyGenerator(Tracer, peFile, loadedImage.Path);
+ }
+ Tracer.Warning("Unknown Mach core image {0:X16} {1}", loadedImage.LoadAddress, loadedImage.Path);
+ }
+ catch (InvalidVirtualAddressException ex)
+ {
+ Tracer.Error("{0}: {1:X16} {2}", ex.Message, loadedImage.LoadAddress, loadedImage.Path);
+ }
+ return null;
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Collections.Generic;
+using System.Linq;
+using Microsoft.FileFormats;
+using Microsoft.FileFormats.MachO;
+
+namespace Microsoft.SymbolStore.KeyGenerators
+{
+ public class MachOFatHeaderKeyGenerator : KeyGenerator
+ {
+ private readonly MachOFatFile _machoFatFile;
+ private readonly string _path;
+
+ public MachOFatHeaderKeyGenerator(ITracer tracer, SymbolStoreFile file)
+ : base(tracer)
+ {
+ _machoFatFile = new MachOFatFile(new StreamAddressSpace(file.Stream));
+ _path = file.FileName;
+ }
+
+ public override bool IsValid()
+ {
+ return _machoFatFile.IsValid();
+ }
+
+ public override IEnumerable<SymbolStoreKey> GetKeys(KeyTypeFlags flags)
+ {
+ if (IsValid())
+ {
+ return _machoFatFile.ArchSpecificFiles.Select((file) => new MachOFileKeyGenerator(Tracer, file, _path)).SelectMany((generator) => generator.GetKeys(flags));
+ }
+ return SymbolStoreKey.EmptyArray;
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Linq;
+using Microsoft.FileFormats;
+using Microsoft.FileFormats.MachO;
+
+namespace Microsoft.SymbolStore.KeyGenerators
+{
+ public class MachOFileKeyGenerator : KeyGenerator
+ {
+ /// <summary>
+ /// The default symbol file extension used by .NET Core.
+ /// </summary>
+ private const string SymbolFileExtension = ".dwarf";
+
+ private const string IdentityPrefix = "mach-uuid";
+ private const string SymbolPrefix = "mach-uuid-sym";
+ private const string CoreClrPrefix = "mach-uuid-coreclr";
+ private const string CoreClrFileName = "libcoreclr.dylib";
+
+ private static readonly string[] s_specialFiles = new string[] { "libmscordaccore.dylib", "libmscordbi.dylib" };
+ private static readonly string[] s_sosSpecialFiles = new string[] { "libsos.dylib", "SOS.NETCore.dll" };
+
+ private static readonly HashSet<string> s_coreClrSpecialFiles = new(s_specialFiles.Concat(s_sosSpecialFiles));
+ private static readonly HashSet<string> s_dacdbiSpecialFiles = new(s_specialFiles);
+
+ private readonly MachOFile _machoFile;
+ private readonly string _path;
+
+ public MachOFileKeyGenerator(ITracer tracer, MachOFile machoFile, string path)
+ : base(tracer)
+ {
+ _machoFile = machoFile;
+ _path = path;
+ }
+
+ public MachOFileKeyGenerator(ITracer tracer, SymbolStoreFile file)
+ : this(tracer, new MachOFile(new StreamAddressSpace(file.Stream)), file.FileName)
+ {
+ }
+
+ public override bool IsValid()
+ {
+ return _machoFile.IsValid() &&
+ (_machoFile.Header.FileType == MachHeaderFileType.Execute ||
+ _machoFile.Header.FileType == MachHeaderFileType.Dylib ||
+ _machoFile.Header.FileType == MachHeaderFileType.Dsym ||
+ _machoFile.Header.FileType == MachHeaderFileType.Bundle);
+ }
+
+ public override IEnumerable<SymbolStoreKey> GetKeys(KeyTypeFlags flags)
+ {
+ if (IsValid())
+ {
+ byte[] uuid = _machoFile.Uuid;
+ if (uuid != null && uuid.Length == 16)
+ {
+ bool symbolFile = _machoFile.Header.FileType == MachHeaderFileType.Dsym;
+ // TODO - mikem 1/23/18 - is there a way to get the name of the "linked" dwarf symbol file
+ foreach (SymbolStoreKey key in GetKeys(flags, _path, uuid, symbolFile, symbolFileName: null))
+ {
+ yield return key;
+ }
+ if ((flags & KeyTypeFlags.HostKeys) != 0)
+ {
+ if (_machoFile.Header.FileType == MachHeaderFileType.Execute)
+ {
+ // The host program as itself (usually dotnet)
+ yield return BuildKey(_path, IdentityPrefix, uuid);
+
+ // apphost downloaded as the host program name
+ yield return BuildKey(_path, IdentityPrefix, uuid, "apphost");
+ }
+ }
+ }
+ else
+ {
+ Tracer.Error("Invalid MachO uuid {0}", _path);
+ }
+ }
+ }
+
+ /// <summary>
+ /// Creates the MachO file symbol store keys.
+ /// </summary>
+ /// <param name="flags">type of keys to return</param>
+ /// <param name="path">file name and path</param>
+ /// <param name="uuid">macho file uuid bytes</param>
+ /// <param name="symbolFile">if true, use the symbol file tag</param>
+ /// <param name="symbolFileName">name of symbol file or null</param>
+ /// <returns>symbol store keys</returns>
+ public static IEnumerable<SymbolStoreKey> GetKeys(KeyTypeFlags flags, string path, byte[] uuid, bool symbolFile, string symbolFileName)
+ {
+ Debug.Assert(path != null);
+ Debug.Assert(uuid != null && uuid.Length == 16);
+
+ string fileName = GetFileName(path);
+
+ if ((flags & KeyTypeFlags.IdentityKey) != 0)
+ {
+ if (symbolFile)
+ {
+ yield return BuildKey(path, SymbolPrefix, uuid, "_.dwarf");
+ }
+ else
+ {
+ bool clrSpecialFile = s_coreClrSpecialFiles.Contains(fileName);
+ yield return BuildKey(path, IdentityPrefix, uuid, clrSpecialFile);
+ }
+ }
+ if (!symbolFile)
+ {
+ if ((flags & KeyTypeFlags.RuntimeKeys) != 0 && fileName == CoreClrFileName)
+ {
+ yield return BuildKey(path, IdentityPrefix, uuid);
+ }
+ if ((flags & KeyTypeFlags.SymbolKey) != 0)
+ {
+ if (string.IsNullOrEmpty(symbolFileName))
+ {
+ symbolFileName = path + SymbolFileExtension;
+ }
+ yield return BuildKey(symbolFileName, SymbolPrefix, uuid, "_.dwarf");
+ }
+ if ((flags & (KeyTypeFlags.ClrKeys | KeyTypeFlags.DacDbiKeys)) != 0)
+ {
+ // Creates all the special CLR keys if the path is the coreclr module for this platform
+ if (fileName == CoreClrFileName)
+ {
+ foreach (string specialFileName in (flags & KeyTypeFlags.ClrKeys) != 0 ? s_coreClrSpecialFiles : s_dacdbiSpecialFiles)
+ {
+ yield return BuildKey(specialFileName, CoreClrPrefix, uuid);
+ }
+ }
+ }
+ }
+ }
+ }
+}
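Depending on the flags, a single MachO uuid fans out into an identity key ("mach-uuid-"), a "_.dwarf" symbol key ("mach-uuid-sym-"), and, when the file is libcoreclr.dylib, one "mach-uuid-coreclr-" key per special file. A sketch that calls the static helper directly (the uuid and path are made up):

```csharp
using System;
using Microsoft.SymbolStore;
using Microsoft.SymbolStore.KeyGenerators;

static class MachOKeyExample
{
    static void Main()
    {
        byte[] uuid = new byte[16]; // all-zero uuid, for illustration only
        KeyTypeFlags flags = KeyTypeFlags.IdentityKey | KeyTypeFlags.SymbolKey | KeyTypeFlags.ClrKeys;
        foreach (SymbolStoreKey key in MachOFileKeyGenerator.GetKeys(
            flags, "/usr/share/dotnet/libcoreclr.dylib", uuid, symbolFile: false, symbolFileName: null))
        {
            Console.WriteLine(key.Index);
        }
        // libcoreclr.dylib/mach-uuid-00000000000000000000000000000000/libcoreclr.dylib
        // _.dwarf/mach-uuid-sym-00000000000000000000000000000000/_.dwarf
        // libmscordaccore.dylib/mach-uuid-coreclr-00000000000000000000000000000000/libmscordaccore.dylib
        // ... plus the remaining special files
    }
}
```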
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Collections.Generic;
+using System.Linq;
+using Microsoft.FileFormats;
+using Microsoft.FileFormats.Minidump;
+
+namespace Microsoft.SymbolStore.KeyGenerators
+{
+ public class MinidumpKeyGenerator : KeyGenerator
+ {
+ private readonly IAddressSpace _dataSource;
+
+ public MinidumpKeyGenerator(ITracer tracer, SymbolStoreFile file)
+ : base(tracer)
+ {
+ _dataSource = new StreamAddressSpace(file.Stream);
+ }
+
+ public override bool IsValid()
+ {
+ return Minidump.IsValid(_dataSource);
+ }
+
+ public override bool IsDump()
+ {
+ return true;
+ }
+
+ public override IEnumerable<SymbolStoreKey> GetKeys(KeyTypeFlags flags)
+ {
+ if (IsValid())
+ {
+ try
+ {
+ Minidump dump = new(_dataSource);
+ return dump.LoadedImages
+ .Select((MinidumpLoadedImage loadedImage) => new PEFileKeyGenerator(Tracer, loadedImage.Image, loadedImage.ModuleName))
+ .SelectMany((KeyGenerator generator) => generator.GetKeys(flags));
+ }
+ catch (InvalidVirtualAddressException ex)
+ {
+ Tracer.Error("Minidump {0}", ex.Message);
+ }
+ }
+ return SymbolStoreKey.EmptyArray;
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using Microsoft.FileFormats;
+using Microsoft.FileFormats.PDB;
+using Microsoft.FileFormats.PE;
+
+namespace Microsoft.SymbolStore.KeyGenerators
+{
+ public class PDBFileKeyGenerator : KeyGenerator
+ {
+ private readonly PDBFile _pdbFile;
+ private readonly string _path;
+
+ public PDBFileKeyGenerator(ITracer tracer, SymbolStoreFile file)
+ : base(tracer)
+ {
+ StreamAddressSpace dataSource = new(file.Stream);
+ _pdbFile = new PDBFile(dataSource);
+ _path = file.FileName;
+ }
+
+ public override bool IsValid()
+ {
+ return _pdbFile.IsValid();
+ }
+
+ public override IEnumerable<SymbolStoreKey> GetKeys(KeyTypeFlags flags)
+ {
+ if (IsValid())
+ {
+ if ((flags & KeyTypeFlags.IdentityKey) != 0)
+ {
+ if (_pdbFile.DbiStream.IsValid())
+ {
+ yield return GetKey(_path, _pdbFile.Signature, unchecked((int)_pdbFile.DbiAge));
+ }
+ else
+ {
+ yield return GetKey(_path, _pdbFile.Signature, unchecked((int)_pdbFile.Age));
+ }
+ }
+ }
+ }
+
+ /// <summary>
+ /// Create a symbol store key for a Windows PDB.
+ /// </summary>
+ /// <param name="path">file name and path</param>
+ /// <param name="signature">mvid guid</param>
+ /// <param name="age">pdb age</param>
+ /// <param name="pdbChecksums">checksums of the pdb file, may be null</param>
+ /// <returns>symbol store key</returns>
+ public static SymbolStoreKey GetKey(string path, Guid signature, int age, IEnumerable<PdbChecksum> pdbChecksums = null)
+ {
+ Debug.Assert(path != null);
+ Debug.Assert(signature != null);
+ return BuildKey(path, string.Format("{0}{1:x}", signature.ToString("N"), age), clrSpecialFile: false, pdbChecksums: pdbChecksums);
+ }
+ }
+}
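A Windows PDB index is the signature GUID in "N" format immediately followed by the age in lowercase hex. For example (guid and age are made up):

```csharp
using System;
using Microsoft.SymbolStore;
using Microsoft.SymbolStore.KeyGenerators;

static class WindowsPdbKeyExample
{
    static void Main()
    {
        Guid signature = new("497b72f6-390a-44fc-878e-5a2d63b6cc4b");
        SymbolStoreKey key = PDBFileKeyGenerator.GetKey(@"c:\symbols\foo.pdb", signature, age: 1);
        Console.WriteLine(key.Index);
        // foo.pdb/497b72f6390a44fc878e5a2d63b6cc4b1/foo.pdb
    }
}
```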
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.IO;
+using System.Linq;
+using Microsoft.FileFormats;
+using Microsoft.FileFormats.PE;
+
+namespace Microsoft.SymbolStore.KeyGenerators
+{
+ public class PEFileKeyGenerator : KeyGenerator
+ {
+ private const string CoreClrFileName = "coreclr.dll";
+ private const string ClrFileName = "clr.dll";
+
+ private const string SosFileName = "sos.dll";
+ private const string CoreClrDACFileName = "mscordaccore.dll";
+ private const string ClrDACFileName = "mscordacwks.dll";
+ private const string DbiFileName = "mscordbi.dll";
+ private static readonly string[] s_knownFilesWithLongNameVariant = new string[] { SosFileName, CoreClrDACFileName, ClrDACFileName };
+ private static readonly string[] s_knownRuntimeSpecialFiles = new string[] { CoreClrDACFileName, ClrDACFileName, DbiFileName };
+
+ private readonly PEFile _peFile;
+ private readonly string _path;
+
+ public PEFileKeyGenerator(ITracer tracer, PEFile peFile, string path)
+ : base(tracer)
+ {
+ _peFile = peFile;
+ _path = path;
+ }
+
+ public PEFileKeyGenerator(ITracer tracer, SymbolStoreFile file)
+ : this(tracer, new PEFile(new StreamAddressSpace(file.Stream)), file.FileName)
+ {
+ }
+
+ public override bool IsValid()
+ {
+ return _peFile.IsValid();
+ }
+
+ public override IEnumerable<SymbolStoreKey> GetKeys(KeyTypeFlags flags)
+ {
+ if (IsValid())
+ {
+ if ((flags & KeyTypeFlags.IdentityKey) != 0)
+ {
+ yield return GetKey(_path, _peFile.Timestamp, _peFile.SizeOfImage);
+ }
+ if ((flags & KeyTypeFlags.RuntimeKeys) != 0 && (GetFileName(_path) == CoreClrFileName || GetFileName(_path) == ClrFileName))
+ {
+ yield return GetKey(_path, _peFile.Timestamp, _peFile.SizeOfImage);
+ }
+ if ((flags & KeyTypeFlags.SymbolKey) != 0)
+ {
+ PEPdbRecord[] pdbs = System.Array.Empty<PEPdbRecord>();
+ try
+ {
+ pdbs = _peFile.Pdbs.ToArray();
+ }
+ catch (InvalidVirtualAddressException ex)
+ {
+ Tracer.Error("Reading PDB records for {0}: {1}", _path, ex.Message);
+ }
+
+ foreach (PEPdbRecord pdb in pdbs)
+ {
+ if (((flags & KeyTypeFlags.ForceWindowsPdbs) == 0) && pdb.IsPortablePDB)
+ {
+ yield return PortablePDBFileKeyGenerator.GetKey(pdb.Path, pdb.Signature, _peFile.PdbChecksums);
+ }
+ else
+ {
+ yield return PDBFileKeyGenerator.GetKey(pdb.Path, pdb.Signature, pdb.Age, _peFile.PdbChecksums);
+ }
+ }
+ }
+
+ if ((flags & KeyTypeFlags.PerfMapKeys) != 0)
+ {
+ foreach (PEPerfMapRecord perfmapRecord in _peFile.PerfMapsV1)
+ {
+ if (perfmapRecord.Version > FileFormats.PerfMap.PerfMapFile.MaxKnownPerfMapVersion)
+ {
+ Tracer.Warning("Trying to get key for PerfmapFile {0} associated with PE {1} with version {2}, higher than max known version {3}",
+ perfmapRecord.Path, _path, perfmapRecord.Version, FileFormats.PerfMap.PerfMapFile.MaxKnownPerfMapVersion);
+ }
+ yield return PerfMapFileKeyGenerator.GetKey(perfmapRecord.Path, perfmapRecord.Signature, perfmapRecord.Version);
+ }
+ }
+
+ // Return keys for SOS modules for a given runtime module
+ if ((flags & (KeyTypeFlags.ClrKeys)) != 0)
+ {
+ string coreclrId = BuildId(_peFile.Timestamp, _peFile.SizeOfImage);
+ foreach (string specialFileName in GetSOSFiles(GetFileName(_path)))
+ {
+ yield return BuildKey(specialFileName, coreclrId);
+ }
+ }
+
+ // Return keys for DAC and DBI modules for a given runtime module
+ if ((flags & (KeyTypeFlags.ClrKeys | KeyTypeFlags.DacDbiKeys)) != 0)
+ {
+ string coreclrId = BuildId(_peFile.Timestamp, _peFile.SizeOfImage);
+ foreach (string specialFileName in GetDACFiles(GetFileName(_path)))
+ {
+ yield return BuildKey(specialFileName, coreclrId);
+ }
+ }
+
+ if ((flags & KeyTypeFlags.HostKeys) != 0)
+ {
+ if ((_peFile.FileHeader.Characteristics & (ushort)ImageFile.Dll) == 0 && !_peFile.IsILImage)
+ {
+ string id = BuildId(_peFile.Timestamp, _peFile.SizeOfImage);
+
+ // The host program as itself (usually dotnet.exe)
+ yield return BuildKey(_path, id);
+
+ // apphost.exe downloaded as the host program name
+ yield return BuildKey(_path, prefix: null, id, "apphost.exe");
+ }
+ }
+ }
+ }
+
+ private IEnumerable<string> GetSOSFiles(string runtimeFileName)
+ {
+ if (runtimeFileName == ClrFileName)
+ {
+ return GetFilesLongNameVariants(SosFileName);
+ }
+
+ return Enumerable.Empty<string>();
+ }
+
+ private IEnumerable<string> GetDACFiles(string runtimeFileName)
+ {
+ if (runtimeFileName == CoreClrFileName)
+ {
+ string[] coreClrDACFiles = new string[] { CoreClrDACFileName, DbiFileName };
+ IEnumerable<string> longNameDACFiles = GetFilesLongNameVariants(CoreClrDACFileName);
+ return coreClrDACFiles.Concat(longNameDACFiles);
+ }
+
+ if (runtimeFileName == ClrFileName)
+ {
+ string[] clrDACFiles = new string[] { ClrDACFileName, DbiFileName };
+ IEnumerable<string> longNameDACFiles = GetFilesLongNameVariants(ClrDACFileName);
+ return clrDACFiles.Concat(longNameDACFiles);
+ }
+
+ return Enumerable.Empty<string>();
+ }
+
+ private IEnumerable<string> GetFilesLongNameVariants(string fileWithLongNameVariant)
+ {
+ if (!s_knownFilesWithLongNameVariant.Contains(fileWithLongNameVariant))
+ {
+ Tracer.Warning("{0} is not a recognized file with a long name variant", fileWithLongNameVariant);
+ return Enumerable.Empty<string>();
+ }
+
+ VsFixedFileInfo fileVersionInfo = _peFile.VersionInfo;
+ if (fileVersionInfo == null)
+ {
+ Tracer.Warning("{0} has no version resource, long name file keys could not be generated", _path);
+ return Enumerable.Empty<string>();
+ }
+
+ string targetArchitecture;
+ List<string> hostArchitectures = new();
+ ImageFileMachine machine = (ImageFileMachine)_peFile.FileHeader.Machine;
+ switch (machine)
+ {
+ case ImageFileMachine.Amd64:
+ targetArchitecture = "amd64";
+ break;
+ case ImageFileMachine.I386:
+ targetArchitecture = "x86";
+ break;
+ case ImageFileMachine.ArmNT:
+ targetArchitecture = "arm";
+ hostArchitectures.Add("x86");
+ break;
+ case ImageFileMachine.Arm64:
+ targetArchitecture = "arm64";
+ hostArchitectures.Add("amd64");
+ break;
+ default:
+ Tracer.Warning("{0} has an architecture not used to generate long name file keys", _peFile);
+ return Enumerable.Empty<string>();
+ }
+ hostArchitectures.Add(targetArchitecture);
+
+ string fileVersion = $"{fileVersionInfo.FileVersionMajor}.{fileVersionInfo.FileVersionMinor}.{fileVersionInfo.FileVersionBuild}.{fileVersionInfo.FileVersionRevision:00}";
+
+ string buildFlavor = (fileVersionInfo.FileFlags & FileInfoFlags.Debug) == 0 ? "" :
+ (fileVersionInfo.FileFlags & FileInfoFlags.SpecialBuild) != 0 ? ".dbg" : ".chk";
+
+ List<string> longNameFileVariants = new();
+ string fileNameWithoutExtension = Path.GetFileNameWithoutExtension(fileWithLongNameVariant);
+ foreach (string hostArchitecture in hostArchitectures)
+ {
+ longNameFileVariants.Add($"{fileNameWithoutExtension}_{hostArchitecture}_{targetArchitecture}_{fileVersion}{buildFlavor}.dll");
+ }
+
+ return longNameFileVariants;
+ }
+
+ /// <summary>
+ /// Creates a PE file symbol store key identity key.
+ /// </summary>
+ /// <param name="path">file name and path</param>
+ /// <param name="timestamp">time stamp of pe image</param>
+ /// <param name="sizeOfImage">size of pe image</param>
+ /// <returns>symbol store key</returns>
+ public static SymbolStoreKey GetKey(string path, uint timestamp, uint sizeOfImage)
+ {
+ Debug.Assert(path != null);
+
+ // The clr special file flag can not be based on the GetSpecialFiles() list because
+ // that is only valid when "path" is the coreclr.dll.
+ string fileName = GetFileName(path);
+ bool clrSpecialFile = s_knownRuntimeSpecialFiles.Contains(fileName) ||
+ (s_knownFilesWithLongNameVariant.Any((file) => fileName.StartsWith(Path.GetFileNameWithoutExtension(file).ToLowerInvariant() + "_")) && Path.GetExtension(fileName) == ".dll");
+
+ string id = BuildId(timestamp, sizeOfImage);
+ return BuildKey(path, id, clrSpecialFile);
+ }
+
+ private static string BuildId(uint timestamp, uint sizeOfImage)
+ {
+ return string.Format("{0:X8}{1:x}", timestamp, sizeOfImage);
+ }
+ }
+}
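A PE identity index concatenates the COFF timestamp as eight uppercase hex digits with SizeOfImage in unpadded lowercase hex, so the two fields remain distinguishable even though the id is one string. For example (values made up):

```csharp
using System;
using Microsoft.SymbolStore;
using Microsoft.SymbolStore.KeyGenerators;

static class PEKeyExample
{
    static void Main()
    {
        // Timestamp 0x5E8F1234, SizeOfImage 0x22000:
        SymbolStoreKey key = PEFileKeyGenerator.GetKey(@"c:\bin\coreclr.dll", 0x5E8F1234, 0x22000);
        Console.WriteLine(key.Index);
        // coreclr.dll/5E8F123422000/coreclr.dll
    }
}
```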
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Linq;
+using Microsoft.FileFormats.PerfMap;
+
+namespace Microsoft.SymbolStore.KeyGenerators
+{
+ public class PerfMapFileKeyGenerator : KeyGenerator
+ {
+ private readonly SymbolStoreFile _file;
+ private readonly PerfMapFile _perfmapFile;
+
+ public PerfMapFileKeyGenerator(ITracer tracer, SymbolStoreFile file)
+ : base(tracer)
+ {
+ _file = file;
+ _perfmapFile = new PerfMapFile(_file.Stream);
+ }
+
+ public override IEnumerable<SymbolStoreKey> GetKeys(KeyTypeFlags flags)
+ {
+ if (!IsValid() || (flags & KeyTypeFlags.IdentityKey) == 0)
+ {
+ yield break;
+ }
+ Debug.Assert(_perfmapFile.Header is not null);
+
+ PerfMapFile.PerfMapHeader header = _perfmapFile.Header;
+
+ if (header.Version > PerfMapFile.MaxKnownPerfMapVersion)
+ {
+ Tracer.Warning("Trying to get key for PerfMap {0} with version {1}, higher than max known version {2}.",
+ _file.FileName, header.Version, PerfMapFile.MaxKnownPerfMapVersion);
+ }
+ yield return PerfMapFileKeyGenerator.GetKey(_file.FileName, header.Signature, header.Version);
+ }
+
+ public override bool IsValid() => _perfmapFile.IsValid;
+
+ internal static SymbolStoreKey GetKey(string path, byte[] signature, uint version)
+ {
+ Debug.Assert(path != null);
+ Debug.Assert(signature != null);
+
+ string stringSignature = string.Concat(signature.Select(b => b.ToString("x2")));
+ string idComponent = $"r2rmap-v{version}-{stringSignature}";
+ return BuildKey(path, idComponent, clrSpecialFile: false, pdbChecksums: null);
+ }
+ }
+}
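Perfmap indexes embed both the format version and the signature as "r2rmap-v{version}-{signature hex}". GetKey is internal, so this sketch just reproduces the composition (the file name and signature bytes are made up):

```csharp
using System;
using System.Linq;

static class PerfMapKeyExample
{
    static void Main()
    {
        byte[] signature = { 0xaa, 0xbb, 0xcc, 0xdd };
        uint version = 1;
        string hex = string.Concat(signature.Select(b => b.ToString("x2")));
        string file = "system.private.corelib.ni.r2rmap";
        Console.WriteLine($"{file}/r2rmap-v{version}-{hex}/{file}");
        // system.private.corelib.ni.r2rmap/r2rmap-v1-aabbccdd/system.private.corelib.ni.r2rmap
    }
}
```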
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Reflection.Metadata;
+using Microsoft.FileFormats.PE;
+
+namespace Microsoft.SymbolStore.KeyGenerators
+{
+ public class PortablePDBFileKeyGenerator : KeyGenerator
+ {
+ private readonly SymbolStoreFile _file;
+
+ public PortablePDBFileKeyGenerator(ITracer tracer, SymbolStoreFile file)
+ : base(tracer)
+ {
+ _file = file;
+ }
+
+ public override bool IsValid()
+ {
+ try
+ {
+ _file.Stream.Position = 0;
+ using (MetadataReaderProvider provider = MetadataReaderProvider.FromPortablePdbStream(_file.Stream, MetadataStreamOptions.LeaveOpen))
+ {
+ MetadataReader reader = provider.GetMetadataReader();
+ return true;
+ }
+ }
+ catch (BadImageFormatException)
+ {
+ }
+ return false;
+ }
+
+ public override IEnumerable<SymbolStoreKey> GetKeys(KeyTypeFlags flags)
+ {
+ if ((flags & KeyTypeFlags.IdentityKey) != 0)
+ {
+ SymbolStoreKey key = null;
+
+ try
+ {
+ _file.Stream.Position = 0;
+ using (MetadataReaderProvider provider = MetadataReaderProvider.FromPortablePdbStream(_file.Stream, MetadataStreamOptions.LeaveOpen))
+ {
+ MetadataReader reader = provider.GetMetadataReader();
+ BlobContentId blob = new(reader.DebugMetadataHeader.Id);
+ if ((flags & KeyTypeFlags.ForceWindowsPdbs) == 0)
+ {
+ key = GetKey(_file.FileName, blob.Guid);
+ }
+ else
+ {
+ // Force the Windows PDB index
+ key = PDBFileKeyGenerator.GetKey(_file.FileName, blob.Guid, 1);
+ }
+ }
+ }
+ catch (BadImageFormatException ex)
+ {
+ Tracer.Warning("PortablePDBFileKeyGenerator {0}", ex.Message);
+ }
+
+ if (key != null)
+ {
+ yield return key;
+ }
+ }
+ }
+
+ /// <summary>
+ /// Create a symbol store key for a Portable PDB.
+ /// </summary>
+ /// <param name="path">file name and path</param>
+ /// <param name="pdbId">pdb guid</param>
+ /// <param name="pdbChecksums">checksums of the pdb file, may be null</param>
+ /// <returns>symbol store key</returns>
+ public static SymbolStoreKey GetKey(string path, Guid pdbId, IEnumerable<PdbChecksum> pdbChecksums = null)
+ {
+ Debug.Assert(path != null);
+ Debug.Assert(pdbId != null);
+ return BuildKey(path, pdbId.ToString("N") + "FFFFFFFF", clrSpecialFile: false, pdbChecksums);
+ }
+ }
+}
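A portable PDB index is the pdb GUID with a fixed "FFFFFFFF" suffix where a Windows PDB would carry its age, which keeps the two kinds of PDB keys from colliding. For example (guid made up):

```csharp
using System;
using Microsoft.SymbolStore;
using Microsoft.SymbolStore.KeyGenerators;

static class PortablePdbKeyExample
{
    static void Main()
    {
        Guid pdbId = new("497b72f6-390a-44fc-878e-5a2d63b6cc4b");
        SymbolStoreKey key = PortablePDBFileKeyGenerator.GetKey("foo.pdb", pdbId);
        Console.WriteLine(key.Index);
        // foo.pdb/497b72f6390a44fc878e5a2d63b6cc4bFFFFFFFF/foo.pdb
    }
}
```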
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Security.Cryptography;
+
+namespace Microsoft.SymbolStore.KeyGenerators
+{
+ public class SourceFileKeyGenerator : KeyGenerator
+ {
+ private readonly SymbolStoreFile _file;
+
+ public SourceFileKeyGenerator(ITracer tracer, SymbolStoreFile file)
+ : base(tracer)
+ {
+ _file = file;
+ }
+
+ public override bool IsValid()
+ {
+ return true;
+ }
+
+ public override IEnumerable<SymbolStoreKey> GetKeys(KeyTypeFlags flags)
+ {
+ if ((flags & KeyTypeFlags.IdentityKey) != 0)
+ {
+#pragma warning disable CA5350 // Do Not Use Weak Cryptographic Algorithms
+ byte[] hash = SHA1.Create().ComputeHash(_file.Stream);
+#pragma warning restore CA5350 // Do Not Use Weak Cryptographic Algorithms
+ yield return GetKey(_file.FileName, hash);
+ }
+ }
+
+ /// <summary>
+ /// Create a symbol store key for a source file
+ /// </summary>
+ /// <param name="path">file name and path</param>
+ /// <param name="hash">sha1 hash of the source file</param>
+ /// <returns>symbol store key</returns>
+ public static SymbolStoreKey GetKey(string path, byte[] hash)
+ {
+ Debug.Assert(path != null);
+ Debug.Assert(hash != null);
+ return BuildKey(path, "sha1", hash);
+ }
+ }
+}
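Source indexes hash the file content with SHA-1 and use the "sha1" prefix. A hedged sketch (the file path is an example):

```csharp
using System;
using System.IO;
using System.Security.Cryptography;
using Microsoft.SymbolStore;
using Microsoft.SymbolStore.KeyGenerators;

static class SourceKeyExample
{
    static void Main()
    {
        using FileStream stream = File.OpenRead("Program.cs");
        byte[] hash = SHA1.Create().ComputeHash(stream); // same weak-but-conventional hash as above
        SymbolStoreKey key = SourceFileKeyGenerator.GetKey("Program.cs", hash);
        Console.WriteLine(key.Index);
        // program.cs/sha1-<40 lowercase hex digits>/program.cs
    }
}
```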
--- /dev/null
+<Project Sdk="Microsoft.NET.Sdk">
+ <PropertyGroup>
+ <TargetFrameworks>net462;netstandard2.0</TargetFrameworks>
+ <NoWarn>;1591;1701</NoWarn>
+ <IsPackable>true</IsPackable>
+ <Description>Symbol server key generation and access protocol</Description>
+ <PackageReleaseNotes>$(Description)</PackageReleaseNotes>
+ <PackageTags>Symbol Indexing</PackageTags>
+ <IncludeSymbols>true</IncludeSymbols>
+ <IsShippingAssembly>true</IsShippingAssembly>
+ <!-- Preserve dotnet/symstore versioning scheme. -->
+ <PreReleaseVersionLabel />
+ <VersionPrefix>1.0.0</VersionPrefix>
+ </PropertyGroup>
+
+ <ItemGroup>
+ <PackageReference Include="System.Reflection.Metadata" Version="$(SystemReflectionMetadataVersion)" />
+<!--
+ <PackageReference Condition="'$(TargetFramework)' != 'net462'" Include="System.Reflection.Metadata" Version="$(SystemReflectionMetadataVersion)" />
+ <PackageReference Condition="'$(TargetFramework)' == 'net462'" Include="System.Reflection.Metadata" Version="1.6.0" />
+-->
+ </ItemGroup>
+
+ <ItemGroup>
+ <ProjectReference Include="$(MSBuildThisFileDirectory)..\Microsoft.FileFormats\Microsoft.FileFormats.csproj" />
+ </ItemGroup>
+
+ <ItemGroup Condition="'$(TargetFramework)' == 'net462'">
+ <Reference Include="System.Net.Http" />
+ </ItemGroup>
+</Project>
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Diagnostics;
+using System.IO;
+
+namespace Microsoft.SymbolStore
+{
+ /// <summary>
+ /// Symbol store file.
+ ///
+ /// Key generation: input file stream and file name/path.
+ /// Symbol store: output file stream and the file name/path it came from.
+ /// </summary>
+ public sealed class SymbolStoreFile : IDisposable
+ {
+ /// <summary>
+ /// The input file stream to generate the key or the output file stream
+ /// for the symbol stores to write.
+ /// </summary>
+ public readonly Stream Stream;
+
+ /// <summary>
+ /// The name of the input file for key generation or the name of where
+ /// the output file came from for symbol stores, i.e. cached file name,
+ /// file.ptr UNC path or http request URL.
+ /// </summary>
+ public readonly string FileName;
+
+ /// <summary>
+ /// Create a symbol file instance
+ /// </summary>
+ /// <param name="stream">stream of the file contents</param>
+ /// <param name="fileName">name of the file</param>
+ public SymbolStoreFile(Stream stream, string fileName)
+ {
+ Debug.Assert(stream != null);
+ Debug.Assert(stream.CanSeek);
+ Debug.Assert(fileName != null);
+
+ Stream = stream;
+ FileName = fileName;
+ }
+
+ public void Dispose()
+ {
+ Stream.Dispose();
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.IO;
+using System.Linq;
+using Microsoft.FileFormats.PE;
+
+namespace Microsoft.SymbolStore
+{
+ /// <summary>
+ /// Symbol store key information
+ /// </summary>
+ public sealed class SymbolStoreKey
+ {
+ /// <summary>
+ /// Symbol server index
+ /// </summary>
+ public readonly string Index;
+
+ /// <summary>
+ /// Full path name
+ /// </summary>
+ public readonly string FullPathName;
+
+ /// <summary>
+ /// If true, this file is one of the clr special files like the DAC or SOS, but
+ /// the key is the normal identity key for this file.
+ /// </summary>
+ public readonly bool IsClrSpecialFile;
+
+ /// <summary>
+ /// Empty array of keys
+ /// </summary>
+ public static SymbolStoreKey[] EmptyArray = Array.Empty<SymbolStoreKey>();
+
+ /// <summary>
+ /// The checksums of the pdb file (if any)
+ /// </summary>
+ public readonly IEnumerable<PdbChecksum> PdbChecksums;
+
+ /// <summary>
+ /// Create key instance.
+ /// </summary>
+ /// <param name="index">index to lookup on symbol server</param>
+ /// <param name="fullPathName">the full path name of the file</param>
+ /// <param name="clrSpecialFile">if true, the file is one the clr special files</param>
+ /// <param name="pdbChecksums">if true, the file is one the clr special files</param>
+ public SymbolStoreKey(string index, string fullPathName, bool clrSpecialFile = false, IEnumerable<PdbChecksum> pdbChecksums = null)
+ {
+ Debug.Assert(index != null && fullPathName != null);
+ Index = index;
+ FullPathName = fullPathName;
+ IsClrSpecialFile = clrSpecialFile;
+ PdbChecksums = pdbChecksums ?? Enumerable.Empty<PdbChecksum>();
+ }
+
+ /// <summary>
+ /// Returns the first two parts of the index tuple. Allows a different file name
+ /// to be appended to this symbol key. Includes the trailing "/".
+ /// </summary>
+ public string IndexPrefix
+ {
+ get { return Index.Substring(0, Index.LastIndexOf("/") + 1); }
+ }
+
+ /// <summary>
+ /// Returns the hash of the index.
+ /// </summary>
+ public override int GetHashCode()
+ {
+ return Index.GetHashCode();
+ }
+
+ /// <summary>
+ /// Only the index is compared or hashed. The FileName is already
+ /// part of the index.
+ /// </summary>
+ public override bool Equals(object obj)
+ {
+ return obj is SymbolStoreKey right && string.Equals(Index, right.Index);
+ }
+
+ private static readonly HashSet<char> s_invalidChars = new(Path.GetInvalidFileNameChars());
+
+ /// <summary>
+ /// Validates a symbol index.
+ ///
+ /// SSQP theoretically supports a broader set of keys, but in order to ensure that all the keys
+ /// play well with the caching scheme we enforce additional requirements (that all current key
+ /// conventions also meet).
+ /// </summary>
+ /// <param name="index">symbol key index</param>
+ /// <returns>true if valid</returns>
+ public static bool IsKeyValid(string index)
+ {
+ string[] parts = index.Split(new char[] { '/' }, StringSplitOptions.RemoveEmptyEntries);
+ if (parts.Length != 3) {
+ return false;
+ }
+ for (int i = 0; i < 3; i++)
+ {
+ foreach (char c in parts[i])
+ {
+ if (char.IsLetterOrDigit(c)) {
+ continue;
+ }
+ if (!s_invalidChars.Contains(c)) {
+ continue;
+ }
+ return false;
+ }
+ // We need to support files with . in the name, but we don't want identifiers that
+ // are meaningful to the filesystem
+ if (parts[i] == "." || parts[i] == "..") {
+ return false;
+ }
+ }
+ return true;
+ }
+ }
+}
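IsKeyValid enforces exactly three non-empty "/"-separated parts, rejects characters that are neither alphanumeric nor safe in file names, and rejects "."/".." components. A few probes:

```csharp
using System;
using Microsoft.SymbolStore;

static class KeyValidationExample
{
    static void Main()
    {
        Console.WriteLine(SymbolStoreKey.IsKeyValid(
            "foo.pdb/497b72f6390a44fc878e5a2d63b6cc4b1/foo.pdb")); // True
        Console.WriteLine(SymbolStoreKey.IsKeyValid("foo.pdb/1234"));       // False: only two parts
        Console.WriteLine(SymbolStoreKey.IsKeyValid("foo.pdb/../foo.pdb")); // False: ".." component
        Console.WriteLine(SymbolStoreKey.IsKeyValid("a/b//"));              // False: empty parts removed, two remain
    }
}
```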
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace Microsoft.SymbolStore.SymbolStores
+{
+ public sealed class CacheSymbolStore : SymbolStore
+ {
+ public string CacheDirectory { get; }
+
+ public CacheSymbolStore(ITracer tracer, SymbolStore backingStore, string cacheDirectory)
+ : base(tracer, backingStore)
+ {
+ CacheDirectory = cacheDirectory ?? throw new ArgumentNullException(nameof(cacheDirectory));
+ }
+
+ protected override Task<SymbolStoreFile> GetFileInner(SymbolStoreKey key, CancellationToken token)
+ {
+ SymbolStoreFile result = null;
+ string cacheFile = GetCacheFilePath(key);
+ if (File.Exists(cacheFile))
+ {
+ Stream fileStream = File.OpenRead(cacheFile);
+ result = new SymbolStoreFile(fileStream, cacheFile);
+ }
+ return Task.FromResult(result);
+ }
+
+ protected override async Task WriteFileInner(SymbolStoreKey key, SymbolStoreFile file)
+ {
+ string cacheFile = GetCacheFilePath(key);
+ if (cacheFile != null && !File.Exists(cacheFile))
+ {
+ try
+ {
+ Directory.CreateDirectory(Path.GetDirectoryName(cacheFile));
+ using (Stream destinationStream = File.OpenWrite(cacheFile))
+ {
+ await file.Stream.CopyToAsync(destinationStream).ConfigureAwait(false);
+ Tracer.Verbose("Cached: {0}", cacheFile);
+ }
+ }
+ catch (Exception ex) when (ex is ArgumentException || ex is UnauthorizedAccessException || ex is IOException)
+ {
+ }
+ }
+ }
+
+ private string GetCacheFilePath(SymbolStoreKey key)
+ {
+ if (SymbolStoreKey.IsKeyValid(key.Index)) {
+ return Path.Combine(CacheDirectory, key.Index);
+ }
+ Tracer.Error("CacheSymbolStore: invalid key index {0}", key.Index);
+ return null;
+ }
+
+ public override bool Equals(object obj)
+ {
+ if (obj is CacheSymbolStore store)
+ {
+ return IsPathEqual(CacheDirectory, store.CacheDirectory);
+ }
+ return false;
+ }
+
+ public override int GetHashCode()
+ {
+ return HashPath(CacheDirectory);
+ }
+
+ public override string ToString()
+ {
+ return $"Cache: {CacheDirectory}";
+ }
+ }
+}
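Stores compose into a chain: each store consults itself first, then its backing store, and the cache writes back whatever the backing store returns (see SymbolStore.GetFile further below). A hedged wiring sketch (the ITracer instance is assumed to come from the caller; the server URL and cache path are examples):

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.SymbolStore;
using Microsoft.SymbolStore.SymbolStores;

static class StoreChainExample
{
    // Looks up one key through a local cache backed by a public symbol server.
    static async Task LookupAsync(ITracer tracer, SymbolStoreKey key)
    {
        HttpSymbolStore server = new(tracer, backingStore: null,
            new Uri("https://msdl.microsoft.com/download/symbols/"));
        using CacheSymbolStore cache = new(tracer, server, @"c:\sym-cache");

        // A cache miss falls through to the server, and the downloaded file
        // is written into the cache so the next lookup is local.
        using SymbolStoreFile file = await cache.GetFile(key, CancellationToken.None);
        Console.WriteLine(file != null ? $"Found: {file.FileName}" : "Not found");
    }
}
```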
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.SymbolStore;
+using Microsoft.SymbolStore.KeyGenerators;
+using Microsoft.SymbolStore.SymbolStores;
+
+namespace SOS
+{
+ /// <summary>
+ /// Symbol store that searches a local directory for files matching the key's file name.
+ /// </summary>
+ public class DirectorySymbolStore : SymbolStore
+ {
+ /// <summary>
+ /// Directory to search symbols
+ /// </summary>
+ public string Directory { get; }
+
+ /// <summary>
+ /// Create an instance of a directory symbol store
+ /// </summary>
+ /// <param name="backingStore">next symbol store or null</param>
+ /// <param name="directory">symbol search path</param>
+ public DirectorySymbolStore(ITracer tracer, SymbolStore backingStore, string directory)
+ : base(tracer, backingStore)
+ {
+ Directory = directory ?? throw new ArgumentNullException(nameof(directory));
+ }
+
+ protected override Task<SymbolStoreFile> GetFileInner(SymbolStoreKey key, CancellationToken token)
+ {
+ SymbolStoreFile result = null;
+
+ if (SymbolStoreKey.IsKeyValid(key.Index))
+ {
+ string filePath = Path.Combine(Directory, Path.GetFileName(key.FullPathName));
+ if (File.Exists(filePath))
+ {
+ try
+ {
+ Stream fileStream = File.OpenRead(filePath);
+ SymbolStoreFile file = new(fileStream, filePath);
+ FileKeyGenerator generator = new(Tracer, file);
+
+ foreach (SymbolStoreKey targetKey in generator.GetKeys(KeyTypeFlags.IdentityKey))
+ {
+ if (key.Equals(targetKey))
+ {
+ result = file;
+ break;
+ }
+ }
+ }
+ catch (Exception ex) when (ex is UnauthorizedAccessException || ex is IOException)
+ {
+ }
+ }
+ }
+ else
+ {
+ Tracer.Error("DirectorySymbolStore: invalid key index {0}", key.Index);
+ }
+
+ return Task.FromResult(result);
+ }
+
+ public override bool Equals(object obj)
+ {
+ if (obj is DirectorySymbolStore store)
+ {
+ return IsPathEqual(Directory, store.Directory);
+ }
+ return false;
+ }
+
+ public override int GetHashCode()
+ {
+ return HashPath(Directory);
+ }
+
+ public override string ToString()
+ {
+ return $"Directory: {Directory}";
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Net;
+using System.Net.Http;
+using System.Net.Http.Headers;
+using System.Net.Sockets;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace Microsoft.SymbolStore.SymbolStores
+{
+ /// <summary>
+ /// Basic http symbol store. Requests can be authenticated with a PAT for VSTS symbol stores.
+ /// </summary>
+ public class HttpSymbolStore : SymbolStore
+ {
+ private readonly HttpClient _client;
+ private readonly HttpClient _authenticatedClient;
+ private bool _clientFailure;
+
+ /// <summary>
+ /// For example, https://dotnet.myget.org/F/dev-feed/symbols.
+ /// </summary>
+ public Uri Uri { get; }
+
+ /// <summary>
+ /// Get or set the request timeout. Default 4 minutes.
+ /// </summary>
+ public TimeSpan Timeout
+ {
+ get
+ {
+ return _client.Timeout;
+ }
+ set
+ {
+ _client.Timeout = value;
+ if (_authenticatedClient != null)
+ {
+ _authenticatedClient.Timeout = value;
+ }
+ }
+ }
+
+ /// <summary>
+ /// The number of retries to do on a retryable status or socket error
+ /// </summary>
+ public int RetryCount { get; set; }
+
+ /// <summary>
+ /// Sets up the underlying fields for HttpSymbolStore
+ /// </summary>
+ /// <param name="tracer">logger</param>
+ /// <param name="backingStore">next symbol store or null</param>
+ /// <param name="symbolServerUri">symbol server url</param>
+ /// <param name="hasPAT">flag to indicate to create an authenticatedClient if there is a PAT</param>
+ private HttpSymbolStore(ITracer tracer, SymbolStore backingStore, Uri symbolServerUri, bool hasPAT)
+ : base(tracer, backingStore)
+ {
+ Uri = symbolServerUri ?? throw new ArgumentNullException(nameof(symbolServerUri));
+ if (!symbolServerUri.IsAbsoluteUri || symbolServerUri.IsFile)
+ {
+ throw new ArgumentException("Symbol server URI must be absolute and not a file path", nameof(symbolServerUri));
+ }
+
+ // Normal unauthenticated client
+ _client = new HttpClient
+ {
+ Timeout = TimeSpan.FromMinutes(4)
+ };
+
+ if (hasPAT)
+ {
+ HttpClientHandler handler = new()
+ {
+ AllowAutoRedirect = false
+ };
+ HttpClient client = new(handler)
+ {
+ Timeout = TimeSpan.FromMinutes(4)
+ };
+ client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
+ // Authorization is set in associated constructors.
+ _authenticatedClient = client;
+ }
+ }
+
+ /// <summary>
+ /// Create an instance of a http symbol store
+ /// </summary>
+ /// <param name="tracer">logger</param>
+ /// <param name="backingStore">next symbol store or null</param>
+ /// <param name="symbolServerUri">symbol server url</param>
+ /// <param name="personalAccessToken">optional Basic Auth PAT or null if no authentication</param>
+ public HttpSymbolStore(ITracer tracer, SymbolStore backingStore, Uri symbolServerUri, string personalAccessToken = null)
+ : this(tracer, backingStore, symbolServerUri, !string.IsNullOrEmpty(personalAccessToken))
+ {
+ // If PAT, create authenticated client with Basic Auth
+ if (!string.IsNullOrEmpty(personalAccessToken))
+ {
+ _authenticatedClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", Convert.ToBase64String(ASCIIEncoding.ASCII.GetBytes(string.Format("{0}:{1}", "", personalAccessToken))));
+ }
+ }
+
+ /// <summary>
+ /// Create an instance of a http symbol store with an authenticated client
+ /// </summary>
+ /// <param name="tracer">logger</param>
+ /// <param name="backingStore">next symbol store or null</param>
+ /// <param name="symbolServerUri">symbol server url</param>
+ /// <param name="scheme">The scheme information to use for the AuthenticationHeaderValue</param>
+ /// <param name="parameter">The parameter information to use for the AuthenticationHeaderValue</param>
+ public HttpSymbolStore(ITracer tracer, SymbolStore backingStore, Uri symbolServerUri, string scheme, string parameter)
+ : this(tracer, backingStore, symbolServerUri, true)
+ {
+ if (string.IsNullOrEmpty(scheme))
+ {
+ throw new ArgumentNullException(nameof(scheme));
+ }
+
+ if (string.IsNullOrEmpty(parameter))
+ {
+ throw new ArgumentNullException(nameof(parameter));
+ }
+
+ // Create authenticated header with given SymbolAuthHeader
+ _authenticatedClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(scheme, parameter);
+ // Force redirect logins to fail.
+ _authenticatedClient.DefaultRequestHeaders.Add("X-TFS-FedAuthRedirect", "Suppress");
+ }
+
+ /// <summary>
+ /// Resets the sticky client failure flag. This client instance will now
+ /// attempt to download again instead of automatically failing.
+ /// </summary>
+ public void ResetClientFailure()
+ {
+ _clientFailure = false;
+ }
+
+ protected override async Task<SymbolStoreFile> GetFileInner(SymbolStoreKey key, CancellationToken token)
+ {
+ Uri uri = GetRequestUri(key.Index);
+
+ bool needsChecksumMatch = key.PdbChecksums.Any();
+
+ if (needsChecksumMatch)
+ {
+ string checksumHeader = string.Join(";", key.PdbChecksums);
+ HttpClient client = _authenticatedClient ?? _client;
+ Tracer.Information($"SymbolChecksum: {checksumHeader}");
+ client.DefaultRequestHeaders.Add("SymbolChecksum", checksumHeader);
+ }
+
+ Stream stream = await GetFileStream(key.FullPathName, uri, token).ConfigureAwait(false);
+ if (stream != null)
+ {
+ if (needsChecksumMatch)
+ {
+ ChecksumValidator.Validate(Tracer, stream, key.PdbChecksums);
+ }
+ return new SymbolStoreFile(stream, uri.ToString());
+ }
+ return null;
+ }
+
+ protected Uri GetRequestUri(string index)
+ {
+ // Escape everything except the forward slashes (/) in the index
+ index = string.Join("/", index.Split('/').Select(part => Uri.EscapeDataString(part)));
+ if (!Uri.TryCreate(Uri, index, out Uri requestUri))
+ {
+ throw new ArgumentException(message: null, paramName: nameof(index));
+ }
+ if (requestUri.IsFile)
+ {
+ throw new ArgumentException(message: null, paramName: nameof(index));
+ }
+ return requestUri;
+ }
+
+ protected async Task<Stream> GetFileStream(string path, Uri requestUri, CancellationToken token)
+ {
+ // Just return if previous failure
+ if (_clientFailure)
+ {
+ return null;
+ }
+ string fileName = Path.GetFileName(path);
+ HttpClient client = _authenticatedClient ?? _client;
+ int retries = 0;
+ while (true)
+ {
+ bool retryable;
+ string message;
+ try
+ {
+ // Cannot dispose the response (e.g. via using) on success because that
+ // would dispose the content stream, which this function returns.
+ HttpResponseMessage response = await client.GetAsync(requestUri, token).ConfigureAwait(false);
+ if (response.StatusCode == HttpStatusCode.OK)
+ {
+ return await response.Content.ReadAsStreamAsync().ConfigureAwait(false);
+ }
+ if (response.StatusCode == HttpStatusCode.Found)
+ {
+ Uri location = response.Headers.Location;
+ response.Dispose();
+
+ response = await _client.GetAsync(location, token).ConfigureAwait(false);
+ if (response.StatusCode == HttpStatusCode.OK)
+ {
+ return await response.Content.ReadAsStreamAsync().ConfigureAwait(false);
+ }
+ }
+ HttpStatusCode statusCode = response.StatusCode;
+ string reasonPhrase = response.ReasonPhrase;
+ response.Dispose();
+
+ // The GET failed
+
+ if (statusCode == HttpStatusCode.NotFound)
+ {
+ Tracer.Error("Not Found: {0} - '{1}'", fileName, requestUri.AbsoluteUri);
+ break;
+ }
+
+ retryable = IsRetryableStatus(statusCode);
+
+ // Build the status code error message
+ message = string.Format("{0} {1}: {2} - '{3}'", (int)statusCode, reasonPhrase, fileName, requestUri.AbsoluteUri);
+ }
+ catch (HttpRequestException ex)
+ {
+ SocketError socketError = SocketError.Success;
+ retryable = false;
+
+ Exception innerException = ex.InnerException;
+ while (innerException != null)
+ {
+ if (innerException is SocketException se)
+ {
+ socketError = se.SocketErrorCode;
+ retryable = IsRetryableSocketError(socketError);
+ break;
+ }
+
+ innerException = innerException.InnerException;
+ }
+
+ // Build the socket error message
+ message = string.Format($"HttpSymbolStore: {fileName} retryable {retryable} socketError {socketError} '{requestUri.AbsoluteUri}' {ex}");
+ }
+
+ // If the status code or socket error isn't some temporary or retryable condition, mark failure
+ if (!retryable)
+ {
+ MarkClientFailure();
+ Tracer.Error(message);
+ break;
+ }
+ else
+ {
+ Tracer.Warning(message);
+ }
+
+ // Retry the operation?
+ if (retries++ >= RetryCount)
+ {
+ break;
+ }
+
+ Tracer.Information($"HttpSymbolStore: retry #{retries}");
+
+ // Delay for a while before doing the retry
+ await Task.Delay(TimeSpan.FromMilliseconds((Math.Pow(2, retries) * 100) + new Random().Next(200)), token).ConfigureAwait(false);
+ }
+ return null;
+ }
+
+ public override void Dispose()
+ {
+ _client.Dispose();
+ _authenticatedClient?.Dispose();
+ base.Dispose();
+ }
+
+ private static readonly HashSet<HttpStatusCode> s_retryableStatusCodes = new()
+ {
+ HttpStatusCode.RequestTimeout,
+ HttpStatusCode.InternalServerError,
+ HttpStatusCode.BadGateway,
+ HttpStatusCode.ServiceUnavailable,
+ HttpStatusCode.GatewayTimeout,
+ };
+
+ /// <summary>
+ /// Returns true if the http status code is a temporary or retryable condition.
+ /// </summary>
+ protected bool IsRetryableStatus(HttpStatusCode status) => s_retryableStatusCodes.Contains(status);
+
+ private static readonly HashSet<SocketError> s_retryableSocketErrors = new()
+ {
+ SocketError.ConnectionReset,
+ SocketError.ConnectionAborted,
+ SocketError.Shutdown,
+ SocketError.TimedOut,
+ SocketError.TryAgain,
+ };
+
+ protected bool IsRetryableSocketError(SocketError se) => s_retryableSocketErrors.Contains(se);
+
+ /// <summary>
+ /// Marks this client as a failure where any subsequent calls to
+ /// GetFileStream() will return null.
+ /// </summary>
+ protected void MarkClientFailure()
+ {
+ _clientFailure = true;
+ }
+
+ public override bool Equals(object obj)
+ {
+ if (obj is HttpSymbolStore store)
+ {
+ return Uri.Equals(store.Uri);
+ }
+ return false;
+ }
+
+ public override int GetHashCode()
+ {
+ return Uri.GetHashCode();
+ }
+
+ public override string ToString()
+ {
+ return $"Server: {Uri}";
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Runtime.InteropServices;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace Microsoft.SymbolStore.SymbolStores
+{
+ public abstract class SymbolStore : IDisposable
+ {
+ /// <summary>
+ /// Next symbol store to chain if this store refuses the request
+ /// </summary>
+ public SymbolStore BackingStore { get; }
+
+ /// <summary>
+ /// Trace/logging source
+ /// </summary>
+ protected readonly ITracer Tracer;
+
+ public SymbolStore(ITracer tracer)
+ {
+ Tracer = tracer;
+ }
+
+ public SymbolStore(ITracer tracer, SymbolStore backingStore)
+ : this(tracer)
+ {
+ BackingStore = backingStore;
+ }
+
+ /// <summary>
+ /// Downloads the file or retrieves it from a cache from the symbol store chain.
+ /// </summary>
+ /// <param name="key">symbol index to retrieve</param>
+ /// <param name="token">to cancel requests</param>
+ /// <exception cref="InvalidChecksumException">
+ /// Thrown for a pdb file when its checksum
+ /// does not match the expected value.
+ /// </exception>
+ /// <returns>file or null if not found</returns>
+ public async Task<SymbolStoreFile> GetFile(SymbolStoreKey key, CancellationToken token)
+ {
+ SymbolStoreFile file = await GetFileInner(key, token).ConfigureAwait(false);
+ if (file == null)
+ {
+ if (BackingStore != null)
+ {
+ file = await BackingStore.GetFile(key, token).ConfigureAwait(false);
+ if (file != null)
+ {
+ await WriteFileInner(key, file).ConfigureAwait(false);
+ }
+ }
+ }
+ if (file != null)
+ {
+ // Reset stream to the beginning because the stream may have
+ // been read or written by the symbol store implementation.
+ file.Stream.Position = 0;
+ }
+ return file;
+ }
+
+ protected virtual Task<SymbolStoreFile> GetFileInner(SymbolStoreKey key, CancellationToken token)
+ {
+ return Task.FromResult<SymbolStoreFile>(null);
+ }
+
+ protected virtual Task WriteFileInner(SymbolStoreKey key, SymbolStoreFile file)
+ {
+ return Task.FromResult(0);
+ }
+
+ public virtual void Dispose()
+ {
+ BackingStore?.Dispose();
+ }
+
+ /// <summary>
+ /// Compares two file paths using OS specific casing.
+ /// </summary>
+ internal static bool IsPathEqual(string path1, string path2)
+ {
+#if !NET462
+ if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
+ {
+ return string.Equals(path1, path2);
+ }
+#endif
+ return StringComparer.OrdinalIgnoreCase.Equals(path1, path2);
+ }
+
+ internal static int HashPath(string path)
+ {
+#if !NET462
+ if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
+ {
+ return path.GetHashCode();
+ }
+#endif
+ return StringComparer.OrdinalIgnoreCase.GetHashCode(path);
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace Microsoft.SymbolStore.SymbolStores
+{
+ /// <summary>
+ /// The symbol store for the internal symweb symbol server that handles the "file.ptr" support.
+ /// </summary>
+ [Obsolete]
+ public sealed class SymwebHttpSymbolStore : HttpSymbolStore
+ {
+ /// <summary>
+ /// Create an instance of a http symbol store
+ /// </summary>
+ /// <param name="tracer">trace source for logging</param>
+ /// <param name="backingStore">next symbol store or null</param>
+ /// <param name="symbolServerUri">symbol server url</param>
+ /// <param name="personalAccessToken">PAT or null if no authentication</param>
+ public SymwebHttpSymbolStore(ITracer tracer, SymbolStore backingStore, Uri symbolServerUri, string personalAccessToken = null)
+ : base(tracer, backingStore, symbolServerUri, personalAccessToken)
+ {
+ }
+
+ protected override async Task<SymbolStoreFile> GetFileInner(SymbolStoreKey key, CancellationToken token)
+ {
+ SymbolStoreFile file = await base.GetFileInner(key, token).ConfigureAwait(false);
+ if (file != null)
+ {
+ return file;
+ }
+ Uri filePtrUri = GetRequestUri(key.IndexPrefix + "file.ptr");
+ Stream filePtrStream = await GetFileStream(key.FullPathName, filePtrUri, token).ConfigureAwait(false);
+ if (filePtrStream != null)
+ {
+ using (filePtrStream)
+ {
+ try
+ {
+ using (TextReader reader = new StreamReader(filePtrStream))
+ {
+ string filePtr = await reader.ReadToEndAsync().ConfigureAwait(false);
+ Tracer.Verbose("SymwebHttpSymbolStore: file.ptr '{0}'", filePtr);
+ if (filePtr.StartsWith("PATH:"))
+ {
+ filePtr = filePtr.Replace("PATH:", "");
+ Stream stream = File.OpenRead(filePtr);
+ return new SymbolStoreFile(stream, filePtr);
+ }
+ }
+ }
+ catch (Exception ex) when (ex is InvalidOperationException || ex is IOException)
+ {
+ Tracer.Error("SymwebHttpSymbolStore: {0}", ex.Message);
+ MarkClientFailure();
+ }
+ }
+ }
+ return null;
+ }
+ }
+}
set(DIASYMREADER_ARCH amd64)
endif()
- install(FILES ${NUGET_PACKAGES}/microsoft.diasymreader.native/16.11.27-beta1.23180.1/runtimes/win/native/Microsoft.DiaSymReader.Native.${DIASYMREADER_ARCH}.dll DESTINATION . )
+ install(FILES ${NUGET_PACKAGES}/microsoft.diasymreader.native/17.10.0-beta1.24272.1/runtimes/win/native/Microsoft.DiaSymReader.Native.${DIASYMREADER_ARCH}.dll DESTINATION . )
endif()
if(NOT ${CLR_MANAGED_BINARY_DIR} STREQUAL "")
</PropertyGroup>
<ItemGroup>
- <PackageReference Include="Microsoft.SymbolStore" Version="$(MicrosoftSymbolStoreVersion)" />
<PackageReference Include="Microsoft.Diagnostics.Runtime" Version="$(MicrosoftDiagnosticsRuntimeVersion)" />
- <PackageReference Include="System.Memory" Version="$(SystemMemoryVersion)" />
<PackageReference Include="Microsoft.DiaSymReader.Native" Version="$(MicrosoftDiaSymReaderNativeVersion)" Condition="'$(OS)' == 'Windows_NT'" />
+ <PackageReference Include="System.Memory" Version="$(SystemMemoryVersion)" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="$(MSBuildThisFileDirectory)..\..\Microsoft.Diagnostics.DebugServices\Microsoft.Diagnostics.DebugServices.csproj" />
<ProjectReference Include="$(MSBuildThisFileDirectory)..\..\Microsoft.Diagnostics.DebugServices.Implementation\Microsoft.Diagnostics.DebugServices.Implementation.csproj" />
<ProjectReference Include="$(MSBuildThisFileDirectory)..\..\Microsoft.Diagnostics.ExtensionCommands\Microsoft.Diagnostics.ExtensionCommands.csproj" />
+ <ProjectReference Include="$(MSBuildThisFileDirectory)..\..\Microsoft.SymbolStore\Microsoft.SymbolStore.csproj" />
<ProjectReference Include="$(MSBuildThisFileDirectory)..\SOS.Hosting\SOS.Hosting.csproj" />
</ItemGroup>
</Project>
<ItemGroup>
<PackageReference Include="Microsoft.Diagnostics.Runtime" Version="$(MicrosoftDiagnosticsRuntimeVersion)" />
- <PackageReference Include="Microsoft.SymbolStore" Version="$(MicrosoftSymbolStoreVersion)" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="$(MSBuildThisFileDirectory)..\SOS.InstallHelper\SOS.InstallHelper.csproj" />
<ProjectReference Include="$(MSBuildThisFileDirectory)..\..\Microsoft.Diagnostics.DebugServices\Microsoft.Diagnostics.DebugServices.csproj" />
+ <ProjectReference Include="$(MSBuildThisFileDirectory)..\..\Microsoft.SymbolStore\Microsoft.SymbolStore.csproj" />
</ItemGroup>
</Project>
#region Symbol service delegates
- [UnmanagedFunctionPointer(CallingConvention.Winapi)]
- private delegate bool IsSymbolStoreEnabledDelegate(
- [In] IntPtr self);
-
- [UnmanagedFunctionPointer(CallingConvention.Winapi)]
- private delegate bool InitializeSymbolStoreDelegate(
- [In] IntPtr self,
- [In] bool msdl,
- [In] bool symweb,
- [In] string symbolServerPath,
- [In] string authToken,
- [In] int timeoutInMinutes,
- [In] string symbolCachePath,
- [In] string symbolDirectoryPath);
-
[UnmanagedFunctionPointer(CallingConvention.Winapi)]
private delegate bool ParseSymbolPathDelegate(
[In] IntPtr self,
[In] string windowsSymbolPath);
- [UnmanagedFunctionPointer(CallingConvention.Winapi)]
- private delegate void DisplaySymbolStoreDelegate(
- [In] IntPtr self,
- [In] WriteLine writeLine);
-
- [UnmanagedFunctionPointer(CallingConvention.Winapi)]
- private delegate void DisableSymbolStoreDelegate(
- [In] IntPtr self);
-
- [UnmanagedFunctionPointer(CallingConvention.Winapi)]
- private delegate void LoadNativeSymbolsDelegate(
- [In] IntPtr self,
- [In] SymbolFileCallback callback,
- [In] IntPtr parameter,
- [In] RuntimeConfiguration config,
- [In] string moduleFilePath,
- [In] ulong address,
- [In] uint size);
-
- [UnmanagedFunctionPointer(CallingConvention.Winapi)]
- private delegate void LoadNativeSymbolsFromIndexDelegate(
- [In] IntPtr self,
- [In] SymbolFileCallback callback,
- [In] IntPtr parameter,
- [In] RuntimeConfiguration config,
- [In] string moduleFilePath,
- [In] bool specialKeys,
- [In] int moduleIndexSize,
- [In] IntPtr moduleIndex);
-
[UnmanagedFunctionPointer(CallingConvention.Winapi)]
private delegate IntPtr LoadSymbolsForModuleDelegate(
[In] IntPtr self,
<LogDir>$(RootBinDir)\TestResults\$(TargetConfiguration)\sos.unittests_$(Timestamp)</LogDir>
<DumpDir>$(RootBinDir)\tmp\$(TargetConfiguration)\dumps</DumpDir>
<CDBHelperExtension>$(InstallDir)\runcommand.dll</CDBHelperExtension>
+ <MicrosoftNETCoreAppPath>$(DotNetRoot)\shared\Microsoft.NETCore.App\</MicrosoftNETCoreAppPath>
<SetHostExe>true</SetHostExe>
<SetFxVersion>true</SetFxVersion>
Debugger.Break(); // GCWhere should show temp in Gen2
GC.Collect();
int genFourthTime = GC.GetGeneration(temp);
- Console.WriteLine("1st: {0} 2nd: {1}, 3rd: {2} 4th: {3}", genFirstTime, genSecondTime, genThirdTime, genFourthTime);
+ Console.WriteLine($"1st: {genFirstTime} 2nd: {genSecondTime}, 3rd: {genThirdTime} 4th: {genFourthTime}");
Debugger.Break(); // GCWhere should show temp in Gen2
PrintIt(temp);
GC.KeepAlive(temp);
private static readonly string _nodePath = _root + @"Microsoft\Windows NT\CurrentVersion\";
private static readonly string _auxiliaryNode = _nodePath + "MiniDumpAuxiliaryDlls";
private static readonly string _knownNode = _nodePath + "KnownManagedDebuggingDlls";
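+ // Win11+ minidump setting that disables the auxiliary provider signature check.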
+ private static readonly string _settingsNode = _nodePath + "MiniDumpSettings";
+ private static readonly string _disableCheckValue = "DisableAuxProviderSignatureCheck";
private HashSet<string> _paths;
{
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
{
- // Create a unique list of all the runtime paths used by the tests
+ // Create the settings key for newer Windows versions (11 or greater)
+ try
+ {
+ using RegistryKey settingsKey = Registry.LocalMachine.CreateSubKey(_settingsNode, writable: true);
+ settingsKey.SetValue(_disableCheckValue, 1, RegistryValueKind.DWord);
+ }
+ catch (Exception ex) when (ex is UnauthorizedAccessException)
+ {
+ }
+
+ // Create a unique list of all the installed test runtime paths
HashSet<string> paths = new();
foreach (TestConfiguration config in TestRunConfiguration.Instance.Configurations)
{
- if (config.IsNETCore && config.RuntimeFrameworkVersionMajor >= 8)
+ // Enumerate the configs until one defines this property
+ if (config.AllSettings.TryGetValue("MicrosoftNETCoreAppPath", out string path))
{
- string path = config.RuntimeSymbolsPath;
if (!string.IsNullOrEmpty(path))
{
- paths.Add(path);
+ path = TestConfiguration.MakeCanonicalPath(path);
+ try
+ {
+ foreach (string directory in Directory.GetDirectories(path))
+ {
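+ // Version directories are named like "9.0.0"; select only the 9.x runtimes.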
+ if (Path.GetFileName(directory).StartsWith("9"))
+ {
+ paths.Add(directory);
+ }
+ }
+ }
+ catch (Exception ex) when (ex is IOException or UnauthorizedAccessException)
+ {
+ }
+ break;
}
}
}
- try
+ if (paths.Count > 0)
{
- using RegistryKey auxiliaryKey = Registry.LocalMachine.CreateSubKey(_auxiliaryNode, writable: true);
- using RegistryKey knownKey = Registry.LocalMachine.CreateSubKey(_knownNode, writable: true);
+ // Now try to create the keys for the older Windows versions
+ try
+ {
+ using RegistryKey auxiliaryKey = Registry.LocalMachine.CreateSubKey(_auxiliaryNode, writable: true);
+ using RegistryKey knownKey = Registry.LocalMachine.CreateSubKey(_knownNode, writable: true);
- foreach (string path in paths)
+ foreach (string path in paths)
+ {
+ string dacPath = Path.Combine(path, "mscordaccore.dll");
+ string runtimePath = Path.Combine(path, "coreclr.dll");
+ knownKey.SetValue(dacPath, 0, RegistryValueKind.DWord);
+ auxiliaryKey.SetValue(runtimePath, dacPath, RegistryValueKind.String);
+ }
+
+ // Save the paths after writing them successfully to the registry
+ _paths = paths;
+ }
+ catch (Exception ex) when (ex is UnauthorizedAccessException)
{
- string dacPath = Path.Combine(path, "mscordaccore.dll");
- string runtimePath = Path.Combine(path, "coreclr.dll");
- knownKey.SetValue(dacPath, 0, RegistryValueKind.DWord);
- auxiliaryKey.SetValue(runtimePath, dacPath, RegistryValueKind.String);
}
-
- // Save the paths after writing them successfully to registry
- _paths = paths;
- }
- catch (Exception ex) when (ex is UnauthorizedAccessException)
- {
}
}
}
ProcessRunner processRunner = new ProcessRunner(exePath, ReplaceVariables(variables, arguments.ToString())).
WithEnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0").
WithEnvironmentVariable("DOTNET_ROOT", config.DotNetRoot).
+ WithEnvironmentVariable("DOTNET_LegacyExceptionHandling", "1").
WithRuntimeConfiguration("DbgEnableElfDumpOnMacOS", "1").
WithLog(new TestRunner.TestLogger(outputHelper.IndentedOutput)).
WithTimeout(TimeSpan.FromMinutes(10));
ProcessRunner processRunner = new ProcessRunner(debuggerPath, ReplaceVariables(variables, arguments.ToString())).
WithEnvironmentVariable("DOTNET_MULTILEVEL_LOOKUP", "0").
WithEnvironmentVariable("DOTNET_ROOT", config.DotNetRoot).
+ WithEnvironmentVariable("DOTNET_LegacyExceptionHandling", "1").
WithLog(scriptLogger).
WithTimeout(TimeSpan.FromMinutes(10));
VERIFY:\s*Class Name:\s+SymbolTestApp.Program\s+
VERIFY:\s*File:\s+.*SymbolTestApp\.(dll|exe)\s+
+# Issue: https://github.com/dotnet/diagnostics/issues/4654
+!IFDEF:ALPINE
# Verify DumpMT
SOSCOMMAND:DumpMT <POUT>\s*Method Table:\s+(<HEXVAL>)\s+<POUT>
VERIFY:\s*Name:\s+SymbolTestApp.Program\s+
VERIFY:\s*File:\s+.*SymbolTestApp\.(dll|exe)\s+
+ENDIF:ALPINE
SOSCOMMAND:FinalizeQueue
VERIFY:\s*SyncBlocks to be cleaned up:\s+<DECVAL>\s+
{
ULONG cbCur = 0;
ULONG cb;
- ULONG ulData = NULL;
+ ULONG ulData = (TADDR)0;
ULONG ulArgs;
HRESULT hr = NOERROR;
return objPtr;
}
}
- return NULL;
+ return (TADDR)0;
}
{
DWORD_PTR runtimeTypeHandle = GetObj(tokenArray, RidFromToken(token));
- DWORD_PTR runtimeType = NULL;
+ DWORD_PTR runtimeType = (TADDR)0;
MOVE(runtimeType, runtimeTypeHandle + sizeof(DWORD_PTR));
int offset = GetObjFieldOffset(runtimeType, W("m_handle"));
- DWORD_PTR methodTable = NULL;
+ DWORD_PTR methodTable = (TADDR)0;
MOVE(methodTable, runtimeType + offset);
if (NameForMT_s(methodTable, g_mdName,mdNameLen))
CLRDATA_ADDRESS runtimeMethodHandle = GetObj(tokenArray, RidFromToken(token));
int offset = GetObjFieldOffset(runtimeMethodHandle, W("m_value"));
- TADDR runtimeMethodInfo = NULL;
+ TADDR runtimeMethodInfo = (TADDR)0;
MOVE(runtimeMethodInfo, runtimeMethodHandle+offset);
offset = GetObjFieldOffset(runtimeMethodInfo, W("m_handle"));
- TADDR methodDesc = NULL;
+ TADDR methodDesc = (TADDR)0;
MOVE(methodDesc, runtimeMethodInfo+offset);
NameForMD_s((DWORD_PTR)methodDesc, g_mdName, mdNameLen);
TADDR Object::GetMT() const
{
- if (mMT == NULL)
+ if (mMT == (TADDR)0)
{
TADDR temp;
if (FAILED(MOVE(temp, mAddress)))
sos::Throw<DataRead>("Object %s has an invalid method table.", DMLListNearObj(mAddress));
- if (temp == NULL)
+ if (temp == (TADDR)0)
sos::Throw<HeapCorruption>("Object %s has an invalid method table.", DMLListNearObj(mAddress));
mMT = temp & ~METHODTABLE_PTR_LOW_BITMASK;
TADDR Object::GetComponentMT() const
{
- if (mMT != NULL && mMT != sos::MethodTable::GetArrayMT())
- return NULL;
+ if (mMT != (TADDR)0 && mMT != sos::MethodTable::GetArrayMT())
+ return (TADDR)0;
DacpObjectData objData;
if (FAILED(objData.Request(g_sos, TO_CDADDR(mAddress))))
sos::Throw<DataRead>("Failed to request object data for %s.", DMLListNearObj(mAddress));
- if (mMT == NULL)
+ if (mMT == (TADDR)0)
mMT = TO_TADDR(objData.MethodTable) & ~METHODTABLE_PTR_LOW_BITMASK;
return TO_TADDR(objData.ElementTypeHandle);
if (FAILED(MOVE(dwTmp, dwTmp)))
return false;
- if (dwTmp != NULL)
+ if (dwTmp != (TADDR)0)
{
DacpObjectData objData;
if (FAILED(objData.Request(g_sos, TO_CDADDR(dwTmp))))
out.ThreadId = header & SBLK_MASK_LOCK_THREADID;
out.Recursion = (header & SBLK_MASK_LOCK_RECLEVEL) >> SBLK_RECLEVEL_SHIFT;
- CLRDATA_ADDRESS threadPtr = NULL;
+ CLRDATA_ADDRESS threadPtr = (TADDR)0;
if (g_sos->GetThreadFromThinlockID(out.ThreadId, &threadPtr) != S_OK)
{
- out.ThreadPtr = NULL;
+ out.ThreadPtr = (TADDR)0;
}
else
{
out.ThreadPtr = TO_TADDR(threadPtr);
}
- return out.ThreadId != 0 && out.ThreadPtr != NULL;
+ return out.ThreadId != 0 && out.ThreadPtr != (TADDR)0;
}
bool Object::GetStringData(__out_ecount(size) WCHAR *buffer, size_t size) const
sos::Throw<DataRead>("Failed to read object data at %p.", mAddress);
// We get the method table for free here, if we don't have it already.
- SOS_Assert((mMT == NULL) || (mMT == TO_TADDR(stInfo.methodTable)));
- if (mMT == NULL)
+ SOS_Assert((mMT == (TADDR)0) || (mMT == TO_TADDR(stInfo.methodTable)));
+ if (mMT == (TADDR)0)
mMT = TO_TADDR(stInfo.methodTable);
return (size_t)stInfo.m_StringLength;
INIT_API_PROBE_MANAGED("dumpmd");
MINIDUMP_NOT_SUPPORTED();
- DWORD_PTR dwStartAddr = NULL;
+ DWORD_PTR dwStartAddr = (TADDR)0;
BOOL dml = FALSE;
CMDOption option[] =
{
INIT_API_PROBE_MANAGED("dumpil");
MINIDUMP_NOT_SUPPORTED();
- DWORD_PTR dwStartAddr = NULL;
- DWORD_PTR dwDynamicMethodObj = NULL;
+ DWORD_PTR dwStartAddr = (TADDR)0;
+ DWORD_PTR dwDynamicMethodObj = (TADDR)0;
BOOL dml = FALSE;
BOOL fILPointerDirectlySpecified = FALSE;
}
EnableDMLHolder dmlHolder(dml);
- if (dwStartAddr == NULL)
+ if (dwStartAddr == (TADDR)0)
{
ExtOut("Must pass a valid expression\n");
return Status;
dwDynamicMethodObj = dwStartAddr;
}
- if (dwDynamicMethodObj == NULL)
+ if (dwDynamicMethodObj == (TADDR)0)
{
// We have been given a MethodDesc
DacpMethodDescData MethodDescData;
if (MethodDescData.bIsDynamic && MethodDescData.managedDynamicMethodObject)
{
dwDynamicMethodObj = TO_TADDR(MethodDescData.managedDynamicMethodObject);
- if (dwDynamicMethodObj == NULL)
+ if (dwDynamicMethodObj == (TADDR)0)
{
ExtOut("Unable to print IL for DynamicMethodDesc %p\n", SOS_PTR(dwDynamicMethodObj));
return Status;
else
{
GetILAddressResult result = GetILAddress(MethodDescData);
- if (std::get<0>(result) == NULL)
+ if (std::get<0>(result) == (TADDR)0)
{
ExtOut("ilAddr is %p\n", SOS_PTR(std::get<0>(result)));
return E_FAIL;
}
}
- if (dwDynamicMethodObj != NULL)
+ if (dwDynamicMethodObj != (TADDR)0)
{
// We have a DynamicMethod managed object, let us visit the town and paint.
DacpObjectData codeArray;
ExtOut("mdToken: %p\n", SOS_PTR(mtdata.cl));
ExtOut("File: %S\n", fileName);
- CLRDATA_ADDRESS ParentEEClass = NULL;
+ CLRDATA_ADDRESS ParentEEClass = (TADDR)0;
if (mtdata.ParentMethodTable)
{
DacpMethodTableData mtdataparent;
if (vMethodTableFields.wNumInstanceFields + vMethodTableFields.wNumStaticFields > 0)
{
- DisplayFields(methodTable, &mtdata, &vMethodTableFields, NULL, TRUE, FALSE);
+ DisplayFields(methodTable, &mtdata, &vMethodTableFields, (TADDR)0, TRUE, FALSE);
}
}
table.WriteRow("mdToken:", Pointer(vMethTable.cl));
table.WriteRow("File:", fileName[0] ? fileName : W("Unknown Module"));
- if (vMethTableCollectible.LoaderAllocatorObjectHandle != NULL)
+ if (vMethTableCollectible.LoaderAllocatorObjectHandle != (TADDR)0)
{
TADDR loaderAllocator;
if (SUCCEEDED(MOVE(loaderAllocator, vMethTableCollectible.LoaderAllocatorObjectHandle)))
table.WriteColumn(0, entry);
table.WriteColumn(1, MethodDescPtr(methodDesc));
- if (jitType == TYPE_UNKNOWN && methodDesc != NULL)
+ if (jitType == TYPE_UNKNOWN && methodDesc != (TADDR)0)
{
// We can get a more accurate jitType from NativeCodeAddr of the methoddesc,
// because the methodtable entry hasn't always been patched.
return Status;
}
- if (objData.RCW != NULL)
+ if (objData.RCW != (TADDR)0)
{
DMLOut("RCW: %s\n", DMLRCWrapper(objData.RCW));
}
- if (objData.CCW != NULL)
+ if (objData.CCW != (TADDR)0)
{
DMLOut("CCW: %s\n", DMLCCWrapper(objData.CCW));
}
ExtOut("Tracked Type: %s\n", isTrackedType ? "true" : "false");
if (hasTaggedMemory)
{
- CLRDATA_ADDRESS taggedMemory = NULL;
+ CLRDATA_ADDRESS taggedMemory = (TADDR)0;
size_t taggedMemorySizeInBytes = 0;
(void)sos11->GetTaggedMemory(objAddr, &taggedMemory, &taggedMemorySizeInBytes);
DMLOut("Tagged Memory: %s (%" POINTERSIZE_TYPE "d(0x%" POINTERSIZE_TYPE "x) bytes)\n",
{
TADDR tbSetPtr;
MOVE(tbSetPtr, p_PermSet + iOffset);
- if (tbSetPtr != NULL)
+ if (tbSetPtr != (TADDR)0)
{
DacpObjectData tbSetData;
if ((Status=tbSetData.Request(g_sos, TO_CDADDR(tbSetPtr))) != S_OK)
{
DWORD_PTR PermsArrayPtr;
MOVE(PermsArrayPtr, tbSetPtr + iOffset);
- if (PermsArrayPtr != NULL)
+ if (PermsArrayPtr != (TADDR)0)
{
// Print all the permissions in the array
DacpObjectData objData;
{
DWORD_PTR PermObjPtr;
MOVE(PermObjPtr, tbSetPtr + iOffset);
- if (PermObjPtr != NULL)
+ if (PermObjPtr != (TADDR)0)
{
// Print the permission object
return PrintObj(PermObjPtr);
}
TADDR elementAddress = TO_TADDR(objData.ArrayDataPtr + offset * objData.dwComponentSize);
- TADDR p_Element = NULL;
+ TADDR p_Element = (TADDR)0;
if (isElementValueType)
{
p_Element = elementAddress;
{
PrintVC(TO_TADDR(objData.ElementTypeHandle), elementAddress, !flags.bNoFieldsForElement);
}
- else if (p_Element != NULL)
+ else if (p_Element != (TADDR)0)
{
PrintObj(p_Element, !flags.bNoFieldsForElement);
}
int invocationCount;
MOVE(invocationCount, delegateObj.GetAddress() + offset);
- if (invocationList == NULL)
+ if (invocationList == (TADDR)0)
{
CLRDATA_ADDRESS md;
DMLOut("%s ", DMLObject(target));
{
CLRDATA_ADDRESS elementPtr;
MOVE(elementPtr, TO_CDADDR(objData.ArrayDataPtr + (i * objData.dwComponentSize)));
- if (elementPtr != NULL && sos::IsObject(elementPtr, false))
+ if (elementPtr != (TADDR)0 && sos::IsObject(elementPtr, false))
{
delegatesRemaining.push_back(elementPtr);
}
// We want to follow back until we get the mt for System.Exception
DacpMethodTableData dmtd;
CLRDATA_ADDRESS walkMT = mtObj;
- while(walkMT != NULL)
+ while(walkMT != (TADDR)0)
{
if (dmtd.Request(g_sos, walkMT) != S_OK)
{
}
walkMT = dmtd.ParentMethodTable;
}
- return NULL;
+ return (TADDR)0;
}
CLRDATA_ADDRESS isSecurityExceptionObj(CLRDATA_ADDRESS mtObj)
// We want to follow back until we get the mt for System.Exception
DacpMethodTableData dmtd;
CLRDATA_ADDRESS walkMT = mtObj;
- while(walkMT != NULL)
+ while(walkMT != (TADDR)0)
{
if (dmtd.Request(g_sos, walkMT) != S_OK)
{
}
walkMT = dmtd.ParentMethodTable;
}
- return NULL;
+ return (TADDR)0;
}
// Fill the passed in buffer with a text header for generated exception information.
ExtOut("%S", pwsz);
}
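+// Returns the address of the first element of a stack trace array.
+// This code is accessing the StackTraceInfo class in the runtime.
+// See: https://github.com/dotnet/runtime/blob/main/src/coreclr/vm/clrex.h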
+DWORD_PTR GetFirstArrayElementPointer(TADDR taArray)
+{
+#ifdef _TARGET_WIN64_
+ return taArray + sizeof(DWORD_PTR) + sizeof(DWORD) + sizeof(DWORD);
+#else
+ return taArray + sizeof(DWORD_PTR) + sizeof(DWORD);
+#endif // _TARGET_WIN64_
+}
+
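+// Reads the stack trace array address from an exception object, preferring the DAC
+// exception data when available and falling back to the _stackTrace field offset.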
+TADDR GetStackTraceArray(CLRDATA_ADDRESS taExceptionObj, DacpObjectData *pExceptionObjData, DacpExceptionObjectData *pExcData)
+{
+ TADDR taStackTrace = 0;
+ if (pExcData)
+ {
+ taStackTrace = TO_TADDR(pExcData->StackTrace);
+ }
+ else
+ {
+ int iOffset = GetObjFieldOffset (taExceptionObj, pExceptionObjData->MethodTable, W("_stackTrace"));
+ if (iOffset > 0)
+ {
+ MOVE(taStackTrace, taExceptionObj + iOffset);
+ }
+ }
+
+ if (taStackTrace)
+ {
+ // If the stack trace is object[], the stack trace array is actually referenced by its first element
+ sos::Object objStackTrace(taStackTrace);
+ TADDR stackTraceComponentMT = objStackTrace.GetComponentMT();
+ if (stackTraceComponentMT == g_special_usefulGlobals.ObjectMethodTable)
+ {
+ DWORD_PTR arrayDataPtr = GetFirstArrayElementPointer(taStackTrace);
+ MOVE(taStackTrace, arrayDataPtr);
+ }
+ }
+
+ return taStackTrace;
+}
+
HRESULT FormatException(CLRDATA_ADDRESS taObj, BOOL bLineNumbers = FALSE)
{
HRESULT Status = S_OK;
// Make sure it is an exception object, and get the MT of Exception
CLRDATA_ADDRESS exceptionMT = isExceptionObj(objData.MethodTable);
- if (exceptionMT == NULL)
+ if (exceptionMT == (TADDR)0)
{
ExtOut("Not a valid exception object\n");
return Status;
: IsAsyncException(taObj, objData.MethodTable);
{
- TADDR taStackTrace = 0;
- if (bGotExcData)
- {
- taStackTrace = TO_TADDR(excData.StackTrace);
- }
- else
- {
- int iOffset = GetObjFieldOffset (taObj, objData.MethodTable, W("_stackTrace"));
- if (iOffset > 0)
- {
- MOVE(taStackTrace, taObj + iOffset);
- }
- }
+ TADDR taStackTrace = GetStackTraceArray(taObj, &objData, bGotExcData ? &excData : NULL);
ExtOut("StackTrace (generated):\n");
if (taStackTrace)
if (arrayLen != 0 && hr == S_OK)
{
- // This code is accessing the StackTraceInfo class in the runtime.
- // See: https://github.com/dotnet/runtime/blob/main/src/coreclr/vm/clrex.h
-#ifdef _TARGET_WIN64_
- DWORD_PTR dataPtr = taStackTrace + sizeof(DWORD_PTR) + sizeof(DWORD) + sizeof(DWORD);
-#else
- DWORD_PTR dataPtr = taStackTrace + sizeof(DWORD_PTR) + sizeof(DWORD);
-#endif // _TARGET_WIN64_
+ DWORD_PTR dataPtr = GetFirstArrayElementPointer(taStackTrace);
size_t stackTraceSize = 0;
MOVE (stackTraceSize, dataPtr);
ExtOut("HResult: %lx\n", hResult);
}
- if (isSecurityExceptionObj(objData.MethodTable) != NULL)
+ if (isSecurityExceptionObj(objData.MethodTable) != (TADDR)0)
{
// We have a SecurityException Object: print out the debugString if present
int iOffset = GetObjFieldOffset (taObj, objData.MethodTable, W("m_debugString"));
return E_INVALIDARG;
}
- CheckBreakingRuntimeChange();
+ if (CheckBreakingRuntimeChange())
+ {
+ return E_FAIL;
+ }
if (bLineNumbers)
{
}
EnableDMLHolder dmlHolder(dml);
- DWORD_PTR p_Object = NULL;
+ DWORD_PTR p_Object = (TADDR)0;
if (nArg == 0)
{
if (bCCW)
CLRDATA_ADDRESS threadAddr = GetCurrentManagedThread();
DacpThreadData Thread;
- if ((threadAddr == NULL) || (Thread.Request(g_sos, threadAddr) != S_OK))
+ if ((threadAddr == (TADDR)0) || (Thread.Request(g_sos, threadAddr) != S_OK))
{
ExtOut("The current thread is unmanaged\n");
return Status;
}
- DWORD_PTR dwAddr = NULL;
+ DWORD_PTR dwAddr = (TADDR)0;
if ((!SafeReadMemory(TO_TADDR(Thread.lastThrownObjectHandle),
&dwAddr,
- sizeof(dwAddr), NULL)) || (dwAddr==NULL))
+ sizeof(dwAddr), NULL)) || (dwAddr==(TADDR)0))
{
ExtOut("There is no current managed exception on this thread\n");
}
CLRDATA_ADDRESS threadAddr = GetCurrentManagedThread();
DacpThreadData Thread;
- if ((threadAddr == NULL) || (Thread.Request(g_sos, threadAddr) != S_OK))
+ if ((threadAddr == (TADDR)0) || (Thread.Request(g_sos, threadAddr) != S_OK))
{
ExtOut("The current thread is unmanaged\n");
return E_INVALIDARG;
currentNested = next;
}
- while(currentNested != NULL);
+ while(currentNested != (TADDR)0);
}
return Status;
}
INIT_API_PROBE_MANAGED("dumpvc");
MINIDUMP_NOT_SUPPORTED();
- DWORD_PTR p_MT = NULL;
- DWORD_PTR p_Object = NULL;
+ DWORD_PTR p_MT = (TADDR)0;
+ DWORD_PTR p_Object = (TADDR)0;
BOOL dml = FALSE;
CMDOption option[] =
{
ExtOut(" orphaned ");
}
- else if (syncBlockData.HoldingThread != NULL)
+ else if (syncBlockData.HoldingThread != (TADDR)0)
{
DacpThreadData Thread;
if ((Status = Thread.Request(g_sos, syncBlockData.HoldingThread)) != S_OK)
MINIDUMP_NOT_SUPPORTED();
- DWORD_PTR p_ModuleAddr = NULL;
+ DWORD_PTR p_ModuleAddr = (TADDR)0;
BOOL bMethodTables = FALSE;
BOOL bProfilerModified = FALSE;
BOOL dml = FALSE;
}
DomainInfo(&appDomain);
- if (adsData.sharedDomain != NULL)
+ if (adsData.sharedDomain != (TADDR)0)
{
ExtOut("--------------------------------------\n");
DMLOut("Shared Domain: %s\n", DMLDomain(adsData.sharedDomain));
}
TADDR taLTOH;
- if (Thread.lastThrownObjectHandle != NULL)
+ if (Thread.lastThrownObjectHandle != (TADDR)0)
{
if (SafeReadMemory(TO_TADDR(Thread.lastThrownObjectHandle), &taLTOH, sizeof(taLTOH), NULL))
{
- if (taLTOH != NULL)
+ if (taLTOH != (TADDR)0)
{
ULONG id;
if (g_ExtSystem->GetThreadIdBySystemId(Thread.osThreadId, &id) == S_OK)
if (bSupported)
{
- CheckBreakingRuntimeChange();
-
+ if (CheckBreakingRuntimeChange())
+ {
+ return E_FAIL;
+ }
HRESULT Status2 = PrintSpecialThreads();
if (!SUCCEEDED(Status2))
Status = Status2;
HRESULT ResolvePendingNonModuleBoundBreakpoint(TADDR mod, PendingBreakpoint *pCur, SymbolReader* pSymbolReader)
{
// This function only works with pending breakpoints that are not module bound.
- if (pCur->pModule == NULL)
+ if (pCur->pModule == (TADDR)0)
{
if (pCur->szModuleName[0] != L'\0')
{
int lineNumber = 0;
size_t Offset = 0;
- DWORD_PTR pMD = NULL;
+ DWORD_PTR pMD = (TADDR)0;
BOOL fNoFutureModule = FALSE;
BOOL fList = FALSE;
size_t clearItem = 0;
bool fIsFilename = false;
int commandsParsed = 0;
- if (pMD != NULL)
+ if (pMD != (TADDR)0)
{
if (nArg != 0)
{
BOOL bNeedNotificationExceptions = FALSE;
- if (pMD == NULL)
+ if (pMD == (TADDR)0)
{
int numModule = 0;
int numMethods = 0;
// wait for the module load notification.
if (!fIsFilename)
{
- g_bpoints.Add(ModuleName, FunctionName, NULL, (DWORD)Offset);
+ g_bpoints.Add(ModuleName, FunctionName, (TADDR)0, (DWORD)Offset);
}
else
{
- g_bpoints.Add(Filename, lineNumber, NULL);
+ g_bpoints.Add(Filename, lineNumber, (TADDR)0);
}
if (g_clrData != nullptr)
{
INIT_API();
MINIDUMP_NOT_SUPPORTED();
- DWORD_PTR p_Object = NULL;
+ DWORD_PTR p_Object = (TADDR)0;
BOOL dml = FALSE;
CMDOption option[] =
CLRDATA_ADDRESS appDomain = GetAppDomain (TO_CDADDR(p_Object));
- if (appDomain != NULL)
+ if (appDomain != (TADDR)0)
{
DMLOut("AppDomain: %s\n", DMLDomain(appDomain));
if (appDomain == adstore.sharedDomain)
INIT_API_PROBE_MANAGED("ehinfo");
MINIDUMP_NOT_SUPPORTED();
- DWORD_PTR dwStartAddr = NULL;
+ DWORD_PTR dwStartAddr = (TADDR)0;
BOOL dml = FALSE;
CMDOption option[] =
INIT_API_PROBE_MANAGED("gcinfo");
MINIDUMP_NOT_SUPPORTED();
- TADDR taStartAddr = NULL;
+ TADDR taStartAddr = (TADDR)0;
TADDR taGCInfoAddr;
BOOL dml = FALSE;
GetILAddressResult GetILAddress(const DacpMethodDescData& MethodDescData)
{
- GetILAddressResult error = std::make_tuple(NULL, nullptr);
- TADDR ilAddr = NULL;
+ GetILAddressResult error = std::make_tuple((TADDR)0, nullptr);
+ TADDR ilAddr = (TADDR)0;
struct DacpProfilerILData ilData;
ReleaseHolder<ISOSDacInterface7> sos7;
if (SUCCEEDED(g_sos->QueryInterface(__uuidof(ISOSDacInterface7), &sos7)) &&
return error;
}
- if (ilAddr == NULL)
+ if (ilAddr == (TADDR)0)
{
ULONG pRva;
DWORD dwFlags;
ilAddr = TO_TADDR(ilAddrClr);
}
- if (ilAddr == NULL)
+ if (ilAddr == (TADDR)0)
{
ExtOut("Unknown error in reading function IL\n");
return error;
INIT_API();
MINIDUMP_NOT_SUPPORTED();
- DWORD_PTR dwStartAddr = NULL;
+ DWORD_PTR dwStartAddr = (TADDR)0;
BOOL fWithGCInfo = FALSE;
BOOL fWithEHInfo = FALSE;
BOOL bSuppressLines = FALSE;
}
GetILAddressResult result = GetILAddress(MethodDescData);
- if (std::get<0>(result) == NULL)
+ if (std::get<0>(result) == (TADDR)0)
{
ExtOut("ilAddr is %p\n", SOS_PTR(std::get<0>(result)));
return E_FAIL;
}
};
- if (codeHeaderData.ColdRegionStart != NULL)
+ if (codeHeaderData.ColdRegionStart != (TADDR)0)
{
ExtOut("Begin %p, size %x. Cold region begin %p, size %x\n",
SOS_PTR(codeHeaderData.MethodStart), codeHeaderData.HotRegionSize,
}
}
- if (codeHeaderData.ColdRegionStart == NULL)
+ if (codeHeaderData.ColdRegionStart == (TADDR)0)
{
g_targetMachine->Unassembly (
(DWORD_PTR) codeHeaderData.MethodStart,
HRESULT Status =
g_sos->GetMethodDescData(
TO_CDADDR(methodDesc),
- dwStartAddr == methodDesc ? NULL : dwStartAddr,
+ dwStartAddr == methodDesc ? (TADDR)0 : dwStartAddr,
&MethodDescData,
0, // cRevertedRejitVersions
NULL, // rgRevertedRejitData
return E_FAIL;
}
- CheckBreakingRuntimeChange();
+ if (CheckBreakingRuntimeChange())
+ {
+ return E_FAIL;
+ }
+
LoadRuntimeSymbols();
const char* fileName = "StressLog.txt";
- CLRDATA_ADDRESS StressLogAddress = NULL;
+ CLRDATA_ADDRESS StressLogAddress = (TADDR)0;
StringHolder sFileName, sLogAddr;
CMDOption option[] =
StressLogAddress = GetExpression(sLogAddr.data);
}
- if (StressLogAddress == NULL)
+ if (StressLogAddress == (TADDR)0)
{
if (g_bDacBroken)
{
}
}
- if (StressLogAddress == NULL)
+ if (StressLogAddress == (TADDR)0)
{
ExtOut("Please provide the -addr argument for the address of the stress log, since no recognized runtime is loaded.\n");
return E_FAIL;
LONG_PTR gen = -100; // initialized outside the legal range: [-1, 2]
StringHolder sgen;
- TADDR taObj = NULL;
+ TADDR taObj = (TADDR)0;
BOOL dml = FALSE;
size_t nArg;
if (adsData.Request(g_sos) != S_OK)
return FALSE;
- LONG numSpecialDomains = (adsData.sharedDomain != NULL) ? 2 : 1;
+ LONG numSpecialDomains = (adsData.sharedDomain != (TADDR)0) ? 2 : 1;
m_numDomains = adsData.DomainCount + numSpecialDomains;
ArrayHolder<CLRDATA_ADDRESS> pArray = new NOTHROW CLRDATA_ADDRESS[m_numDomains];
if (pArray == NULL)
return FALSE;
int i = 0;
- if (adsData.sharedDomain != NULL)
+ if (adsData.sharedDomain != (TADDR)0)
{
pArray[i++] = adsData.sharedDomain;
}
CLRDATA_ADDRESS threadAddr = GetCurrentManagedThread();
DacpThreadData Thread;
- if ((threadAddr == NULL) || (Thread.Request(g_sos, threadAddr) != S_OK))
+ if ((threadAddr == (TADDR)0) || (Thread.Request(g_sos, threadAddr) != S_OK))
{
ExtOut("The current thread is unmanaged\n");
return Status;
BOOL bAsync = bGotExcData ? IsAsyncException(excData)
: IsAsyncException(cdaObj, objData.MethodTable);
- DWORD_PTR arrayPtr;
- if (bGotExcData)
- {
- arrayPtr = TO_TADDR(excData.StackTrace);
- }
- else
- {
- iOffset = GetObjFieldOffset (cdaObj, objData.MethodTable, W("_stackTrace"));
- MOVE (arrayPtr, TO_TADDR(cdaObj) + iOffset);
- }
+ DWORD_PTR arrayPtr = GetStackTraceArray(cdaObj, &objData, bGotExcData ? &excData : NULL);
if (arrayPtr)
{
if (arrayLen)
{
- // This code is accessing the StackTraceInfo class in the runtime.
- // See: https://github.com/dotnet/runtime/blob/main/src/coreclr/vm/clrex.h
-#ifdef _TARGET_WIN64_
- DWORD_PTR dataPtr = arrayPtr + sizeof(DWORD_PTR) + sizeof(DWORD) + sizeof(DWORD);
-#else
- DWORD_PTR dataPtr = arrayPtr + sizeof(DWORD_PTR) + sizeof(DWORD);
-#endif // _TARGET_WIN64_
+ DWORD_PTR dataPtr = GetFirstArrayElementPointer(arrayPtr);
size_t stackTraceSize = 0;
MOVE (stackTraceSize, dataPtr); // data length is stored at the beginning of the array in this case
CLRDATA_ADDRESS EIP = TO_CDADDR(IP);
DacpCodeHeaderData codeHeaderData;
- methodDesc = NULL;
- gcinfoAddr = NULL;
+ methodDesc = (TADDR)0;
+ gcinfoAddr = (TADDR)0;
if (codeHeaderData.Request(g_sos, EIP) != S_OK)
{
// > 0 = offset to field from objAddr
int GetObjFieldOffset(CLRDATA_ADDRESS cdaObj, __in_z LPCWSTR wszFieldName, BOOL bFirst)
{
- TADDR mt = NULL;
+ TADDR mt = (TADDR)0;
if FAILED(GetMTOfObject(TO_TADDR(cdaObj), &mt))
return -1;
// returns NULL
CLRDATA_ADDRESS IsInOneDomainOnly(CLRDATA_ADDRESS AssemblyPtr)
{
- CLRDATA_ADDRESS appDomain = NULL;
+ CLRDATA_ADDRESS appDomain = (TADDR)0;
DacpAppDomainStoreData adstore;
if (adstore.Request(g_sos) != S_OK)
{
ExtOut("Unable to get appdomain store\n");
- return NULL;
+ return (TADDR)0;
}
size_t AllocSize;
if (!ClrSafeInt<size_t>::multiply(sizeof(CLRDATA_ADDRESS), adstore.DomainCount, AllocSize))
{
ReportOOM();
- return NULL;
+ return (TADDR)0;
}
ArrayHolder<CLRDATA_ADDRESS> pArray = new CLRDATA_ADDRESS[adstore.DomainCount];
if (pArray==NULL)
{
ReportOOM();
- return NULL;
+ return (TADDR)0;
}
if (g_sos->GetAppDomainList(adstore.DomainCount, pArray, NULL)!=S_OK)
{
ExtOut ("Failed to get appdomain list\n");
- return NULL;
+ return (TADDR)0;
}
for (int i = 0; i < adstore.DomainCount; i++)
{
if (IsInterrupt())
- return NULL;
+ return (TADDR)0;
DacpAppDomainData dadd;
if (dadd.Request(g_sos, pArray[i]) != S_OK)
{
ExtOut ("Unable to get AppDomain %p\n", SOS_PTR(pArray[i]));
- return NULL;
+ return (TADDR)0;
}
if (dadd.AssemblyCount)
if (!ClrSafeInt<size_t>::multiply(sizeof(CLRDATA_ADDRESS), dadd.AssemblyCount, AssemblyAllocSize))
{
ReportOOM();
- return NULL;
+ return (TADDR)0;
}
ArrayHolder<CLRDATA_ADDRESS> pAsmArray = new CLRDATA_ADDRESS[dadd.AssemblyCount];
if (pAsmArray==NULL)
{
ReportOOM();
- return NULL;
+ return (TADDR)0;
}
if (g_sos->GetAssemblyList(dadd.AppDomainPtr,dadd.AssemblyCount,pAsmArray, NULL)!=S_OK)
{
ExtOut("Unable to get array of Assemblies\n");
- return NULL;
+ return (TADDR)0;
}
for (LONG n = 0; n < dadd.AssemblyCount; n ++)
{
if (IsInterrupt())
- return NULL;
+ return (TADDR)0;
if (AssemblyPtr == pAsmArray[n])
{
- if (appDomain != NULL)
+ if (appDomain != (TADDR)0)
{
// We have found more than one AppDomain that loaded this
// assembly, we must return NULL.
- return NULL;
+ return (TADDR)0;
}
appDomain = dadd.AppDomainPtr;
}
DacpMethodTableData mt;
if (mt.Request(g_sos, mtPtr) != S_OK)
{
- return NULL;
+ return (TADDR)0;
}
DacpModuleData module;
if (module.Request(g_sos, mt.Module) != S_OK)
{
- return NULL;
+ return (TADDR)0;
}
DacpAssemblyData assembly;
if (assembly.Request(g_sos, module.Assembly) != S_OK)
{
- return NULL;
+ return (TADDR)0;
}
DacpAppDomainStoreData adstore;
if (adstore.Request(g_sos) != S_OK)
{
- return NULL;
+ return (TADDR)0;
}
return (assembly.ParentDomain == adstore.sharedDomain) ?
CLRDATA_ADDRESS GetAppDomain(CLRDATA_ADDRESS objPtr)
{
- CLRDATA_ADDRESS appDomain = NULL;
+ CLRDATA_ADDRESS appDomain = (TADDR)0;
DacpObjectData objData;
if (objData.Request(g_sos,objPtr) != S_OK)
{
- return NULL;
+ return (TADDR)0;
}
// First check eeclass->module->assembly->domain.
DacpMethodTableData mt;
if (mt.Request(g_sos,objData.MethodTable) != S_OK)
{
- return NULL;
+ return (TADDR)0;
}
DacpModuleData module;
if (module.Request(g_sos,mt.Module) != S_OK)
{
- return NULL;
+ return (TADDR)0;
}
DacpAssemblyData assembly;
if (assembly.Request(g_sos,module.Assembly) != S_OK)
{
- return NULL;
+ return (TADDR)0;
}
DacpAppDomainStoreData adstore;
if (adstore.Request(g_sos) != S_OK)
{
- return NULL;
+ return (TADDR)0;
}
if (assembly.ParentDomain == adstore.sharedDomain)
ULONG value = 0;
if (!obj.TryGetHeader(value))
{
- return NULL;
+ return (TADDR)0;
}
DWORD adIndex = (value >> SBLK_APPDOMAIN_SHIFT) & SBLK_MASK_APPDOMAININDEX;
// being in domain X if the only other domain that has the assembly
// loaded is domain X.
appDomain = IsInOneDomainOnly(assembly.AssemblyPtr);
- if (appDomain == NULL && ((value & BIT_SBLK_IS_HASH_OR_SYNCBLKINDEX) != 0))
+ if (appDomain == (TADDR)0 && ((value & BIT_SBLK_IS_HASH_OR_SYNCBLKINDEX) != 0))
{
if ((value & BIT_SBLK_IS_HASHCODE) == 0)
{
size_t AllocSize;
if (!ClrSafeInt<size_t>::multiply(sizeof(CLRDATA_ADDRESS), adstore.DomainCount, AllocSize))
{
- return NULL;
+ return (TADDR)0;
}
// we know we have a non-zero adIndex. Find the appdomain.
ArrayHolder<CLRDATA_ADDRESS> pArray = new CLRDATA_ADDRESS[adstore.DomainCount];
if (pArray==NULL)
{
- return NULL;
+ return (TADDR)0;
}
if (g_sos->GetAppDomainList(adstore.DomainCount, pArray, NULL)!=S_OK)
{
- return NULL;
+ return (TADDR)0;
}
for (int i = 0; i < adstore.DomainCount; i++)
DacpAppDomainData dadd;
if (dadd.Request(g_sos, pArray[i]) != S_OK)
{
- return NULL;
+ return (TADDR)0;
}
if (dadd.dwId == adIndex)
{
void AssemblyInfo(DacpAssemblyData *pAssembly)
{
- if ((ULONG64)pAssembly->AssemblySecDesc != NULL)
+ if ((ULONG64)pAssembly->AssemblySecDesc != (TADDR)0)
ExtOut("SecurityDescriptor: %p\n", SOS_PTR(pAssembly->AssemblySecDesc));
ExtOut(" Module\n");
ExtOut("HighFrequencyHeap: %p\n", SOS_PTR(pDomain->pHighFrequencyHeap));
ExtOut("StubHeap: %p\n", SOS_PTR(pDomain->pStubHeap));
ExtOut("Stage: %s\n", GetStageText(pDomain->appDomainStage));
- if ((ULONG64)pDomain->AppSecDesc != NULL)
+ if ((ULONG64)pDomain->AppSecDesc != (TADDR)0)
ExtOut("SecurityDescriptor: %p\n", SOS_PTR(pDomain->AppSecDesc));
ExtOut("Name: ");
return res;
}
- if (mt == sos::MethodTable::GetArrayMT() && cmt != NULL)
+ if (mt == sos::MethodTable::GetArrayMT() && cmt != (TADDR)0)
{
mt = cmt;
array = true;
BOOL IsObjectArray (DWORD_PTR obj)
{
- DWORD_PTR mtAddr = NULL;
+ DWORD_PTR mtAddr = (TADDR)0;
if (SUCCEEDED(GetMTOfObject(obj, &mtAddr)))
return TO_TADDR(g_special_usefulGlobals.ArrayMethodTable) == mtAddr;
BOOL IsStringObject (size_t obj)
{
- DWORD_PTR mtAddr = NULL;
+ DWORD_PTR mtAddr = (TADDR)0;
if (SUCCEEDED(GetMTOfObject(obj, &mtAddr)))
return TO_TADDR(g_special_usefulGlobals.StringMethodTable) == mtAddr;
{
DacpMethodTableData dmtd;
CLRDATA_ADDRESS walkMT = mtObj;
- while (walkMT != NULL)
+ while (walkMT != (TADDR)0)
{
if (dmtd.Request(g_sos, walkMT) != S_OK)
{
DacpMethodTableData dmtd;
for (CLRDATA_ADDRESS walkMT = mtObj;
- walkMT != NULL && dmtd.Request(g_sos, walkMT) == S_OK;
+ walkMT != (TADDR)0 && dmtd.Request(g_sos, walkMT) == S_OK;
walkMT = dmtd.ParentMethodTable)
{
if (dmtd.Module == modulePtr && dmtd.cl == typeDef)
{
CLRDATA_ADDRESS methodPtr;
MOVE(methodPtr, delegateObj.GetAddress() + offset);
- if (methodPtr != NULL)
+ if (methodPtr != (TADDR)0)
{
if (g_sos->GetMethodDescPtrFromIP(methodPtr, pMD) == S_OK)
{
ArrayHolder<CLRDATA_ADDRESS> pAssemblyArray = NULL;
ArrayHolder<CLRDATA_ADDRESS> pModules = NULL;
int arrayLength = 0;
- int numSpecialDomains = (adsData.sharedDomain != NULL) ? 2 : 1;
+ int numSpecialDomains = (adsData.sharedDomain != (TADDR)0) ? 2 : 1;
if (!ClrSafeInt<int>::addition(adsData.DomainCount, numSpecialDomains, arrayLength))
{
ExtOut("<integer overflow>\n");
}
pArray[0] = adsData.systemDomain;
- if (adsData.sharedDomain != NULL)
+ if (adsData.sharedDomain != (TADDR)0)
{
pArray[1] = adsData.sharedDomain;
}
\**********************************************************************/
void GetInfoFromName(DWORD_PTR ModulePtr, const char* name, mdTypeDef* retMdTypeDef)
{
- DWORD_PTR ignoredModuleInfoRet = NULL;
+ DWORD_PTR ignoredModuleInfoRet = (TADDR)0;
if (retMdTypeDef)
*retMdTypeDef = 0;
DWORD_PTR GetMethodDescFromModule(DWORD_PTR ModuleAddr, ULONG token)
{
if (TypeFromToken(token) != mdtMethodDef)
- return NULL;
+ return (TADDR)0;
CLRDATA_ADDRESS md = 0;
if (FAILED(g_sos->GetMethodDescFromToken(ModuleAddr, token, &md)))
{
- return NULL;
+ return (TADDR)0;
}
else if (0 == md)
{
}
else if ( !IsMethodDesc((DWORD_PTR)md))
{
- return NULL;
+ return (TADDR)0;
}
return (DWORD_PTR)md;
{
mdTypeDef token;
if (pMeth->GetTokenAndScope(&token, NULL) != S_OK)
- (*pOut)[i] = NULL;
+ (*pOut)[i] = (TADDR)0;
(*pOut)[i] = GetMethodDescFromModule(ModulePtr, token);
- if ((*pOut)[i] == NULL)
+ if ((*pOut)[i] == (TADDR)0)
{
*numMethods = 0;
return E_FAIL;
// Do prefast integer checks before the malloc.
size_t AllocSize;
LONG DomainAllocCount;
- LONG NumExtraDomains = (adsData.sharedDomain != NULL) ? 2 : 1;
+ LONG NumExtraDomains = (adsData.sharedDomain != (TADDR)0) ? 2 : 1;
if (!ClrSafeInt<LONG>::addition(adsData.DomainCount, NumExtraDomains, DomainAllocCount) ||
!ClrSafeInt<size_t>::multiply(DomainAllocCount, sizeof(PVOID), AllocSize) ||
(domainList = new DWORD_PTR[DomainAllocCount]) == NULL)
}
domainList[numDomain++] = (DWORD_PTR) adsData.systemDomain;
- if (adsData.sharedDomain != NULL)
+ if (adsData.sharedDomain != (TADDR)0)
{
domainList[numDomain++] = (DWORD_PTR) adsData.sharedDomain;
}
}
CLRDATA_ADDRESS CurThread = ThreadStore.firstThread;
- while (CurThread != NULL)
+ while (CurThread != (TADDR)0)
{
if (IsInterrupt())
return S_FALSE;
DacpThreadData Thread;
if (Thread.Request(g_sos, CurThread) != S_OK)
{
- return NULL;
+ return (TADDR)0;
}
if (Thread.osThreadId == Tid)
CurThread = Thread.nextThread;
}
- return NULL;
+ return (TADDR)0;
}
#define MSCOREE_SHIM_A "mscoree.dll"
{
if (version > SOS_BREAKING_CHANGE_VERSION)
{
- ExtWarn("WARNING: SOS needs to be upgraded for this version of the runtime. Some commands may not work correctly.\n");
- ExtWarn("For more information see https://go.microsoft.com/fwlink/?linkid=2135652\n");
- ExtWarn("\n");
+ ExtErr("SOS needs to be upgraded for this version of the runtime. Some commands may not work correctly.\n");
+ ExtErr("For more information see https://go.microsoft.com/fwlink/?linkid=2135652\n");
+ ExtErr("\n");
result = true;
}
}
BOOL NameForMD_s (DWORD_PTR pMD, __out_ecount (capacity_mdName) WCHAR *mdName, size_t capacity_mdName);
BOOL NameForMT_s (DWORD_PTR MTAddr, __out_ecount (capacity_mdName) WCHAR *mdName, size_t capacity_mdName);
-WCHAR *CreateMethodTableName(TADDR mt, TADDR cmt = NULL);
+WCHAR *CreateMethodTableName(TADDR mt, TADDR cmt = (TADDR)0);
void isRetAddr(DWORD_PTR retAddr, DWORD_PTR* whereCalled);
DWORD_PTR GetValueFromExpression (___in __in_z const char *const str);
info.GCInfo = NULL;
info.ArrayOfVC = false;
info.GCInfoBuffer = NULL;
- info.LoaderAllocatorObjectHandle = NULL;
+ info.LoaderAllocatorObjectHandle = (TADDR)0;
}
};
Node *head;
struct MemRange
{
- MemRange (ULONG64 s = NULL, size_t l = 0, MemRange * n = NULL)
+ MemRange (ULONG64 s = (TADDR)0, size_t l = 0, MemRange * n = NULL)
: start(s), len (l), next (n)
{}
try
{
using DataTarget dataTarget = DataTarget.LoadDump(dump_path.FullName);
-
OSPlatform targetPlatform = dataTarget.DataReader.TargetPlatform;
- if (targetPlatform != OSPlatform.OSX &&
- (RuntimeInformation.IsOSPlatform(OSPlatform.OSX) ||
- dataTarget.DataReader.EnumerateModules().Any((module) => Path.GetExtension(module.FileName) == ".dylib")))
+ if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX) && (targetPlatform != OSPlatform.OSX))
+ {
+ throw new NotSupportedException("Analyzing Windows or Linux dumps not supported when running on MacOS");
+ }
+ if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux) && (targetPlatform != OSPlatform.Linux))
{
- targetPlatform = OSPlatform.OSX;
+ throw new NotSupportedException("Analyzing Windows or MacOS dumps not supported when running on Linux");
}
TargetFromDataReader target = new(dataTarget.DataReader, targetPlatform, this, _targetIdFactory++, dump_path.FullName);
contextService.SetCurrentTarget(target);
// Automatically enable symbol server support, default cache and search for symbols in the dump directory
- symbolService.AddSymbolServer(msdl: true, symweb: false, retryCount: 3);
+ symbolService.AddSymbolServer(retryCount: 3);
symbolService.AddCachePath(symbolService.DefaultSymbolCache);
symbolService.AddDirectoryPath(Path.GetDirectoryName(dump_path.FullName));
-<Project Sdk="Microsoft.NET.Sdk">
+<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<ItemGroup>
<PackageReference Include="Microsoft.Diagnostics.Runtime" Version="$(MicrosoftDiagnosticsRuntimeVersion)" />
- <PackageReference Include="Microsoft.SymbolStore" Version="$(MicrosoftSymbolStoreVersion)" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="$(MSBuildThisFileDirectory)..\..\Microsoft.Diagnostics.DebugServices\Microsoft.Diagnostics.DebugServices.csproj" />
<ProjectReference Include="$(MSBuildThisFileDirectory)..\..\Microsoft.Diagnostics.DebugServices.Implementation\Microsoft.Diagnostics.DebugServices.Implementation.csproj" />
<ProjectReference Include="$(MSBuildThisFileDirectory)..\..\Microsoft.Diagnostics.ExtensionCommands\Microsoft.Diagnostics.ExtensionCommands.csproj" />
+ <ProjectReference Include="$(MSBuildThisFileDirectory)..\..\Microsoft.SymbolStore\Microsoft.SymbolStore.csproj" />
</ItemGroup>
<Import Project="$(MSBuildThisFileDirectory)..\..\sos-packaging.props" />
log.WriteLine("{0,5:n1}s: Creating type table flushing task", getElapsed().TotalSeconds);
using (EventPipeSessionController typeFlushSession = new(processId, diagnosticPort, new List<EventPipeProvider> {
- new EventPipeProvider("Microsoft-DotNETCore-SampleProfiler", EventLevel.Informational)
+ new("Microsoft-DotNETCore-SampleProfiler", EventLevel.Informational)
}, false))
{
log.WriteLine("{0,5:n1}s: Flushing the type table", getElapsed().TotalSeconds);
log.WriteLine("{0,5:n1}s: Requesting a .NET Heap Dump", getElapsed().TotalSeconds);
using EventPipeSessionController gcDumpSession = new(processId, diagnosticPort, new List<EventPipeProvider> {
- new EventPipeProvider("Microsoft-Windows-DotNETRuntime", EventLevel.Verbose, (long)(ClrTraceEventParser.Keywords.GCHeapSnapshot))
+ new("Microsoft-Windows-DotNETRuntime", EventLevel.Verbose, (long)(ClrTraceEventParser.Keywords.GCHeapSnapshot))
});
log.WriteLine("{0,5:n1}s: gcdump EventPipe Session started", getElapsed().TotalSeconds);
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Runtime.InteropServices;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Diagnostic.Tools.Symbol.Properties;
+using Microsoft.FileFormats;
+using Microsoft.FileFormats.ELF;
+using Microsoft.FileFormats.MachO;
+using Microsoft.FileFormats.PE;
+using Microsoft.SymbolStore;
+using Microsoft.SymbolStore.KeyGenerators;
+using Microsoft.SymbolStore.SymbolStores;
+
+namespace Microsoft.Diagnostics.Tools.Symbol
+{
+ public class Program
+ {
+ private struct ServerInfo
+ {
+ public Uri Uri;
+ public string PersonalAccessToken;
+ }
+
+ private readonly List<string> InputFilePaths = new();
+ private readonly List<string> CacheDirectories = new();
+ private readonly List<ServerInfo> SymbolServers = new();
+ private string OutputDirectory;
+ private TimeSpan? Timeout;
+ private bool Overwrite;
+ private bool Subdirectories;
+ private bool Symbols;
+ private bool Debugging;
+ private bool Modules;
+ private bool ForceWindowsPdbs;
+ private bool HostOnly;
+ private bool VerifyCore;
+ private Tracer Tracer;
+
+ public static void Main(string[] args)
+ {
+ if (args.Length == 0)
+ {
+ goto usage;
+ }
+ Program program = new();
+ Tracer tracer = new();
+ program.Tracer = tracer;
+
+ for (int i = 0; i < args.Length; i++)
+ {
+ string personalAccessToken = null;
+ Uri uri;
+ switch (args[i])
+ {
+ case "--microsoft-symbol-server":
+ Uri.TryCreate("https://msdl.microsoft.com/download/symbols/", UriKind.Absolute, out uri);
+ program.SymbolServers.Add(new ServerInfo { Uri = uri, PersonalAccessToken = null });
+ break;
+
+ case "--authenticated-server-path":
+ if (++i < args.Length)
+ {
+ personalAccessToken = args[i];
+ }
+ else
+ {
+ goto usage;
+ }
+ if (string.IsNullOrEmpty(personalAccessToken))
+ {
+ tracer.Error("No personal access token option");
+ goto usage;
+ }
+ goto case "--server-path";
+
+ case "--server-path":
+ if (++i < args.Length)
+ {
+ // Make sure the server Uri ends with "/"
+ string serverPath = args[i].TrimEnd('/') + '/';
+ if (!Uri.TryCreate(serverPath, UriKind.Absolute, out uri) || uri.IsFile)
+ {
+ tracer.Error(Resources.InvalidServerPath, args[i]);
+ goto usage;
+ }
+ program.SymbolServers.Add(new ServerInfo { Uri = uri, PersonalAccessToken = personalAccessToken });
+ }
+ else
+ {
+ goto usage;
+ }
+ break;
+
+ case "-o":
+ case "--output":
+ if (++i < args.Length)
+ {
+ program.OutputDirectory = args[i];
+ }
+ else
+ {
+ goto usage;
+ }
+ break;
+
+ case "--overwrite":
+ program.Overwrite = true;
+ break;
+
+ case "--timeout":
+ if (++i < args.Length)
+ {
+ double timeoutInMinutes = double.Parse(args[i]);
+ program.Timeout = TimeSpan.FromMinutes(timeoutInMinutes);
+ }
+ else
+ {
+ goto usage;
+ }
+ break;
+
+ case "--cache-directory":
+ if (++i < args.Length)
+ {
+ program.CacheDirectories.Add(args[i]);
+ }
+ else
+ {
+ goto usage;
+ }
+ break;
+
+ case "--recurse-subdirectories":
+ program.Subdirectories = true;
+ break;
+
+ case "--modules":
+ program.Modules = true;
+ break;
+
+ case "--symbols":
+ program.Symbols = true;
+ break;
+
+ case "--debugging":
+ program.Debugging = true;
+ break;
+
+ case "--windows-pdbs":
+ program.ForceWindowsPdbs = true;
+ break;
+
+ case "--host-only":
+ program.HostOnly = true;
+ break;
+
+ case "--verifycore":
+ program.VerifyCore = true;
+ break;
+
+ case "-d":
+ case "--diagnostics":
+ tracer.Enabled = true;
+ tracer.EnabledVerbose = true;
+ break;
+
+ case "-h":
+ case "-?":
+ case "--help":
+ goto usage;
+
+ default:
+ string inputFile = args[i];
+ if (inputFile.StartsWith("-") || inputFile.StartsWith("--"))
+ {
+ tracer.Error(Resources.InvalidCommandLineOption, inputFile);
+ goto usage;
+ }
+ program.InputFilePaths.Add(inputFile);
+ break;
+ }
+ }
+ if (program.VerifyCore)
+ {
+ program.VerifyCoreDump();
+ }
+ else
+ {
+ // Default to public Microsoft symbol server
+ if (program.SymbolServers.Count == 0)
+ {
+ Uri.TryCreate("https://msdl.microsoft.com/download/symbols/", UriKind.Absolute, out Uri uri);
+ program.SymbolServers.Add(new ServerInfo { Uri = uri, PersonalAccessToken = null });
+ }
+ foreach (ServerInfo server in program.SymbolServers)
+ {
+ tracer.WriteLine(Resources.DownloadFromUri, server.Uri);
+ }
+ if (program.OutputDirectory != null)
+ {
+ Directory.CreateDirectory(program.OutputDirectory);
+ tracer.WriteLine(Resources.WritingFilesToOutput, program.OutputDirectory);
+ }
+ try
+ {
+ program.DownloadFiles().GetAwaiter().GetResult();
+ }
+ catch (Exception ex)
+ {
+ tracer.Error("{0}{1}", ex.Message, ex.InnerException != null ? " -> " + ex.InnerException.Message : "");
+ }
+ }
+ return;
+
+ usage:
+ PrintUsage();
+ }
+
+ private static void PrintUsage()
+ {
+ Console.WriteLine();
+ Console.WriteLine(Resources.UsageOptions);
+ }
+
+ internal async Task DownloadFiles()
+ {
+ using (Microsoft.SymbolStore.SymbolStores.SymbolStore symbolStore = BuildSymbolStore())
+ {
+ foreach (SymbolStoreKeyWrapper wrapper in GetKeys().Distinct())
+ {
+ SymbolStoreKey key = wrapper.Key;
+ if (symbolStore != null)
+ {
+ using (SymbolStoreFile symbolFile = await symbolStore.GetFile(key, CancellationToken.None).ConfigureAwait(false))
+ {
+ if (symbolFile != null)
+ {
+ await WriteFile(symbolFile, wrapper).ConfigureAwait(false);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private Microsoft.SymbolStore.SymbolStores.SymbolStore BuildSymbolStore()
+ {
+ Microsoft.SymbolStore.SymbolStores.SymbolStore store = null;
+
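+ // Chain the HTTP stores back to front so the first server listed on the
+ // command line is the first one queried.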
+ foreach (ServerInfo server in ((IEnumerable<ServerInfo>)SymbolServers).Reverse())
+ {
+ store = new HttpSymbolStore(Tracer, store, server.Uri, server.PersonalAccessToken);
+ if (Timeout.HasValue && store is HttpSymbolStore http)
+ {
+ http.Timeout = Timeout.Value;
+ }
+ }
+
+ // Add default symbol cache if one wasn't set by the command line
+ if (CacheDirectories.Count == 0)
+ {
+ CacheDirectories.Add(GetDefaultSymbolCache());
+ }
+
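+ // Cache stores wrap the server chain so caches are checked first and are
+ // populated with any file downloaded from the servers.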
+ foreach (string cache in ((IEnumerable<string>)CacheDirectories).Reverse())
+ {
+ store = new CacheSymbolStore(Tracer, store, cache);
+ }
+
+ return store;
+ }
+
+ private sealed class SymbolStoreKeyWrapper
+ {
+ public readonly SymbolStoreKey Key;
+ public readonly string InputFile;
+
+ internal SymbolStoreKeyWrapper(SymbolStoreKey key, string inputFile)
+ {
+ Key = key;
+ InputFile = inputFile;
+ }
+
+ /// <summary>
+ /// Returns the hash of the index.
+ /// </summary>
+ public override int GetHashCode()
+ {
+ return Key.GetHashCode();
+ }
+
+ /// <summary>
+ /// Only the index is compared or hashed. The FileName is already
+ /// part of the index.
+ /// </summary>
+ public override bool Equals(object obj)
+ {
+ SymbolStoreKeyWrapper wrapper = (SymbolStoreKeyWrapper)obj;
+ return Key.Equals(wrapper.Key);
+ }
+ }
+
+ private IEnumerable<SymbolStoreKeyWrapper> GetKeys()
+ {
+ IEnumerable<string> inputFiles = GetInputFiles();
+
+ foreach (string inputFile in inputFiles)
+ {
+ foreach (KeyGenerator generator in GetKeyGenerators(inputFile))
+ {
+ KeyTypeFlags flags = KeyTypeFlags.None;
+ if (HostOnly)
+ {
+ flags |= KeyTypeFlags.HostKeys;
+ }
+ if (Symbols)
+ {
+ flags |= KeyTypeFlags.SymbolKey | KeyTypeFlags.PerfMapKeys;
+ }
+ if (Modules)
+ {
+ flags |= KeyTypeFlags.IdentityKey;
+ }
+ if (Debugging)
+ {
+ flags |= KeyTypeFlags.RuntimeKeys | KeyTypeFlags.ClrKeys;
+ }
+ if (flags == KeyTypeFlags.None)
+ {
+ if (generator.IsDump())
+ {
+ // The default for dumps is to download everything
+ flags = KeyTypeFlags.IdentityKey | KeyTypeFlags.SymbolKey | KeyTypeFlags.ClrKeys | KeyTypeFlags.HostKeys;
+ }
+ else
+ {
+ // Otherwise the default is just the symbol files
+ flags = KeyTypeFlags.SymbolKey | KeyTypeFlags.PerfMapKeys;
+ }
+ }
+ if (ForceWindowsPdbs)
+ {
+ flags |= KeyTypeFlags.ForceWindowsPdbs;
+ }
+ foreach (SymbolStoreKeyWrapper wrapper in generator.GetKeys(flags).Select((key) => new SymbolStoreKeyWrapper(key, inputFile)))
+ {
+ yield return wrapper;
+ }
+ }
+ }
+ }
+
+ private IEnumerable<KeyGenerator> GetKeyGenerators(string inputFile)
+ {
+ using (Stream inputStream = File.Open(inputFile, FileMode.Open, FileAccess.Read, FileShare.Read))
+ {
+ SymbolStoreFile file = new(inputStream, inputFile);
+ yield return new FileKeyGenerator(Tracer, file);
+ }
+ }
+
+ private async Task WriteFile(SymbolStoreFile file, SymbolStoreKeyWrapper wrapper)
+ {
+ if (OutputDirectory != null)
+ {
+ await WriteFileToDirectory(file.Stream, wrapper.Key.FullPathName, OutputDirectory).ConfigureAwait(false);
+ }
+ else
+ {
+ await WriteFileToDirectory(file.Stream, wrapper.Key.FullPathName, Path.GetDirectoryName(wrapper.InputFile)).ConfigureAwait(false);
+ }
+ }
+
+ private async Task WriteFileToDirectory(Stream stream, string fileName, string destinationDirectory)
+ {
+ stream.Position = 0;
+ string destination = Path.Combine(destinationDirectory, Path.GetFileName(fileName.Replace('\\', '/')));
+ if (!Overwrite && File.Exists(destination))
+ {
+ Tracer.WriteLine(Resources.FileAlreadyExists, destination);
+ }
+ else
+ {
+ Tracer.WriteLine(Resources.WritingFile, destination);
+ using (Stream destinationStream = File.OpenWrite(destination))
+ {
+ await stream.CopyToAsync(destinationStream).ConfigureAwait(false);
+ }
+ }
+ }
+
+ private static string GetDefaultSymbolCache()
+ {
+ if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
+ {
+ return Path.Combine(Path.GetTempPath(), "SymbolCache");
+ }
+ else
+ {
+ return Path.Combine(Environment.GetEnvironmentVariable("HOME"), ".dotnet", "symbolcache");
+ }
+ }
+
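+ // Walks every loaded image in an ELF core dump and verifies that it parses as a
+ // valid ELF, MachO, or PE module; also checks that the core file is not truncated.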
+ internal void VerifyCoreDump()
+ {
+ foreach (string inputFile in GetInputFiles())
+ {
+ Console.WriteLine($"{inputFile}");
+
+ using Stream inputStream = File.Open(inputFile, FileMode.Open, FileAccess.Read, FileShare.Read);
+ StreamAddressSpace dataSource = new(inputStream);
+ ELFCoreFile core = new(dataSource);
+
+ if (Tracer.Enabled)
+ {
+ foreach (ELFProgramSegment segment in core.Segments)
+ {
+ Tracer.Information("{0:X16}-{1:X16} {2:X8} {3:X8} {4}",
+ segment.Header.VirtualAddress.Value,
+ segment.Header.VirtualAddress + segment.Header.VirtualSize,
+ segment.Header.FileOffset.Value,
+ (ulong)segment.Header.FileSize,
+ segment.Header.Type);
+ }
+ }
+
+ foreach (ELFLoadedImage image in core.LoadedImages)
+ {
+ Console.WriteLine("{0:X16} {1}", image.LoadAddress, image.Path);
+ Exception elfException = null;
+ Exception machoException = null;
+ Exception peException = null;
+ try
+ {
+ ELFFile elfFile = image.Image;
+ if (elfFile.IsValid())
+ {
+ try
+ {
+ byte[] buildid = elfFile.BuildID;
+ }
+ catch (Exception ex)
+ {
+ Console.WriteLine(" ELF file invalid build id - {0}", ex.Message);
+ }
+ foreach (ELFProgramSegment segment in elfFile.Segments)
+ {
+ Tracer.Verbose(" {0:X16}-{1:X16} file off {2:X8} file size {3:X8} {4}",
+ segment.Header.VirtualAddress.Value,
+ segment.Header.VirtualAddress + segment.Header.VirtualSize,
+ segment.Header.FileOffset.Value,
+ (ulong)segment.Header.FileSize,
+ segment.Header.Type);
+
+ if (segment.Header.Type == ELFProgramHeaderType.Note ||
+ segment.Header.Type == ELFProgramHeaderType.Dynamic ||
+ segment.Header.Type == ELFProgramHeaderType.GnuEHFrame)
+ {
+ try
+ {
+ byte[] data = segment.Contents.Read(0, (uint)segment.Header.VirtualSize);
+ }
+ catch (Exception ex)
+ {
+ Console.WriteLine(" ELF file segment {0} virt addr {1:X16} virt size {2:X8} INVALID - {3}",
+ segment.Header.Type, segment.Header.VirtualAddress, segment.Header.VirtualSize, ex.Message);
+ }
+ }
+ }
+
+ // The ELF module was valid; try the next module
+ continue;
+ }
+ }
+ catch (Exception ex)
+ {
+ elfException = ex;
+ }
+
+ IAddressSpace addressSpace = new RelativeAddressSpace(core.DataSource, image.LoadAddress, core.DataSource.Length);
+ try
+ {
+ MachOFile machoFile = new(addressSpace);
+ if (machoFile.IsValid())
+ {
+ try
+ {
+ byte[] uuid = machoFile.Uuid;
+ }
+ catch (Exception ex)
+ {
+ Console.WriteLine(" MachO file invalid uuid - {0}", ex.Message);
+ }
+ foreach (MachSegment segment in machoFile.Segments)
+ {
+ Tracer.Verbose(" {0:X16}-{1:X16} offset {2:X16} size {3:X16} {4} {5}",
+ (ulong)segment.LoadCommand.VMAddress,
+ segment.LoadCommand.VMAddress + segment.LoadCommand.VMSize,
+ (ulong)segment.LoadCommand.FileOffset,
+ (ulong)segment.LoadCommand.FileSize,
+ segment.LoadCommand.Command,
+ segment.LoadCommand.SegName);
+
+ foreach (MachSection section in segment.Sections)
+ {
+ Tracer.Verbose(" addr {0:X16} size {1:X16} offset {2:X8} {3}",
+ (ulong)section.Address,
+ (ulong)section.Size,
+ section.Offset,
+ section.SectionName);
+ }
+ }
+
+ // The MachO module was valid; try the next module
+ continue;
+ }
+ }
+ catch (Exception ex)
+ {
+ machoException = ex;
+ }
+
+ try
+ {
+ PEFile peFile = new(addressSpace, true);
+ if (peFile.IsValid())
+ {
+ // The PE module was valid; try the next module
+ continue;
+ }
+ }
+ catch (Exception ex)
+ {
+ peException = ex;
+ }
+
+ Console.WriteLine("{0:X16} invalid image - {1}", image.LoadAddress, image.Path);
+ if (elfException != null)
+ {
+ Tracer.Verbose("ELF {0}", elfException.Message);
+ }
+ if (machoException != null)
+ {
+ Tracer.Verbose("MachO {0}", machoException.Message);
+ }
+ if (peException != null)
+ {
+ Tracer.Verbose("PE {0}", peException.Message);
+ }
+ }
+
+ ulong segmentsTotal = core.Segments.Max(s => s.Header.FileOffset + s.Header.FileSize);
+ if (segmentsTotal > dataSource.Length)
+ {
+ Console.WriteLine($"ERROR: Core file not complete: file size 0x{dataSource.Length:X8} segments total 0x{segmentsTotal:X8}");
+ }
+ }
+ }
+
+ private IEnumerable<string> GetInputFiles()
+ {
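+ // Treat each input path as a wildcard pattern and expand it relative to its directory.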
+ IEnumerable<string> inputFiles = InputFilePaths.SelectMany((string file) =>
+ {
+ string directory = Path.GetDirectoryName(file);
+ string pattern = Path.GetFileName(file);
+ return Directory.EnumerateFiles(string.IsNullOrWhiteSpace(directory) ? "." : directory, pattern,
+ Subdirectories ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly);
+ });
+
+ if (!inputFiles.Any())
+ {
+ throw new ArgumentException("Input files not found");
+ }
+ return inputFiles;
+ }
+ }
+}
--- /dev/null
+//------------------------------------------------------------------------------
+// <auto-generated>
+// This code was generated by a tool.
+// Runtime Version:4.0.30319.42000
+//
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+// </auto-generated>
+//------------------------------------------------------------------------------
+
+namespace Microsoft.Diagnostic.Tools.Symbol.Properties {
+ using System;
+
+
+ /// <summary>
+ /// A strongly-typed resource class, for looking up localized strings, etc.
+ /// </summary>
+ // This class was auto-generated by the StronglyTypedResourceBuilder
+ // class via a tool like ResGen or Visual Studio.
+ // To add or remove a member, edit your .ResX file then rerun ResGen
+ // with the /str option, or rebuild your VS project.
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "17.0.0.0")]
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+ [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+ internal class Resources {
+
+ private static global::System.Resources.ResourceManager resourceMan;
+
+ private static global::System.Globalization.CultureInfo resourceCulture;
+
+ [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
+ internal Resources() {
+ }
+
+ /// <summary>
+ /// Returns the cached ResourceManager instance used by this class.
+ /// </summary>
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Resources.ResourceManager ResourceManager {
+ get {
+ if (object.ReferenceEquals(resourceMan, null)) {
+ global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("Microsoft.Diagnostic.Tools.Symbol.Properties.Resources", typeof(Resources).Assembly);
+ resourceMan = temp;
+ }
+ return resourceMan;
+ }
+ }
+
+ /// <summary>
+ /// Overrides the current thread's CurrentUICulture property for all
+ /// resource lookups using this strongly typed resource class.
+ /// </summary>
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Globalization.CultureInfo Culture {
+ get {
+ return resourceCulture;
+ }
+ set {
+ resourceCulture = value;
+ }
+ }
+
+ /// <summary>
+ /// Looks up a localized string similar to Downloading from {0}.
+ /// </summary>
+ internal static string DownloadFromUri {
+ get {
+ return ResourceManager.GetString("DownloadFromUri", resourceCulture);
+ }
+ }
+
+ /// <summary>
+ /// Looks up a localized string similar to {0} already exists, file not written.
+ /// </summary>
+ internal static string FileAlreadyExists {
+ get {
+ return ResourceManager.GetString("FileAlreadyExists", resourceCulture);
+ }
+ }
+
+ /// <summary>
+ /// Looks up a localized string similar to Invalid option {0}.
+ /// </summary>
+ internal static string InvalidCommandLineOption {
+ get {
+ return ResourceManager.GetString("InvalidCommandLineOption", resourceCulture);
+ }
+ }
+
+ /// <summary>
+ /// Looks up a localized string similar to Invalid server path '{0}'.
+ /// </summary>
+ internal static string InvalidServerPath {
+ get {
+ return ResourceManager.GetString("InvalidServerPath", resourceCulture);
+ }
+ }
+
+ /// <summary>
+ /// Looks up a localized string similar to Input files not found.
+ /// </summary>
+ internal static string NoInputFiles {
+ get {
+ return ResourceManager.GetString("NoInputFiles", resourceCulture);
+ }
+ }
+
+ /// <summary>
+ /// Looks up a localized string similar to Usage: dotnet symbol [options] &lt;FILES&gt;
+ ///
+ ///Arguments:
+ /// &lt;FILES&gt; List of files. Can contain wildcards.
+ ///
+ ///Options:
+ /// --microsoft-symbol-server Add 'https://msdl.microsoft.com/download/symbols' symbol server path (default).
+ /// --server-path &lt;symbol server path&gt; Add a http server path.
+ /// --authenticated-server-path &lt;pat&gt; &lt;server path&gt; Add a http PAT authenticated server path.
+ /// --cache-directory &lt;file cache directory&gt; Add a cache directory.
+ /// --timeout &lt;m [rest of string was truncated]&quot;;.
+ /// </summary>
+ internal static string UsageOptions {
+ get {
+ return ResourceManager.GetString("UsageOptions", resourceCulture);
+ }
+ }
+
+ /// <summary>
+ /// Looks up a localized string similar to Writing: {0}.
+ /// </summary>
+ internal static string WritingFile {
+ get {
+ return ResourceManager.GetString("WritingFile", resourceCulture);
+ }
+ }
+
+ /// <summary>
+ /// Looks up a localized string similar to Writing files to {0}.
+ /// </summary>
+ internal static string WritingFilesToOutput {
+ get {
+ return ResourceManager.GetString("WritingFilesToOutput", resourceCulture);
+ }
+ }
+ }
+}
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<root>
+ <!--
+ Microsoft ResX Schema
+
+ Version 2.0
+
+ The primary goals of this format is to allow a simple XML format
+ that is mostly human readable. The generation and parsing of the
+ various data types are done through the TypeConverter classes
+ associated with the data types.
+
+ Example:
+
+ ... ado.net/XML headers & schema ...
+ <resheader name="resmimetype">text/microsoft-resx</resheader>
+ <resheader name="version">2.0</resheader>
+ <resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
+ <resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
+ <data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
+ <data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
+ <data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
+ <value>[base64 mime encoded serialized .NET Framework object]</value>
+ </data>
+ <data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
+ <value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
+ <comment>This is a comment</comment>
+ </data>
+
+ There are any number of "resheader" rows that contain simple
+ name/value pairs.
+
+ Each data row contains a name, and value. The row also contains a
+ type or mimetype. Type corresponds to a .NET class that support
+ text/value conversion through the TypeConverter architecture.
+ Classes that don't support this are serialized and stored with the
+ mimetype set.
+
+ The mimetype is used for serialized objects, and tells the
+ ResXResourceReader how to depersist the object. This is currently not
+ extensible. For a given mimetype the value must be set accordingly:
+
+ Note - application/x-microsoft.net.object.binary.base64 is the format
+ that the ResXResourceWriter will generate, however the reader can
+ read any of the formats listed below.
+
+ mimetype: application/x-microsoft.net.object.binary.base64
+ value : The object must be serialized with
+ : System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
+ : and then encoded with base64 encoding.
+
+ mimetype: application/x-microsoft.net.object.soap.base64
+ value : The object must be serialized with
+ : System.Runtime.Serialization.Formatters.Soap.SoapFormatter
+ : and then encoded with base64 encoding.
+
+ mimetype: application/x-microsoft.net.object.bytearray.base64
+ value : The object must be serialized into a byte array
+ : using a System.ComponentModel.TypeConverter
+ : and then encoded with base64 encoding.
+ -->
+ <xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
+ <xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
+ <xsd:element name="root" msdata:IsDataSet="true">
+ <xsd:complexType>
+ <xsd:choice maxOccurs="unbounded">
+ <xsd:element name="metadata">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" />
+ </xsd:sequence>
+ <xsd:attribute name="name" use="required" type="xsd:string" />
+ <xsd:attribute name="type" type="xsd:string" />
+ <xsd:attribute name="mimetype" type="xsd:string" />
+ <xsd:attribute ref="xml:space" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="assembly">
+ <xsd:complexType>
+ <xsd:attribute name="alias" type="xsd:string" />
+ <xsd:attribute name="name" type="xsd:string" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="data">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
+ <xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
+ <xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
+ <xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
+ <xsd:attribute ref="xml:space" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="resheader">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" use="required" />
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:choice>
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:schema>
+ <resheader name="resmimetype">
+ <value>text/microsoft-resx</value>
+ </resheader>
+ <resheader name="version">
+ <value>2.0</value>
+ </resheader>
+ <resheader name="reader">
+ <value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
+ </resheader>
+ <resheader name="writer">
+ <value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
+ </resheader>
+ <data name="DownloadFromUri" xml:space="preserve">
+ <value>Downloading from {0}</value>
+ </data>
+ <data name="FileAlreadyExists" xml:space="preserve">
+ <value>{0} already exists, file not written</value>
+ </data>
+ <data name="InvalidCommandLineOption" xml:space="preserve">
+ <value>Invalid option {0}</value>
+ </data>
+ <data name="InvalidServerPath" xml:space="preserve">
+ <value>Invalid server path '{0}'</value>
+ </data>
+ <data name="NoInputFiles" xml:space="preserve">
+ <value>Input files not found</value>
+ </data>
+ <data name="UsageOptions" xml:space="preserve">
+ <value>Usage: dotnet symbol [options] &lt;FILES&gt;
+
+Arguments:
+ &lt;FILES&gt; List of files. Can contain wildcards.
+
+Options:
+ --microsoft-symbol-server Add 'https://msdl.microsoft.com/download/symbols' symbol server path (default).
+ --server-path &lt;symbol server path&gt; Add a http server path.
+ --authenticated-server-path &lt;pat&gt; &lt;server path&gt; Add a http PAT authenticated server path.
+ --cache-directory &lt;file cache directory&gt; Add a cache directory.
+ --timeout &lt;minutes&gt; Change http timeout in minutes (default: 4).
+ --recurse-subdirectories Process input files in all subdirectories.
+ --host-only Download only the host program (i.e. dotnet) that lldb needs for loading coredumps.
+ --symbols Download the symbol files (.pdb, .dbg, .dwarf).
+ --modules Download the module files (.dll, .so, .dylib).
+ --debugging Download the special debugging modules (DAC, DBI, SOS).
+ --windows-pdbs Force the downloading of the Windows PDBs when Portable PDBs are also available.
+ --overwrite Overwrite existing files in output directory.
+ --verifycore Verify ELF core dump.
+ -o, --output &lt;output directory&gt; Set the output directory. Otherwise, write next to the input file (default).
+ -d, --diagnostics Enable diagnostic output.
+ -h, --help Show help information.</value>
+ </data>
+ <data name="WritingFile" xml:space="preserve">
+ <value>Writing: {0}</value>
+ </data>
+ <data name="WritingFilesToOutput" xml:space="preserve">
+ <value>Writing files to {0}</value>
+ </data>
+</root>
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<xliff xmlns="urn:oasis:names:tc:xliff:document:1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="1.2" xsi:schemaLocation="urn:oasis:names:tc:xliff:document:1.2 xliff-core-1.2-transitional.xsd">
+ <file datatype="xml" source-language="en" target-language="cs" original="../Resources.resx">
+ <body>
+ <trans-unit id="DownloadFromUri">
+ <source>"Downloading from {0}"</source>
+ <target state="new">"Downloading from {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="FileAlreadyExists">
+ <source>"{0} already exists, file not written"</source>
+ <target state="new">"{0} already exists, file not written"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidCommandLineOption">
+ <source>"Invalid option {0}"</source>
+ <target state="new">"Invalid option {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidServerPath">
+ <source>"Invalid server path '{0}'"</source>
+ <target state="new">"Invalid server path '{0}'"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="NoInputFiles">
+ <source>"Input files not found"</source>
+ <target state="new">"Input files not found"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFile">
+ <source>"Writing: {0}"</source>
+ <target state="new">"Writing: {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFilesToOutput">
+ <source>"Writing files to {0}"</source>
+ <target state="new">"Writing files to {0}"</target>
+ <note />
+ </trans-unit>
+ </body>
+ </file>
+</xliff>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<xliff xmlns="urn:oasis:names:tc:xliff:document:1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="1.2" xsi:schemaLocation="urn:oasis:names:tc:xliff:document:1.2 xliff-core-1.2-transitional.xsd">
+ <file datatype="xml" source-language="en" target-language="de" original="../Resources.resx">
+ <body>
+ <trans-unit id="DownloadFromUri">
+ <source>"Downloading from {0}"</source>
+ <target state="new">"Downloading from {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="FileAlreadyExists">
+ <source>"{0} already exists, file not written"</source>
+ <target state="new">"{0} already exists, file not written"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidCommandLineOption">
+ <source>"Invalid option {0}"</source>
+ <target state="new">"Invalid option {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidServerPath">
+ <source>"Invalid server path '{0}'"</source>
+ <target state="new">"Invalid server path '{0}'"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="NoInputFiles">
+ <source>"Input files not found"</source>
+ <target state="new">"Input files not found"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFile">
+ <source>"Writing: {0}"</source>
+ <target state="new">"Writing: {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFilesToOutput">
+ <source>"Writing files to {0}"</source>
+ <target state="new">"Writing files to {0}"</target>
+ <note />
+ </trans-unit>
+ </body>
+ </file>
+</xliff>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<xliff xmlns="urn:oasis:names:tc:xliff:document:1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="1.2" xsi:schemaLocation="urn:oasis:names:tc:xliff:document:1.2 xliff-core-1.2-transitional.xsd">
+ <file datatype="xml" source-language="en" target-language="es" original="../Resources.resx">
+ <body>
+ <trans-unit id="DownloadFromUri">
+ <source>"Downloading from {0}"</source>
+ <target state="new">"Downloading from {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="FileAlreadyExists">
+ <source>"{0} already exists, file not written"</source>
+ <target state="new">"{0} already exists, file not written"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidCommandLineOption">
+ <source>"Invalid option {0}"</source>
+ <target state="new">"Invalid option {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidServerPath">
+ <source>"Invalid server path '{0}'"</source>
+ <target state="new">"Invalid server path '{0}'"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="NoInputFiles">
+ <source>"Input files not found"</source>
+ <target state="new">"Input files not found"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFile">
+ <source>"Writing: {0}"</source>
+ <target state="new">"Writing: {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFilesToOutput">
+ <source>"Writing files to {0}"</source>
+ <target state="new">"Writing files to {0}"</target>
+ <note />
+ </trans-unit>
+ </body>
+ </file>
+</xliff>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<xliff xmlns="urn:oasis:names:tc:xliff:document:1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="1.2" xsi:schemaLocation="urn:oasis:names:tc:xliff:document:1.2 xliff-core-1.2-transitional.xsd">
+ <file datatype="xml" source-language="en" target-language="fr" original="../Resources.resx">
+ <body>
+ <trans-unit id="DownloadFromUri">
+ <source>"Downloading from {0}"</source>
+ <target state="new">"Downloading from {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="FileAlreadyExists">
+ <source>"{0} already exists, file not written"</source>
+ <target state="new">"{0} already exists, file not written"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidCommandLineOption">
+ <source>"Invalid option {0}"</source>
+ <target state="new">"Invalid option {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidServerPath">
+ <source>"Invalid server path '{0}'"</source>
+ <target state="new">"Invalid server path '{0}'"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="NoInputFiles">
+ <source>"Input files not found"</source>
+ <target state="new">"Input files not found"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFile">
+ <source>"Writing: {0}"</source>
+ <target state="new">"Writing: {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFilesToOutput">
+ <source>"Writing files to {0}"</source>
+ <target state="new">"Writing files to {0}"</target>
+ <note />
+ </trans-unit>
+ </body>
+ </file>
+</xliff>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<xliff xmlns="urn:oasis:names:tc:xliff:document:1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="1.2" xsi:schemaLocation="urn:oasis:names:tc:xliff:document:1.2 xliff-core-1.2-transitional.xsd">
+ <file datatype="xml" source-language="en" target-language="it" original="../Resources.resx">
+ <body>
+ <trans-unit id="DownloadFromUri">
+ <source>"Downloading from {0}"</source>
+ <target state="new">"Downloading from {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="FileAlreadyExists">
+ <source>"{0} already exists, file not written"</source>
+ <target state="new">"{0} already exists, file not written"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidCommandLineOption">
+ <source>"Invalid option {0}"</source>
+ <target state="new">"Invalid option {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidServerPath">
+ <source>"Invalid server path '{0}'"</source>
+ <target state="new">"Invalid server path '{0}'"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="NoInputFiles">
+ <source>"Input files not found"</source>
+ <target state="new">"Input files not found"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFile">
+ <source>"Writing: {0}"</source>
+ <target state="new">"Writing: {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFilesToOutput">
+ <source>"Writing files to {0}"</source>
+ <target state="new">"Writing files to {0}"</target>
+ <note />
+ </trans-unit>
+ </body>
+ </file>
+</xliff>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<xliff xmlns="urn:oasis:names:tc:xliff:document:1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="1.2" xsi:schemaLocation="urn:oasis:names:tc:xliff:document:1.2 xliff-core-1.2-transitional.xsd">
+ <file datatype="xml" source-language="en" target-language="ja" original="../Resources.resx">
+ <body>
+ <trans-unit id="DownloadFromUri">
+ <source>"Downloading from {0}"</source>
+ <target state="new">"Downloading from {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="FileAlreadyExists">
+ <source>"{0} already exists, file not written"</source>
+ <target state="new">"{0} already exists, file not written"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidCommandLineOption">
+ <source>"Invalid option {0}"</source>
+ <target state="new">"Invalid option {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidServerPath">
+ <source>"Invalid server path '{0}'"</source>
+ <target state="new">"Invalid server path '{0}'"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="NoInputFiles">
+ <source>"Input files not found"</source>
+ <target state="new">"Input files not found"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFile">
+ <source>"Writing: {0}"</source>
+ <target state="new">"Writing: {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFilesToOutput">
+ <source>"Writing files to {0}"</source>
+ <target state="new">"Writing files to {0}"</target>
+ <note />
+ </trans-unit>
+ </body>
+ </file>
+</xliff>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<xliff xmlns="urn:oasis:names:tc:xliff:document:1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="1.2" xsi:schemaLocation="urn:oasis:names:tc:xliff:document:1.2 xliff-core-1.2-transitional.xsd">
+ <file datatype="xml" source-language="en" target-language="ko" original="../Resources.resx">
+ <body>
+ <trans-unit id="DownloadFromUri">
+ <source>"Downloading from {0}"</source>
+ <target state="new">"Downloading from {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="FileAlreadyExists">
+ <source>"{0} already exists, file not written"</source>
+ <target state="new">"{0} already exists, file not written"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidCommandLineOption">
+ <source>"Invalid option {0}"</source>
+ <target state="new">"Invalid option {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidServerPath">
+ <source>"Invalid server path '{0}'"</source>
+ <target state="new">"Invalid server path '{0}'"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="NoInputFiles">
+ <source>"Input files not found"</source>
+ <target state="new">"Input files not found"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFile">
+ <source>"Writing: {0}"</source>
+ <target state="new">"Writing: {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFilesToOutput">
+ <source>"Writing files to {0}"</source>
+ <target state="new">"Writing files to {0}"</target>
+ <note />
+ </trans-unit>
+ </body>
+ </file>
+</xliff>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<xliff xmlns="urn:oasis:names:tc:xliff:document:1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="1.2" xsi:schemaLocation="urn:oasis:names:tc:xliff:document:1.2 xliff-core-1.2-transitional.xsd">
+ <file datatype="xml" source-language="en" target-language="pl" original="../Resources.resx">
+ <body>
+ <trans-unit id="DownloadFromUri">
+ <source>"Downloading from {0}"</source>
+ <target state="new">"Downloading from {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="FileAlreadyExists">
+ <source>"{0} already exists, file not written"</source>
+ <target state="new">"{0} already exists, file not written"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidCommandLineOption">
+ <source>"Invalid option {0}"</source>
+ <target state="new">"Invalid option {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidServerPath">
+ <source>"Invalid server path '{0}'"</source>
+ <target state="new">"Invalid server path '{0}'"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="NoInputFiles">
+ <source>"Input files not found"</source>
+ <target state="new">"Input files not found"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFile">
+ <source>"Writing: {0}"</source>
+ <target state="new">"Writing: {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFilesToOutput">
+ <source>"Writing files to {0}"</source>
+ <target state="new">"Writing files to {0}"</target>
+ <note />
+ </trans-unit>
+ </body>
+ </file>
+</xliff>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<xliff xmlns="urn:oasis:names:tc:xliff:document:1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="1.2" xsi:schemaLocation="urn:oasis:names:tc:xliff:document:1.2 xliff-core-1.2-transitional.xsd">
+ <file datatype="xml" source-language="en" target-language="pt-BR" original="../Resources.resx">
+ <body>
+ <trans-unit id="DownloadFromUri">
+ <source>"Downloading from {0}"</source>
+ <target state="new">"Downloading from {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="FileAlreadyExists">
+ <source>"{0} already exists, file not written"</source>
+ <target state="new">"{0} already exists, file not written"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidCommandLineOption">
+ <source>"Invalid option {0}"</source>
+ <target state="new">"Invalid option {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidServerPath">
+ <source>"Invalid server path '{0}'"</source>
+ <target state="new">"Invalid server path '{0}'"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="NoInputFiles">
+ <source>"Input files not found"</source>
+ <target state="new">"Input files not found"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFile">
+ <source>"Writing: {0}"</source>
+ <target state="new">"Writing: {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFilesToOutput">
+ <source>"Writing files to {0}"</source>
+ <target state="new">"Writing files to {0}"</target>
+ <note />
+ </trans-unit>
+ </body>
+ </file>
+</xliff>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<xliff xmlns="urn:oasis:names:tc:xliff:document:1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="1.2" xsi:schemaLocation="urn:oasis:names:tc:xliff:document:1.2 xliff-core-1.2-transitional.xsd">
+ <file datatype="xml" source-language="en" target-language="ru" original="../Resources.resx">
+ <body>
+ <trans-unit id="DownloadFromUri">
+ <source>"Downloading from {0}"</source>
+ <target state="new">"Downloading from {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="FileAlreadyExists">
+ <source>"{0} already exists, file not written"</source>
+ <target state="new">"{0} already exists, file not written"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidCommandLineOption">
+ <source>"Invalid option {0}"</source>
+ <target state="new">"Invalid option {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidServerPath">
+ <source>"Invalid server path '{0}'"</source>
+ <target state="new">"Invalid server path '{0}'"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="NoInputFiles">
+ <source>"Input files not found"</source>
+ <target state="new">"Input files not found"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFile">
+ <source>"Writing: {0}"</source>
+ <target state="new">"Writing: {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFilesToOutput">
+ <source>"Writing files to {0}"</source>
+ <target state="new">"Writing files to {0}"</target>
+ <note />
+ </trans-unit>
+ </body>
+ </file>
+</xliff>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<xliff xmlns="urn:oasis:names:tc:xliff:document:1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="1.2" xsi:schemaLocation="urn:oasis:names:tc:xliff:document:1.2 xliff-core-1.2-transitional.xsd">
+ <file datatype="xml" source-language="en" target-language="tr" original="../Resources.resx">
+ <body>
+ <trans-unit id="DownloadFromUri">
+ <source>"Downloading from {0}"</source>
+ <target state="new">"Downloading from {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="FileAlreadyExists">
+ <source>"{0} already exists, file not written"</source>
+ <target state="new">"{0} already exists, file not written"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidCommandLineOption">
+ <source>"Invalid option {0}"</source>
+ <target state="new">"Invalid option {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidServerPath">
+ <source>"Invalid server path '{0}'"</source>
+ <target state="new">"Invalid server path '{0}'"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="NoInputFiles">
+ <source>"Input files not found"</source>
+ <target state="new">"Input files not found"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFile">
+ <source>"Writing: {0}"</source>
+ <target state="new">"Writing: {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFilesToOutput">
+ <source>"Writing files to {0}"</source>
+ <target state="new">"Writing files to {0}"</target>
+ <note />
+ </trans-unit>
+ </body>
+ </file>
+</xliff>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<xliff xmlns="urn:oasis:names:tc:xliff:document:1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="1.2" xsi:schemaLocation="urn:oasis:names:tc:xliff:document:1.2 xliff-core-1.2-transitional.xsd">
+ <file datatype="xml" source-language="en" target-language="zh-Hans" original="../Resources.resx">
+ <body>
+ <trans-unit id="DownloadFromUri">
+ <source>"Downloading from {0}"</source>
+ <target state="new">"Downloading from {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="FileAlreadyExists">
+ <source>"{0} already exists, file not written"</source>
+ <target state="new">"{0} already exists, file not written"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidCommandLineOption">
+ <source>"Invalid option {0}"</source>
+ <target state="new">"Invalid option {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidServerPath">
+ <source>"Invalid server path '{0}'"</source>
+ <target state="new">"Invalid server path '{0}'"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="NoInputFiles">
+ <source>"Input files not found"</source>
+ <target state="new">"Input files not found"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFile">
+ <source>"Writing: {0}"</source>
+ <target state="new">"Writing: {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFilesToOutput">
+ <source>"Writing files to {0}"</source>
+ <target state="new">"Writing files to {0}"</target>
+ <note />
+ </trans-unit>
+ </body>
+ </file>
+</xliff>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="utf-8"?>
+<xliff xmlns="urn:oasis:names:tc:xliff:document:1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="1.2" xsi:schemaLocation="urn:oasis:names:tc:xliff:document:1.2 xliff-core-1.2-transitional.xsd">
+ <file datatype="xml" source-language="en" target-language="zh-Hant" original="../Resources.resx">
+ <body>
+ <trans-unit id="DownloadFromUri">
+ <source>"Downloading from {0}"</source>
+ <target state="new">"Downloading from {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="FileAlreadyExists">
+ <source>"{0} already exists, file not written"</source>
+ <target state="new">"{0} already exists, file not written"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidCommandLineOption">
+ <source>"Invalid option {0}"</source>
+ <target state="new">"Invalid option {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="InvalidServerPath">
+ <source>"Invalid server path '{0}'"</source>
+ <target state="new">"Invalid server path '{0}'"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="NoInputFiles">
+ <source>"Input files not found"</source>
+ <target state="new">"Input files not found"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFile">
+ <source>"Writing: {0}"</source>
+ <target state="new">"Writing: {0}"</target>
+ <note />
+ </trans-unit>
+ <trans-unit id="WritingFilesToOutput">
+ <source>"Writing files to {0}"</source>
+ <target state="new">"Writing files to {0}"</target>
+ <note />
+ </trans-unit>
+ </body>
+ </file>
+</xliff>
\ No newline at end of file
--- /dev/null
+# Symbol downloader dotnet cli extension #
+
+This tool can download all the files needed for debugging (symbols, modules, and the SOS and DAC for the given coreclr module) for any core dump or minidump, as well as for files in any supported platform format such as ELF, Mach-O, Windows DLLs, PDBs, and portable PDBs. See [debugging coredumps](https://github.com/dotnet/diagnostics/blob/main/documentation/debugging-coredump.md) for more details.
+
+ Usage: dotnet symbol [options] <FILES>
+
+ Arguments:
+ <FILES> List of files. Can contain wildcards.
+
+ Options:
+ --microsoft-symbol-server Add 'https://msdl.microsoft.com/download/symbols' symbol server path (default).
+ --server-path <symbol server path> Add a http server path.
+ --authenticated-server-path <pat> <server path> Add a http PAT authenticated server path.
+ --cache-directory <file cache directory> Add a cache directory.
+ --recurse-subdirectories Process input files in all subdirectories.
+ --host-only Download only the host program (i.e. dotnet) that lldb needs for loading coredumps.
+ --symbols Download the symbol files (.pdb, .dbg, .dwarf).
+ --modules Download the module files (.dll, .so, .dylib).
+ --debugging Download the special debugging modules (DAC, DBI, SOS).
+ --windows-pdbs Force the downloading of the Windows PDBs when Portable PDBs are also available.
+ -o, --output <output directory> Set the output directory. Otherwise, write next to the input file (default).
+ -d, --diagnostics Enable diagnostic output.
+ -h, --help Show help information.
+
+## Install ##
+
+This is a dotnet global tool "extension" that requires [.NET Core 2.1](https://www.microsoft.com/net/download/) or greater. The latest version of the downloader can be installed with the following command. Make sure you are not in a project directory whose NuGet.Config doesn't include nuget.org as a source; see the Notes section below if you run into errors.
+
+ dotnet tool install -g dotnet-symbol
+
+If you already have dotnet-symbol installed you can update it with:
+
+ dotnet tool update -g dotnet-symbol
+
+## Examples ##
+
+This attempts to download all the modules, symbols, and DAC/DBI files needed to debug the core dump, including the managed assemblies and their PDBs, for a Linux/ELF core dump or a Windows minidump:
+
+ dotnet-symbol coredump.4507
+
+This downloads just the host program needed to load a core dump on Linux or macOS under lldb. SOS under lldb can download the rest of the symbols and modules needed on demand or with the "loadsymbols" command. See [debugging coredumps](https://github.com/dotnet/diagnostics/blob/main/documentation/debugging-coredump.md) for more details.
+
+ dotnet-symbol --host-only coredump.4507
+
+To download the symbol files for a specific assembly:
+
+ dotnet-symbol --symbols --cache-directory c:\temp\symcache --server-path https://msdl.microsoft.com/download/symbols --output c:\temp\symout System.Threading.dll
+
+This downloads all the symbol files for the shared runtime:
+
+ dotnet-symbol --symbols --output /tmp/symbols /usr/share/dotnet/shared/Microsoft.NETCore.App/2.0.3/*
+
+After the symbols are downloaded to `/tmp/symbols`, they can be copied back to the runtime directory above so that native debuggers like lldb or gdb can find them; the copy must be done as superuser:
+
+ sudo cp /tmp/symbols/* /usr/share/dotnet/shared/Microsoft.NETCore.App/2.0.3
+
+To verify a symbol package on a local VSTS symbol server:
+
+ dotnet-symbol --authenticated-server-path x349x9dfkdx33333livjit4wcvaiwc3v4wjyvnq https://mikemvsts.artifacts.visualstudio.com/defaultcollection/_apis/Symbol/symsrv coredump.45634
+
+## Notes ##
+
+Symbol download is only supported for official .NET Core runtime versions acquired through official channels such as [the official web site](https://dotnet.microsoft.com/download/dotnet-core) and the [default sources in the dotnet installation scripts](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-install-scripts). Runtimes obtained from community sites like [archlinux](https://www.archlinux.org/packages/community/x86_64/dotnet-runtime/) are not supported.
+
+Core dumps generated with gdb (the generate-core-file command) or gcore (a utility that ships with gdb) do not currently work with this utility (issue [#47](https://github.com/dotnet/symstore/issues/47)).
+
+The best way to generate core dumps on Linux (not supported on Windows or macOS) is to use the [createdump](https://github.com/dotnet/runtime/blob/main/docs/design/coreclr/botr/xplat-minidump-generation.md) facility that is part of .NET Core 2.0 and greater. It can be set up (see [createdump](https://github.com/dotnet/runtime/blob/main/docs/design/coreclr/botr/xplat-minidump-generation.md#configurationpolicy) for the details) to automatically generate a "minidump"-like ELF core dump when your .NET Core app crashes. The core dump will contain all the managed state necessary for analysis with SOS or dotnet-dump.
+
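+For example, a minimal sketch of enabling createdump through environment variables (names per the createdump configuration docs linked above; the `DOTNET_` prefix requires .NET 5 or later, older runtimes use `COMPlus_`, and `MyApp.dll` is a placeholder):
+
+    export DOTNET_DbgEnableMiniDump=1                # generate a dump on crash
+    export DOTNET_DbgMiniDumpType=4                  # 4 = full dump (1 = mini, 2 = heap, 3 = triage)
+    export DOTNET_DbgMiniDumpName=/tmp/coredump.%p   # %p expands to the process id
+    dotnet MyApp.dll
+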
+Linux system core generation (enabled with `ulimit -c unlimited`) also works if the coredump_filter flags (see [core](http://man7.org/linux/man-pages/man5/core.5.html)) are set to at least 0x3f, but the resulting dumps are usually much larger than necessary:
+
+    echo 0x3f > /proc/self/coredump_filter
+    ulimit -c unlimited
+
+If you receive the error below when installing the extension, you are in a project or directory whose NuGet.Config doesn't include nuget.org as a source.
+
+ error NU1101: Unable to find package dotnet-symbol. No packages exist with this id in source(s): ...
+ The tool package could not be restored.
+ Tool 'dotnet-symbol' failed to install. This failure may have been caused by:
+
+ * You are attempting to install a preview release and did not use the --version option to specify the version.
+ * A package by this name was found, but it was not a .NET Core tool.
+ * The required NuGet feed cannot be accessed, perhaps because of an Internet connection problem.
+ * You mistyped the name of the tool.
+
+You can either run the install command from your $HOME directory (on Windows, %USERPROFILE%) or override this behavior with the `--add-source` option:
+
+`dotnet tool install -g --add-source https://api.nuget.org/v3/index.json dotnet-symbol`
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+
+namespace Microsoft.Diagnostics.Tools.Symbol
+{
+ /// <summary>
+ /// Simple trace/logging support.
+ /// </summary>
+ internal sealed class Tracer : Microsoft.SymbolStore.ITracer
+ {
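+ // Enabled gates Information and Warning output; EnabledVerbose gates Verbose.
+ // Error and WriteLine always write to the console.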
+ public bool Enabled;
+ public bool EnabledVerbose;
+
+ public void WriteLine(string message)
+ {
+ Console.WriteLine(message);
+ }
+
+ public void WriteLine(string format, params object[] arguments)
+ {
+ Console.WriteLine(format, arguments);
+ }
+
+ public void Information(string message)
+ {
+ if (Enabled)
+ {
+ Console.WriteLine(message);
+ }
+ }
+
+ public void Information(string format, params object[] arguments)
+ {
+ if (Enabled)
+ {
+ Console.WriteLine(format, arguments);
+ }
+ }
+
+ public void Warning(string message)
+ {
+ if (Enabled)
+ {
+ Console.WriteLine("WARNING: " + message);
+ }
+ }
+
+ public void Warning(string format, params object[] arguments)
+ {
+ if (Enabled)
+ {
+ Console.WriteLine("WARNING: " + format, arguments);
+ }
+ }
+
+ public void Error(string message)
+ {
+ Console.WriteLine("ERROR: " + message);
+ }
+
+ public void Error(string format, params object[] arguments)
+ {
+ Console.WriteLine("ERROR: " + format, arguments);
+ }
+
+ public void Verbose(string message)
+ {
+ if (EnabledVerbose)
+ {
+ Console.WriteLine(message);
+ }
+ }
+
+ public void Verbose(string format, params object[] arguments)
+ {
+ if (EnabledVerbose)
+ {
+ Console.WriteLine(format, arguments);
+ }
+ }
+ }
+}
--- /dev/null
+<Project Sdk="Microsoft.NET.Sdk">
+ <PropertyGroup>
+ <TargetFramework>net6.0</TargetFramework>
+ <ToolCommandName>dotnet-symbol</ToolCommandName>
+ <RootNamespace>Microsoft.Diagnostic.Tools.Symbol</RootNamespace>
+ <Description>Symbols download utility</Description>
+ <PackageTags>Symbols</PackageTags>
+ <PackageReleaseNotes>$(Description)</PackageReleaseNotes>
+ <NoWarn>;1591;1701</NoWarn>
+ </PropertyGroup>
+
+ <ItemGroup>
+ <ProjectReference Include="$(MSBuildThisFileDirectory)..\..\Microsoft.SymbolStore\Microsoft.SymbolStore.csproj">
+ <PrivateAssets>All</PrivateAssets>
+ </ProjectReference>
+ </ItemGroup>
+
+ <ItemGroup>
+ <Compile Update="Properties\Resources.Designer.cs">
+ <DesignTime>True</DesignTime>
+ <AutoGen>True</AutoGen>
+ <DependentUpon>Resources.resx</DependentUpon>
+ </Compile>
+ </ItemGroup>
+
+ <ItemGroup>
+ <EmbeddedResource Update="Properties\Resources.resx">
+ <Generator>ResXFileCodeGenerator</Generator>
+ <LastGenOutput>Resources.Designer.cs</LastGenOutput>
+ </EmbeddedResource>
+ </ItemGroup>
+</Project>
--- /dev/null
+{
+ "rollForwardOnNoCandidateFx": 2
+}
\ No newline at end of file
enabledBy[providerCollectionProvider.Name] = "--providers ";
}
- bool collectRundownEvents = true;
+ long rundownKeyword = EventPipeSession.DefaultRundownKeyword;
+ RetryStrategy retryStrategy = RetryStrategy.NothingToRetry;
if (profile.Length != 0)
{
return (int)ReturnCode.ArgumentError;
}
- collectRundownEvents = selectedProfile.Rundown;
+ rundownKeyword = selectedProfile.RundownKeyword;
+ retryStrategy = selectedProfile.RetryStrategy;
Profile.MergeProfileAndProviders(selectedProfile, providerCollection, enabledBy);
}
if (rundown.HasValue)
{
- collectRundownEvents = rundown.Value;
+ if (rundown.Value)
+ {
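+ // --rundown true requests at least the default rundown events. If a profile
+ // already added a custom rundown keyword, an older runtime may reject it,
+ // so plan to retry with just the default keyword in that case.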
+ rundownKeyword |= EventPipeSession.DefaultRundownKeyword;
+ retryStrategy = (rundownKeyword == EventPipeSession.DefaultRundownKeyword) ? RetryStrategy.NothingToRetry : RetryStrategy.DropKeywordKeepRundown;
+ }
+ else
+ {
+ rundownKeyword = 0;
+ retryStrategy = RetryStrategy.NothingToRetry;
+ }
}
// Parse --clrevents parameter
EventPipeSession session = null;
try
{
- session = diagnosticsClient.StartEventPipeSession(providerCollection, collectRundownEvents, (int)buffersize);
- if (resumeRuntime)
- {
- try
- {
- diagnosticsClient.ResumeRuntime();
- }
- catch (UnsupportedCommandException)
- {
- // Noop if command is unsupported, since the target is most likely a 3.1 app.
- }
- }
+ EventPipeSessionConfiguration config = new(providerCollection, (int)buffersize, rundownKeyword: rundownKeyword, requestStackwalk: true);
+ session = diagnosticsClient.StartEventPipeSession(config);
}
- catch (DiagnosticsClientException e)
+ catch (UnsupportedCommandException e)
{
- Console.Error.WriteLine($"Unable to start a tracing session: {e}");
- return (int)ReturnCode.SessionCreationError;
+ if (retryStrategy == RetryStrategy.DropKeywordKeepRundown)
+ {
+ Console.Error.WriteLine("The runtime version being traced doesn't support the custom rundown feature used by this tracing configuration, retrying with the standard rundown keyword");
+ //
+ // If you are building new profiles or options, you can test with these asserts to make sure you are writing
+ // the retry strategies correctly.
+ //
+ // If these asserts ever fire, it means something is wrong with the option generation logic, leading to unnecessary retries.
+ // Unnecessary retries are not fatal.
+ //
+ // Debug.Assert(rundownKeyword != 0);
+ // Debug.Assert(rundownKeyword != EventPipeSession.DefaultRundownKeyword);
+ //
+ EventPipeSessionConfiguration config = new(providerCollection, (int)buffersize, rundownKeyword: EventPipeSession.DefaultRundownKeyword, requestStackwalk: true);
+ session = diagnosticsClient.StartEventPipeSession(config);
+ }
+ else if (retryStrategy == RetryStrategy.DropKeywordDropRundown)
+ {
+ Console.Error.WriteLine("The runtime version being traced doesn't support the custom rundown feature used by this tracing configuration, retrying with the rundown omitted");
+ //
+ // If you are building new profiles or options, you can test with these asserts to make sure you are writing
+ // the retry strategies correctly.
+ //
+ // If these asserts ever fire, it means something is wrong with the option generation logic, leading to unnecessary retries.
+ // Unnecessary retries are not fatal.
+ //
+ // Debug.Assert(rundownKeyword != 0);
+ // Debug.Assert(rundownKeyword != EventPipeSession.DefaultRundownKeyword);
+ //
+ EventPipeSessionConfiguration config = new(providerCollection, (int)buffersize, rundownKeyword: 0, requestStackwalk: true);
+ session = diagnosticsClient.StartEventPipeSession(config);
+ }
+ else
+ {
+ Console.Error.WriteLine($"Unable to start a tracing session: {e}");
+ return (int)ReturnCode.SessionCreationError;
+ }
}
catch (UnauthorizedAccessException e)
{
Console.Error.WriteLine($"dotnet-trace does not have permission to access the specified app: {e.GetType()}");
return (int)ReturnCode.SessionCreationError;
}
-
+ if (resumeRuntime)
+ {
+ try
+ {
+ diagnosticsClient.ResumeRuntime();
+ }
+ catch (UnsupportedCommandException)
+ {
+ // Noop if command is unsupported, since the target is most likely a 3.1 app.
+ }
+ }
if (session == null)
{
Console.Error.WriteLine("Unable to create session.");
new Profile(
"cpu-sampling",
new EventPipeProvider[] {
- new EventPipeProvider("Microsoft-DotNETCore-SampleProfiler", EventLevel.Informational),
- new EventPipeProvider("Microsoft-Windows-DotNETRuntime", EventLevel.Informational, (long)ClrTraceEventParser.Keywords.Default)
+ new("Microsoft-DotNETCore-SampleProfiler", EventLevel.Informational),
+ new("Microsoft-Windows-DotNETRuntime", EventLevel.Informational, (long)ClrTraceEventParser.Keywords.Default)
},
"Useful for tracking CPU usage and general .NET runtime information. This is the default option if no profile or providers are specified."),
new Profile(
"gc-verbose",
new EventPipeProvider[] {
- new EventPipeProvider(
+ new(
name: "Microsoft-Windows-DotNETRuntime",
eventLevel: EventLevel.Verbose,
keywords: (long)ClrTraceEventParser.Keywords.GC |
(long)ClrTraceEventParser.Keywords.GCHandle |
(long)ClrTraceEventParser.Keywords.Exception
- ),
+ )
},
"Tracks GC collections and samples object allocations."),
new Profile(
"gc-collect",
new EventPipeProvider[] {
- new EventPipeProvider(
+ new(
name: "Microsoft-Windows-DotNETRuntime",
eventLevel: EventLevel.Informational,
keywords: (long)ClrTraceEventParser.Keywords.GC
),
- new EventPipeProvider(
+ new(
name: "Microsoft-Windows-DotNETRuntimePrivate",
eventLevel: EventLevel.Informational,
keywords: (long)ClrTraceEventParser.Keywords.GC
)
},
- "Tracks GC collections only at very low overhead.") { Rundown = false },
+ "Tracks GC collections only at very low overhead.") { RundownKeyword = (long)ClrTraceEventParser.Keywords.GC, RetryStrategy = RetryStrategy.DropKeywordDropRundown },
new Profile(
"database",
new EventPipeProvider[] {
- new EventPipeProvider(
+ new(
name: "System.Threading.Tasks.TplEventSource",
eventLevel: EventLevel.Informational,
keywords: (long)TplEtwProviderTraceEventParser.Keywords.TasksFlowActivityIds
),
- new EventPipeProvider(
+ new(
name: "Microsoft-Diagnostics-DiagnosticSource",
eventLevel: EventLevel.Verbose,
keywords: (long)DiagnosticSourceKeywords.Messages |
{ "compilationdiagnostic", 0x2000000000 },
{ "methoddiagnostic", 0x4000000000 },
{ "typediagnostic", 0x8000000000 },
+ { "waithandle", 0x40000000000 },
};
public static List<EventPipeProvider> ToProviders(string providersRawInput)
public string Description { get; }
- public bool Rundown { get; set; } = true;
+ public long RundownKeyword { get; set; } = EventPipeSession.DefaultRundownKeyword;
+
+ public RetryStrategy RetryStrategy { get; set; } = RetryStrategy.NothingToRetry;
public static void MergeProfileAndProviders(Profile selectedProfile, List<EventPipeProvider> providerCollection, Dictionary<string, string> enabledBy)
{
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+//
+// This enum describes the various strategies for retrying a command.
+// The rough idea is that these numbers form a state machine: any time a
+// command execution fails, a retry is attempted by matching the condition
+// of the config and this strategy number to generate a modified config
+// and a modified strategy.
+//
+// This is designed with forward compatibility in mind. We might have newer
+// capabilities that only exist in newer runtimes, and we can never know in
+// advance exactly how to retry. So this gives us a way to encode the retry
+// strategy in the profiles without having to introduce new concepts.
+//
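+// A sketch of the intended transitions (illustrative, not normative):
+//   DropKeywordKeepRundown: a custom rundown keyword was rejected; retry with
+//     the default rundown keyword, after which there is nothing left to retry.
+//   DropKeywordDropRundown: a custom rundown keyword was rejected; retry with
+//     rundown disabled entirely (keyword 0), after which there is nothing left to retry.
+//   ForbiddenToRetry: do not retry; fail the session instead.
+//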
+namespace Microsoft.Diagnostics.Tools.Trace
+{
+ internal enum RetryStrategy
+ {
+ NothingToRetry = 0,
+ DropKeywordKeepRundown = 1,
+ DropKeywordDropRundown = 2,
+ ForbiddenToRetry = 3
+ }
+}
#undef REG
#elif defined(TARGET_RISCV64)
#undef REG
-#define REG(reg, field) { offsetof(RiscV64VolatileContextPointer, field) }
- REG(zero, R0),
- REG(a0, A0),
- REG(a1, A1),
- REG(a2, A2),
- REG(a3, A3),
- REG(a4, A4),
- REG(a5, A5),
- REG(a6, A6),
- REG(a7, A7),
- REG(t0, T0),
- REG(t1, T1),
- REG(t2, T2),
- REG(t3, T3),
- REG(t4, T4),
- REG(t5, T5),
- REG(t6, T6),
-#undef REG
-#define REG(reg, field) { offsetof(T_KNONVOLATILE_CONTEXT_POINTERS, field) }
+#define REG(reg, field) { FIELD_OFFSET(T_KNONVOLATILE_CONTEXT_POINTERS, field) }
+#define vREG(reg, field) { FIELD_OFFSET(RiscV64VolatileContextPointer, field) }
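+// Table rows are ordered by RISC-V register number x0..x31:
+// zero, ra, sp, gp, tp, t0-t2, fp(s0), s1, a0-a7, s2-s11, t3-t6.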
+ vREG(zero, R0),
+ REG(Ra, Ra),
+ { FIELD_OFFSET(T_CONTEXT, Sp) },
+ REG(Gp, Gp),
+ REG(Tp, Tp),
+ vREG(t0, T0),
+ vREG(t1, T1),
+ vREG(t2, T2),
+ REG(Fp, Fp),
REG(s1, S1),
+ vREG(a0, A0),
+ vREG(a1, A1),
+ vREG(a2, A2),
+ vREG(a3, A3),
+ vREG(a4, A4),
+ vREG(a5, A5),
+ vREG(a6, A6),
+ vREG(a7, A7),
REG(s2, S2),
REG(s3, S3),
REG(s4, S4),
REG(s9, S9),
REG(s10, S10),
REG(s11, S11),
- REG(ra, Ra),
- REG(gp, Gp),
- REG(tp, Tp),
- REG(fp, Fp),
- { offsetof(T_CONTEXT, Sp) },
+ vREG(t3, T3),
+ vREG(t4, T4),
+ vREG(t5, T5),
+ vREG(t6, T6),
+#undef vREG
#undef REG
#else
PORTABILITY_ASSERT("GcInfoDumper::ReportPointerRecord is not implemented on this platform.")
iSPRegister = (FIELD_OFFSET(T_CONTEXT, Sp) - FIELD_OFFSET(T_CONTEXT, R0)) / sizeof(ULONG);
UINT iBFRegister = m_StackBaseRegister;
#elif defined(TARGET_RISCV64)
- assert(!"unimplemented on RISCV64 yet");
- iSPRegister = 0;
+ iSPRegister = (FIELD_OFFSET(T_CONTEXT, Sp) - FIELD_OFFSET(T_CONTEXT, R0)) / sizeof(ULONGLONG);
#endif
-#if defined(TARGET_ARM) || defined(TARGET_ARM64)
+#if defined(TARGET_ARM) || defined(TARGET_ARM64) || defined(TARGET_RISCV64)
BYTE* pContext = (BYTE*)&(pRD->volatileCurrContextPointers);
#else
BYTE* pContext = (BYTE*)pRD->pCurrentContext;
{
break;
}
+#elif defined(TARGET_RISCV64)
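+ // Volatile (caller-saved) registers by RISC-V number: x0 (zero), x5-x7 (t0-t2),
+ // x10-x17 (a0-a7), x28-x31 (t3-t6).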
+ bool isVolatile = (iReg == 0 || (iReg >= 5 && iReg <= 7) || (iReg >= 10 && iReg <= 17) || iReg >= 28);
+ if (ctx == 0)
+ {
+ if (!isVolatile)
+ {
+ continue;
+ }
+ }
+ else if (isVolatile) // skip volatile registers for second context
+ {
+ continue;
+ }
#endif
{
_ASSERTE(iReg < ARRAY_SIZE(rgRegisters));
pReg = *(SIZE_T**)((BYTE*)pRD->pCurrentContextPointers + rgRegisters[iEncodedReg].cbContextOffset);
}
-#elif defined(TARGET_ARM64)
+#elif defined(TARGET_ARM64) || defined(TARGET_RISCV64)
pReg = *(SIZE_T**)(pContext + rgRegisters[iReg].cbContextOffset);
if (iEncodedReg == iSPRegister)
{
GcStackSlotBase base;
if (iSPRegister == iEncodedReg)
{
-#if defined(TARGET_ARM) || defined(TARGET_ARM64)
+#if defined(TARGET_ARM) || defined(TARGET_ARM64) || defined(TARGET_RISCV64)
base = GC_SP_REL;
#else
if (0 == ctx)
base = GC_SP_REL;
else
base = GC_CALLER_SP_REL;
-#endif //defined(TARGET_ARM) || defined(TARGET_ARM64)
+#endif //defined(TARGET_ARM) || defined(TARGET_ARM64) || defined(TARGET_RISCV64)
}
else
{
}
}
-#if defined(TARGET_ARM) || defined(TARGET_ARM64)
+#if defined(TARGET_ARM) || defined(TARGET_ARM64) || defined(TARGET_RISCV64)
pContext = (BYTE*)pRD->pCurrentContextPointers;
#else
pContext = (BYTE*)pRD->pCallerContext;
*(ppVolatileReg+iReg) = &regdisp.pCurrentContext->X0 + iReg;
}
#elif defined(TARGET_RISCV64)
-#pragma message("Unimplemented for RISCV64 yet.")
- assert(!"unimplemented on RISCV64 yet");
+ FILL_REGS(pCurrentContext->R0, 33);
+ FILL_REGS(pCallerContext->R0, 33);
+
+ regdisp.pCurrentContextPointers = &regdisp.ctxPtrsOne;
+ regdisp.pCallerContextPointers = &regdisp.ctxPtrsTwo;
+
+ // Set S1
+ regdisp.pCurrentContextPointers->S1 = &regdisp.pCurrentContext->S1;
+ regdisp.pCallerContextPointers ->S1 = &regdisp.pCallerContext ->S1;
+
+ ULONG64 **ppCurrentReg = &regdisp.pCurrentContextPointers->S2;
+ ULONG64 **ppCallerReg = &regdisp.pCallerContextPointers ->S2;
+ // Set S2-S11
+ for (iReg = 0; iReg < 10; iReg++)
+ {
+ *(ppCurrentReg + iReg) = &regdisp.pCurrentContext->S2 + iReg;
+ *(ppCallerReg + iReg) = &regdisp.pCallerContext ->S2 + iReg;
+ }
+
+ // Set Fp
+ regdisp.pCurrentContextPointers->Fp = &regdisp.pCurrentContext->Fp;
+ regdisp.pCallerContextPointers ->Fp = &regdisp.pCallerContext ->Fp;
+
+ // Set Gp
+ regdisp.pCurrentContextPointers->Gp = &regdisp.pCurrentContext->Gp;
+ regdisp.pCallerContextPointers ->Gp = &regdisp.pCallerContext ->Gp;
+
+ // Set Tp
+ regdisp.pCurrentContextPointers->Tp = &regdisp.pCurrentContext->Tp;
+ regdisp.pCallerContextPointers ->Tp = &regdisp.pCallerContext ->Tp;
+
+ // Set Ra
+ regdisp.pCurrentContextPointers->Ra = &regdisp.pCurrentContext->Ra;
+ regdisp.pCallerContextPointers ->Ra = &regdisp.pCallerContext ->Ra;
+
+ regdisp.volatileCurrContextPointers.R0 = &regdisp.pCurrentContext->R0;
+ regdisp.volatileCurrContextPointers.A0 = &regdisp.pCurrentContext->A0;
+ regdisp.volatileCurrContextPointers.A1 = &regdisp.pCurrentContext->A1;
+ regdisp.volatileCurrContextPointers.A2 = &regdisp.pCurrentContext->A2;
+ regdisp.volatileCurrContextPointers.A3 = &regdisp.pCurrentContext->A3;
+ regdisp.volatileCurrContextPointers.A4 = &regdisp.pCurrentContext->A4;
+ regdisp.volatileCurrContextPointers.A5 = &regdisp.pCurrentContext->A5;
+ regdisp.volatileCurrContextPointers.A6 = &regdisp.pCurrentContext->A6;
+ regdisp.volatileCurrContextPointers.A7 = &regdisp.pCurrentContext->A7;
+ regdisp.volatileCurrContextPointers.T0 = &regdisp.pCurrentContext->T0;
+ regdisp.volatileCurrContextPointers.T1 = &regdisp.pCurrentContext->T1;
+ regdisp.volatileCurrContextPointers.T2 = &regdisp.pCurrentContext->T2;
+ regdisp.volatileCurrContextPointers.T3 = &regdisp.pCurrentContext->T3;
+ regdisp.volatileCurrContextPointers.T4 = &regdisp.pCurrentContext->T4;
+ regdisp.volatileCurrContextPointers.T5 = &regdisp.pCurrentContext->T5;
+ regdisp.volatileCurrContextPointers.T6 = &regdisp.pCurrentContext->T6;
#else
PORTABILITY_ASSERT("GcInfoDumper::EnumerateStateChanges is not implemented on this platform.")
#endif
(GcInfoDecoderFlags)( DECODE_SECURITY_OBJECT
| DECODE_CODE_LENGTH
| DECODE_VARARG
-#if defined(TARGET_ARM) || defined(TARGET_ARM64)
+#if defined(TARGET_ARM) || defined(TARGET_ARM64) || defined(TARGET_RISCV64)
| DECODE_HAS_TAILCALLS
-#endif // TARGET_ARM || TARGET_ARM64
+#endif // TARGET_ARM || TARGET_ARM64 || TARGET_RISCV64
| DECODE_INTERRUPTIBILITY),
offset);
#ifdef PARTIALLY_INTERRUPTIBLE_GC_SUPPORTED
UINT32 safePointOffset = offset;
-#if defined(TARGET_AMD64) || defined(TARGET_ARM) || defined(TARGET_ARM64)
+#if defined(TARGET_AMD64) || defined(TARGET_ARM) || defined(TARGET_ARM64) || defined(TARGET_RISCV64)
safePointOffset++;
#endif
if(safePointDecoder.IsSafePoint(safePointOffset))
HRESULT Request(ISOSDacInterface *sos, CLRDATA_ADDRESS addr)
{
- return Request(sos, addr, NULL);
+ return Request(sos, addr, (TADDR)0);
}
};
{
return sos->GetMethodDescData(
addr,
- NULL, // IP address
+ (TADDR)0, // IP address
this,
0, // cRejitData
NULL, // rejitData[]
template <typename T>
inline T* InterlockedExchangePointerT(
T* volatile * target,
- int value) // When NULL is provided as argument.
+ std::nullptr_t value) // When NULL is provided as argument.
{
//STATIC_ASSERT(value == 0);
- return InterlockedExchangePointerT(target, reinterpret_cast<T*>(value));
+ return InterlockedExchangePointerT(target, (T*)(void*)value);
}
template <typename T>
inline T* InterlockedCompareExchangePointerT(
T* volatile * destination,
T* exchange,
- int comparand) // When NULL is provided as argument.
+ std::nullptr_t comparand) // When NULL is provided as argument.
{
//STATIC_ASSERT(comparand == 0);
- return InterlockedCompareExchangePointerT(destination, exchange, reinterpret_cast<T*>(comparand));
+ return InterlockedCompareExchangePointerT(destination, exchange, (T*)(void*)comparand);
}
#undef InterlockedExchangePointer
HRESULT SString::LoadResourceAndReturnHR(CCompRC* pResourceDLL, CCompRC::ResourceCategory eCategory, int resourceID)
{
- CONTRACT(BOOL)
+ CONTRACT(HRESULT)
{
INSTANCE_CHECK;
NOTHROW;
if (_symbolService is null)
{
_symbolService = new SymbolService(this);
- _symbolService.AddSymbolServer(msdl: true, symweb: false, timeoutInMinutes: 6, retryCount: 5);
+ _symbolService.AddSymbolServer(timeoutInMinutes: 6, retryCount: 5);
_symbolService.AddCachePath(SymbolService.DefaultSymbolCache);
}
return _symbolService;
Assert.Equal(defaultPath, symbolService.FormatSymbolStores());
symbolService.DisableSymbolStore();
- Assert.True(symbolService.ParseSymbolPath($"srv*{localSymbolCache}*{SymbolService.SymwebSymbolServer}"));
- string testpath1 = $"Cache: {localSymbolCache} Server: {SymbolService.SymwebSymbolServer}";
+ Assert.True(symbolService.ParseSymbolPath($"srv*{localSymbolCache}*https://symweb/"));
+ string testpath1 = $"Cache: {localSymbolCache} Server: https://symweb/";
Assert.Equal(testpath1, symbolService.FormatSymbolStores());
symbolService.DisableSymbolStore();
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Linq;
+using Xunit;
+
+namespace Microsoft.FileFormats.Tests
+{
+ public class AddressSpace
+ {
+ [Fact]
+ public void GoodReads()
+ {
+ MemoryBufferAddressSpace buffer = new MemoryBufferAddressSpace(new byte[] { 1, 2, 3, 4, 5 });
+ Assert.True(Enumerable.SequenceEqual(new byte[] { 1 }, buffer.Read(0, 1)));
+ Assert.True(Enumerable.SequenceEqual(new byte[] { 3, 4 }, buffer.Read(2, 2)));
+ Assert.True(Enumerable.SequenceEqual(new byte[] { 1, 2, 3, 4, 5 }, buffer.Read(0, 5)));
+ Assert.True(Enumerable.SequenceEqual(new byte[0], buffer.Read(0, 0)));
+ Assert.True(Enumerable.SequenceEqual(new byte[0], buffer.Read(4, 0)));
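+ // Zero-length reads succeed while the offset stays within the buffer; BadReads below shows offset 5 fails even for length 0.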
+ }
+
+ [Fact]
+ public void BadReads()
+ {
+ MemoryBufferAddressSpace buffer = new MemoryBufferAddressSpace(new byte[] { 1, 2, 3, 4, 5 });
+ Assert.Throws<BadInputFormatException>(() =>
+ {
+ buffer.Read(5, 1);
+ });
+ Assert.Throws<BadInputFormatException>(() =>
+ {
+ buffer.Read(5, 0);
+ });
+ Assert.Throws<BadInputFormatException>(() =>
+ {
+ buffer.Read(3, 3);
+ });
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.IO;
+using System.Linq;
+using TestHelpers;
+using Xunit;
+
+namespace Microsoft.FileFormats.ELF.Tests
+{
+ public class Tests
+ {
+ [Fact]
+ public void CheckIndexingInfo()
+ {
+ using (Stream libcoreclr = TestUtilities.OpenCompressedFile("TestBinaries/libcoreclr.so.gz"))
+ {
+ StreamAddressSpace dataSource = new(libcoreclr);
+ ELFFile elf = new(dataSource);
+ Assert.True(elf.IsValid());
+ Assert.True(elf.Header.Type == ELFHeaderType.Shared);
+ string buildId = TestUtilities.ToHexString(elf.BuildID);
+
+ // This is the build id for libcoreclr.so from the package:
+ // https://dotnet.myget.org/feed/dotnet-core/package/nuget/runtime.ubuntu.14.04-x64.Microsoft.NETCore.Runtime.CoreCLR/2.0.0-preview3-25428-01
+ Assert.Equal("ef8f58a0b402d11c68f78342ef4fcc7d23798d4c", buildId);
+ }
+
+ // 32 bit arm ELF binary
+ using (Stream apphost = TestUtilities.OpenCompressedFile("TestBinaries/apphost.gz"))
+ {
+ StreamAddressSpace dataSource = new(apphost);
+ ELFFile elf = new(dataSource);
+ Assert.True(elf.IsValid());
+ Assert.True(elf.Header.Type == ELFHeaderType.Executable);
+ string buildId = TestUtilities.ToHexString(elf.BuildID);
+
+ // This is the build id for apphost from the package:
+ // https://dotnet.myget.org/F/dotnet-core/symbols/runtime.linux-arm.Microsoft.NETCore.DotNetAppHost/2.1.0-preview2-25512-03
+ Assert.Equal("316d55471a8d5ebd6f2cb0631f0020518ab13dc0", buildId);
+ }
+ }
+
+ [Fact]
+ public void CheckDbgIndexingInfo()
+ {
+ using (Stream stream = TestUtilities.OpenCompressedFile("TestBinaries/libcoreclrtraceptprovider.so.dbg.gz"))
+ {
+ StreamAddressSpace dataSource = new(stream);
+ ELFFile elf = new(dataSource);
+ Assert.True(elf.IsValid());
+ Assert.True(elf.Header.Type == ELFHeaderType.Shared);
+ string buildId = TestUtilities.ToHexString(elf.BuildID);
+ Assert.Equal("ce4ce0558d878a05754dff246ccea2a70a1db3a8", buildId);
+ }
+ }
+
+ [Fact]
+ public void CheckFreeBSDIndexingInfo()
+ {
+ using (Stream stream = File.OpenRead("TestBinaries/ilasm.dbg"))
+ {
+ StreamAddressSpace dataSource = new(stream);
+ ELFFile elf = new(dataSource);
+ Assert.True(elf.IsValid());
+ Assert.True(elf.Header.Type == ELFHeaderType.Executable);
+ string buildId = TestUtilities.ToHexString(elf.BuildID);
+ Assert.Equal("4a91e41002a1307ef4097419d7875df001969daa", buildId);
+ }
+ }
+
+ [Fact]
+ public void CheckCustomNamedBuildIdSection()
+ {
+ using (Stream stream = File.OpenRead("TestBinaries/renamed_build_id_section"))
+ {
+ StreamAddressSpace dataSource = new(stream);
+ ELFFile elf = new(dataSource);
+ Assert.True(elf.IsValid());
+ Assert.True(elf.Header.Type == ELFHeaderType.Shared);
+ string buildId = TestUtilities.ToHexString(elf.BuildID);
+ Assert.Equal("1bd6a199dcb6f234558d9439cfcbba2727f1e1d9", buildId);
+ }
+ }
+
+ [Fact]
+ public void ParseCore()
+ {
+ using (Stream core = TestUtilities.OpenCompressedFile("TestBinaries/core.gz"))
+ {
+ StreamAddressSpace dataSource = new(core);
+ ELFCoreFile coreReader = new(dataSource);
+ Assert.True(coreReader.IsValid());
+ ELFLoadedImage loadedImage = coreReader.LoadedImages.Where(i => i.Path.EndsWith("librt-2.17.so")).First();
+ Assert.True(loadedImage.Image.IsValid());
+ Assert.True(loadedImage.Image.Header.Type == ELFHeaderType.Shared);
+ string buildId = TestUtilities.ToHexString(loadedImage.Image.BuildID);
+ Assert.Equal("1d2ad4eaa62bad560685a4b8dccc8d9aa95e22ce", buildId);
+ }
+ }
+
+ [Fact]
+ public void ParseTriageDump()
+ {
+ using (Stream core = TestUtilities.OpenCompressedFile("TestBinaries/triagedump.gz"))
+ {
+ StreamAddressSpace dataSource = new(core);
+ ELFCoreFile coreReader = new(dataSource);
+ Assert.True(coreReader.IsValid());
+ ELFLoadedImage loadedImage = coreReader.LoadedImages.Where(i => i.Path.EndsWith("libcoreclr.so")).First();
+ Assert.True(loadedImage.Image.IsValid());
+ Assert.True(loadedImage.Image.Header.Type == ELFHeaderType.Shared);
+ string buildId = TestUtilities.ToHexString(loadedImage.Image.BuildID);
+ Assert.Equal("8f39a52a756311ab365090bfe9edef7ee8c44503", buildId);
+ }
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using Xunit;
+
+namespace Microsoft.FileFormats.Tests
+{
+ public class Layouts
+ {
+ [Fact]
+ public void ReadPrimitives()
+ {
+ MemoryBufferAddressSpace dataSource = new(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 });
+ Reader reader = new Reader(dataSource);
+ Assert.Equal(0x0201, reader.Read<ushort>(0));
+ Assert.Equal(0x5, reader.Read<byte>(4));
+ Assert.Equal((uint)0x08070605, reader.Read<uint>(4));
+ }
+
+#pragma warning disable 0649
+ private class SimpleStruct : TStruct
+ {
+ public int X;
+ public short Y;
+ }
+
+ [Fact]
+ public void ReadTStruct()
+ {
+ MemoryBufferAddressSpace dataSource = new(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 });
+ Reader reader = new(dataSource);
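+ // Reading at offset 1: X spans bytes {2,3,4,5} little-endian and Y spans {6,7}.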
+ SimpleStruct s = reader.Read<SimpleStruct>(1);
+ Assert.Equal(0x05040302, s.X);
+ Assert.Equal(0x0706, s.Y);
+ }
+
+ private class DerivedStruct : SimpleStruct
+ {
+ public int Z;
+ }
+
+ [Fact]
+ public void ReadDerivedTStruct()
+ {
+ MemoryBufferAddressSpace dataSource = new(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13 });
+ Reader reader = new(dataSource);
+ DerivedStruct s = reader.Read<DerivedStruct>(1);
+ Assert.Equal(0x05040302, s.X);
+ Assert.Equal(0x0706, s.Y);
+ Assert.Equal(0x0d0c0b0a, s.Z);
+ }
+
+ private class ArrayStruct : TStruct
+ {
+ [ArraySize(3)]
+ public short[] array;
+ public int X;
+ }
+
+ [Fact]
+ public void ReadArrayTStructTest()
+ {
+ MemoryBufferAddressSpace dataSource = new(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13 });
+ Reader reader = new(dataSource);
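+ // The three inline shorts occupy 6 bytes; X is then aligned up to the next 4-byte boundary, skipping two bytes.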
+ ArrayStruct s = reader.Read<ArrayStruct>(1);
+ Assert.Equal(3, s.array.Length);
+ Assert.Equal(0x0302, s.array[0]);
+ Assert.Equal(0x0504, s.array[1]);
+ Assert.Equal(0x0706, s.array[2]);
+ Assert.Equal(0x0d0c0b0a, s.X);
+ }
+
+ private enum FooEnum : ushort
+ {
+ ThreeTwo = 0x0302
+ }
+
+ private class EnumStruct : TStruct
+ {
+ public FooEnum E;
+ public int X;
+ }
+
+ [Fact]
+ public void EnumTStructTest()
+ {
+ MemoryBufferAddressSpace dataSource = new(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13 });
+ Reader reader = new(dataSource);
+ EnumStruct s = reader.Read<EnumStruct>(1);
+ Assert.Equal(FooEnum.ThreeTwo, s.E);
+ Assert.Equal(0x09080706, s.X);
+ }
+
+ private class VariableSizedPointer<T> : Pointer<T, SizeT> { }
+ private class UInt32Pointer<T> : Pointer<T, uint> { }
+ private class UInt64Pointer<T> : Pointer<T, ulong> { }
+ private class PointerStruct : TStruct
+ {
+ public VariableSizedPointer<uint> P;
+ public UInt32Pointer<byte> P32;
+ public UInt64Pointer<ulong> P64;
+ }
+
+ [Fact]
+ public void PointerTStructTest()
+ {
+ MemoryBufferAddressSpace dataSource = new(new byte[] { 4, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0 });
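+ // AddSizeT(4) makes SizeT four bytes, so VariableSizedPointer<T> reads a 32-bit pointer value here.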
+ LayoutManager mgr = new LayoutManager().AddPrimitives().AddSizeT(4).AddPointerTypes().AddTStructTypes();
+ Reader reader = new(dataSource, mgr);
+ PointerStruct s = reader.Read<PointerStruct>(0);
+ Assert.Equal((ulong)0x4, s.P.Value);
+ Assert.False(s.P.IsNull);
+ Assert.Equal((uint)0x1, s.P.Dereference(dataSource));
+ Assert.Equal((ulong)0x1, s.P32.Value);
+ Assert.Equal((byte)0x0, s.P32.Dereference(dataSource));
+ Assert.Equal((byte)0x1, s.P32.Element(dataSource, 3));
+ Assert.Equal((ulong)0x2, s.P64.Value);
+ Assert.Equal((ulong)0x0002000000010000, s.P64.Dereference(dataSource));
+ }
+
+ public class OptionalField : TStruct
+ {
+ public int X;
+ [If("A")]
+ public int Y;
+ public int Z;
+ }
+
+ [Fact]
+ public void DefineTest()
+ {
+ MemoryBufferAddressSpace dataSource = new(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13 });
+ LayoutManager a = new LayoutManager().AddPrimitives().AddTStructTypes(new string[] { "A" });
+ Reader readerA = new(dataSource, a);
+ OptionalField fA = readerA.Read<OptionalField>(0);
+ Assert.Equal(0x08070605, fA.Y);
+ Assert.Equal(0x0c0b0a09, fA.Z);
+
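+ // Without the "A" define, Y is omitted from the layout, so Z reads the bytes Y occupied above.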
+ Reader readerB = new(dataSource);
+ OptionalField fB = readerB.Read<OptionalField>(0);
+ Assert.Equal(0x0, fB.Y);
+ Assert.Equal(0x08070605, fB.Z);
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.IO;
+using System.Linq;
+using TestHelpers;
+using Xunit;
+
+namespace Microsoft.FileFormats.MachO.Tests
+{
+ public class Tests
+ {
+ [Fact]
+ public void CheckIndexingInfo()
+ {
+ // https://dotnet.myget.org/feed/dotnet-core/package/nuget/runtime.osx.10.12-x64.Microsoft.NETCore.Runtime.CoreCLR/1.1.2
+ using (Stream dylib = TestUtilities.OpenCompressedFile("TestBinaries/libcoreclr.dylib.gz"))
+ {
+ StreamAddressSpace dataSource = new(dylib);
+ MachOFile machO = new(dataSource);
+ Assert.True(machO.IsValid());
+ Assert.Equal(Guid.Parse("da2b37b5-cdbc-f838-899b-6a782ceca847"), new Guid(machO.Uuid));
+ }
+ }
+
+ [Fact]
+ public void CheckDwarfIndexingInfo()
+ {
+ // From a local build
+ using (Stream dwarf = TestUtilities.OpenCompressedFile("TestBinaries/libclrjit.dylib.dwarf.gz"))
+ {
+ StreamAddressSpace dataSource = new(dwarf);
+ MachOFile machO = new(dataSource);
+ Assert.True(machO.IsValid());
+ Assert.Equal(Guid.Parse("0c235eb3-e98e-ef32-b6e6-e6ed18a604a8"), new Guid(machO.Uuid));
+ }
+ }
+
+ [Fact(Skip = "Need an alternate scheme to acquire the binary this test was reading")]
+ public void ParseCore()
+ {
+ using (Stream core = TestUtilities.DecompressFile("TestBinaries/core.gz", "TestBinaries/core"))
+ {
+ StreamAddressSpace dataSource = new(core);
+ // hard-coding the dylinker position so we don't pay to search for it each time
+ // the code is capable of finding it by brute force search even if we don't provide the hint
+ MachCore coreReader = new(dataSource, 0x000000010750c000);
+ Assert.True(coreReader.IsValid());
+ MachLoadedImage[] images = coreReader.LoadedImages.Where(i => i.Path.EndsWith("libcoreclr.dylib")).ToArray();
+ MachOFile libCoreclr = images[0].Image;
+ Assert.True(libCoreclr.IsValid());
+ Assert.Equal(Guid.Parse("c5660f3e-7352-b138-8141-e9d63b8ab415"), new Guid(libCoreclr.Uuid));
+ }
+ }
+ }
+}
--- /dev/null
+<Project Sdk="Microsoft.NET.Sdk">
+ <PropertyGroup>
+ <TargetFramework>net6.0</TargetFramework>
+ <NoWarn>;1591;1701</NoWarn>
+ </PropertyGroup>
+ <ItemGroup>
+ <None Remove="TestBinaries\renamed_build_id_section" />
+ </ItemGroup>
+
+ <ItemGroup>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\PerfMapEnabled\System.ComponentModel.EventBasedAsync.dll" Link="TestBinaries\PerfMapEnabled\System.ComponentModel.EventBasedAsync.dll">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\apphost.gz">
+ <CopyToOutputDirectory>Always</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\core.gz">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\HelloWorld.exe">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\HelloWorld.pdb">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\libclrjit.dylib.dwarf.gz">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\libcoreclr.dylib.gz">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\libcoreclr.so.gz">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\libcoreclrtraceptprovider.so.dbg.gz">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\minidump_x64.dmp.gz">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\minidump_x86.dmp.gz">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\renamed_build_id_section">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\System.Diagnostics.StackTrace.dll">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\triagedump.gz">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\ilasm.dbg">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ </ItemGroup>
+
+ <ItemGroup>
+ <ProjectReference Include="$(MSBuildThisFileDirectory)..\..\Microsoft.FileFormats\Microsoft.FileFormats.csproj" />
+ <ProjectReference Include="$(MSBuildThisFileDirectory)..\TestHelpers\TestHelpers.csproj" />
+ </ItemGroup>
+</Project>
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using Microsoft.FileFormats.PE;
+using System;
+using System.Collections.ObjectModel;
+using System.IO;
+using System.Linq;
+using TestHelpers;
+using Xunit;
+
+namespace Microsoft.FileFormats.Minidump
+{
+ public class Tests
+ {
+ const string x86Dump = "TestBinaries/minidump_x86.dmp.gz";
+ const string x64Dump = "TestBinaries/minidump_x64.dmp.gz";
+
+ readonly Guid x64ClrGuid = new Guid("e18d6461-eb4f-49a6-b418-e9af91007a21");
+ readonly Guid x86ClrGuid = new Guid("df1e3528-29be-4d0e-9457-4c8ccfdc278a");
+ const int ClrAge = 2;
+ const string ClrPdb = "clr.pdb";
+
+ [Fact]
+ public void CheckIsMinidump()
+ {
+ using (Stream stream = TestUtilities.OpenCompressedFile(x86Dump))
+ {
+ Assert.True(Minidump.IsValid(new StreamAddressSpace(stream)));
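+ // The second argument shifts where the minidump header is read (inferred from usage), so parsing from offset 1 must fail.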
+ Assert.False(Minidump.IsValid(new StreamAddressSpace(stream), 1));
+ }
+
+ using (Stream stream = TestUtilities.OpenCompressedFile(x64Dump))
+ {
+ Assert.True(Minidump.IsValid(new StreamAddressSpace(stream)));
+ Assert.False(Minidump.IsValid(new StreamAddressSpace(stream), 1));
+ }
+
+ // These are GZiped files, they should not be minidumps.
+ using (FileStream stream = File.OpenRead(x86Dump))
+ Assert.False(Minidump.IsValid(new StreamAddressSpace(stream)));
+
+ using (FileStream stream = File.OpenRead(x64Dump))
+ Assert.False(Minidump.IsValid(new StreamAddressSpace(stream)));
+ }
+
+ [Fact]
+ public void CheckPdbInfo()
+ {
+ using (Stream stream = TestUtilities.OpenCompressedFile(x86Dump))
+ {
+ CheckPdbInfoInternal(GetMinidumpFromStream(stream), x86ClrGuid);
+ }
+ using (Stream stream = TestUtilities.OpenCompressedFile(x64Dump))
+ {
+ CheckPdbInfoInternal(GetMinidumpFromStream(stream), x64ClrGuid);
+ }
+ }
+
+ private void CheckPdbInfoInternal(Minidump minidump, Guid guid)
+ {
+ PEFile image = minidump.LoadedImages.Where(i => i.ModuleName.EndsWith(@"\clr.dll")).Single().Image;
+ foreach (PEPdbRecord pdb in image.Pdbs)
+ {
+ Assert.NotNull(pdb);
+ Assert.Equal(ClrPdb, pdb.Path);
+ Assert.Equal(ClrAge, pdb.Age);
+ Assert.Equal(guid, pdb.Signature);
+ }
+ }
+
+ [Fact]
+ public void CheckModuleNames()
+ {
+ using (Stream stream = TestUtilities.OpenCompressedFile(x86Dump))
+ {
+ CheckModuleNamesInternal(GetMinidumpFromStream(stream));
+ }
+ using (Stream stream = TestUtilities.OpenCompressedFile(x64Dump))
+ {
+ CheckModuleNamesInternal(GetMinidumpFromStream(stream));
+ }
+ }
+
+ private void CheckModuleNamesInternal(Minidump minidump)
+ {
+ Assert.Single(minidump.LoadedImages.Where(i => i.ModuleName.EndsWith(@"\clr.dll")));
+
+ foreach (var module in minidump.LoadedImages)
+ Assert.NotNull(module.ModuleName);
+ }
+
+ [Fact]
+ public void CheckNestedPEImages()
+ {
+ using (Stream stream = TestUtilities.OpenCompressedFile(x86Dump))
+ {
+ CheckNestedPEImagesInternal(GetMinidumpFromStream(stream));
+ }
+ using (Stream stream = TestUtilities.OpenCompressedFile(x64Dump))
+ {
+ CheckNestedPEImagesInternal(GetMinidumpFromStream(stream));
+ }
+ }
+
+ private void CheckNestedPEImagesInternal(Minidump minidump)
+ {
+ foreach (var loadedImage in minidump.LoadedImages)
+ {
+ Assert.True(loadedImage.Image.HasValidDosSignature.Check());
+ Assert.True(loadedImage.Image.HasValidPESignature.Check());
+ }
+ }
+
+ [Fact]
+ public void CheckMemoryRanges()
+ {
+ using (Stream stream = TestUtilities.OpenCompressedFile(x86Dump))
+ {
+ CheckMemoryRangesInternal(GetMinidumpFromStream(stream));
+ }
+ using (Stream stream = TestUtilities.OpenCompressedFile(x64Dump))
+ {
+ CheckMemoryRangesInternal(GetMinidumpFromStream(stream));
+ }
+ }
+
+ private void CheckMemoryRangesInternal(Minidump minidump)
+ {
+ ReadOnlyCollection<MinidumpLoadedImage> images = minidump.LoadedImages;
+ ReadOnlyCollection<MinidumpSegment> memory = minidump.Segments;
+
+ // Ensure that all of our images actually correspond to memory in the crash dump. Note that our minidumps used
+ // for this test are all full dumps with all memory (including images) in them.
+ foreach (var image in images)
+ {
+ int count = memory.Where(m => m.VirtualAddress <= image.BaseAddress && image.BaseAddress < m.VirtualAddress + m.Size).Count();
+ Assert.Equal(1, count);
+
+ // Check the start of each image for the PE header 'MZ'
+ byte[] header = minidump.VirtualAddressReader.Read(image.BaseAddress, 2);
+ Assert.Equal((byte)'M', header[0]);
+ Assert.Equal((byte)'Z', header[1]);
+ }
+ }
+
+ [Fact]
+ public void CheckLoadedModules()
+ {
+ using (Stream stream = TestUtilities.OpenCompressedFile(x86Dump))
+ {
+ CheckLoadedModulesInternal(stream);
+ }
+ using (Stream stream = TestUtilities.OpenCompressedFile(x64Dump))
+ {
+ CheckLoadedModulesInternal(stream);
+ }
+ }
+
+ private static void CheckLoadedModulesInternal(Stream stream)
+ {
+ Minidump minidump = GetMinidumpFromStream(stream);
+
+ var modules = minidump.LoadedImages;
+ Assert.True(modules.Count > 0);
+ }
+
+ [Fact]
+ public void CheckStartupMemoryRead()
+ {
+ using (Stream stream = TestUtilities.OpenCompressedFile(x86Dump))
+ {
+ CheckStartupMemoryReadInternal(stream);
+ }
+ using (Stream stream = TestUtilities.OpenCompressedFile(x64Dump))
+ {
+ CheckStartupMemoryReadInternal(stream);
+ }
+ }
+
+ private static void CheckStartupMemoryReadInternal(Stream stream)
+ {
+ IAddressSpace sas = new StreamAddressSpace(stream);
+ MaxStreamReadHelper readHelper = new MaxStreamReadHelper(sas);
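+ // MaxStreamReadHelper wraps the address space and records the highest offset read in readHelper.Max.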
+
+ Minidump minidump = new Minidump(readHelper);
+
+ // We should have read the header of a minidump, so we cannot have read nothing.
+ Assert.True(readHelper.Max > 0);
+
+ // We should only read the header and not too far into the dump file, 1k should be plenty.
+ Assert.True(readHelper.Max <= 1024);
+ }
+
+ private static Minidump GetMinidumpFromStream(Stream stream)
+ {
+ StreamAddressSpace sas = new(stream);
+ return new(sas);
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Threading.Tasks;
+using Xunit;
+
+namespace Microsoft.FileFormats.PDB.Tests
+{
+ public class Tests
+ {
+ [Fact]
+ public void CheckIndexingInfo()
+ {
+ using (Stream s = File.OpenRead("TestBinaries/HelloWorld.pdb"))
+ {
+ StreamAddressSpace fileContent = new(s);
+ PDBFile pdb = new(fileContent);
+ Assert.True(pdb.Header.IsMagicValid.Check());
+ Assert.True(pdb.IsValid());
+ Assert.Equal((uint)1, pdb.Age);
+ Assert.Equal(Guid.Parse("99891B3E-D7AE-4C3B-ABFF-8A2B4A9B0C43"), pdb.Signature);
+ }
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Threading.Tasks;
+using Xunit;
+
+namespace Microsoft.FileFormats.PE.Tests
+{
+ public class Tests
+ {
+ [Fact]
+ public void CheckExeIndexingInfo()
+ {
+ using (Stream s = File.OpenRead("TestBinaries/HelloWorld.exe"))
+ {
+ StreamAddressSpace fileContent = new(s);
+ PEFile pe = new(fileContent);
+ Assert.True(pe.IsValid());
+ Assert.Equal((uint)0x8000, pe.SizeOfImage);
+ Assert.Equal((uint)0x577F5919, pe.Timestamp);
+ }
+ }
+
+ [Fact]
+ public void CheckExePdbInfo()
+ {
+ using (Stream s = File.OpenRead("TestBinaries/HelloWorld.exe"))
+ {
+ StreamAddressSpace fileContent = new(s);
+ PEFile pe = new(fileContent);
+
+ // There should only be one pdb record entry
+ foreach (PEPdbRecord pdb in pe.Pdbs)
+ {
+ Assert.Equal(new Guid("99891b3e-d7ae-4c3b-abff-8a2b4a9b0c43"), pdb.Signature);
+ Assert.Equal(1, pdb.Age);
+ Assert.Equal(@"c:\users\noahfalk\documents\visual studio 2015\Projects\HelloWorld\HelloWorld\obj\Debug\HelloWorld.pdb", pdb.Path);
+ }
+ }
+ }
+
+ [Fact]
+ public void CheckDllIndexingInfo()
+ {
+ using (Stream s = File.OpenRead("TestBinaries/System.Diagnostics.StackTrace.dll"))
+ {
+ StreamAddressSpace fileContent = new(s);
+ PEFile pe = new(fileContent);
+ Assert.True(pe.IsValid());
+ Assert.Equal((uint)0x35a00, pe.SizeOfImage);
+ Assert.Equal((uint)0x595cd91b, pe.Timestamp);
+ }
+ }
+
+ [Fact]
+ public void CheckDllPdbInfo()
+ {
+ using (Stream s = File.OpenRead("TestBinaries/System.Diagnostics.StackTrace.dll"))
+ {
+ StreamAddressSpace fileContent = new(s);
+ PEFile pe = new(fileContent);
+
+ bool first = true;
+ foreach (PEPdbRecord pdb in pe.Pdbs)
+ {
+ // Skip the first entry (ngen pdb)
+ if (!first)
+ {
+ Assert.Equal(new Guid("8B2E8CF4-4314-4806-982A-B7D904876A50"), pdb.Signature);
+ Assert.Equal(1, pdb.Age);
+ Assert.Equal(@"/root/corefx/bin/obj/Unix.AnyCPU.Release/System.Diagnostics.StackTrace/netcoreapp/System.Diagnostics.StackTrace.pdb", pdb.Path);
+ }
+ first = false;
+ }
+ }
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text;
+using Microsoft.FileFormats.PerfMap;
+using Xunit;
+
+namespace Microsoft.FileFormats.PerfMap.Tests
+{
+ public class Tests
+ {
+ public static MemoryStream GenerateStreamFromString(string value)
+ {
+ return new MemoryStream(Encoding.UTF8.GetBytes(value ?? ""));
+ }
+
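+ // Header rows carry pseudo-RVAs (inferred from the assertions below):
+ // FFFFFFFF = signature, FFFFFFFE = version, FFFFFFFD = OS token,
+ // FFFFFFFC = architecture token, FFFFFFFB = ABI token.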
+ public const string s_validV1PerfMap =
+@"FFFFFFFF 00 734D59D6DE0E96AA3C77B3E2ED498097
+FFFFFFFE 00 1
+FFFFFFFD 00 2
+FFFFFFFC 00 3
+FFFFFFFB 00 1
+000115D0 0D Microsoft.CodeAnalysis.EmbeddedAttribute::.ctor()";
+
+ [Fact]
+ public void CheckIndexingInfo()
+ {
+ using (var s = new MemoryStream(Encoding.UTF8.GetBytes(s_validV1PerfMap)))
+ {
+ PerfMapFile perfMap = new(s);
+ Assert.True(perfMap.IsValid);
+ Assert.True(perfMap.Header is not null);
+ Assert.True(TestHelpers.TestUtilities.ToHexString(perfMap.Header.Signature) == "734d59d6de0e96aa3c77b3e2ed498097");
+ Assert.True(perfMap.Header.Version == 1);
+ }
+ }
+
+ [Fact]
+ public void CheckFields()
+ {
+ using (var s = new MemoryStream(Encoding.UTF8.GetBytes(s_validV1PerfMap)))
+ {
+ PerfMapFile perfMap = new(s);
+ Assert.True(perfMap.IsValid);
+ Assert.True(perfMap.Header is not null);
+ Assert.True(TestHelpers.TestUtilities.ToHexString(perfMap.Header.Signature) == "734d59d6de0e96aa3c77b3e2ed498097");
+ Assert.True(perfMap.Header.Version == 1);
+ Assert.True(perfMap.Header.OperatingSystem == PerfMapFile.PerfMapOSToken.Linux);
+ Assert.True(perfMap.Header.Architecture == PerfMapFile.PerfMapArchitectureToken.X64);
+ Assert.True(perfMap.Header.Abi == PerfMapFile.PerfMapAbiToken.Default);
+ }
+ }
+
+ [Fact]
+ public void CheckRecords()
+ {
+ using (var s = new MemoryStream(Encoding.UTF8.GetBytes(s_validV1PerfMap)))
+ {
+ PerfMapFile perfMap = new(s);
+ PerfMapFile.PerfMapRecord record = perfMap.PerfRecords.Single();
+ Assert.True(record.Rva == 0x115D0);
+ Assert.True(record.Length == 0x0D);
+ Assert.True(record.Name == "Microsoft.CodeAnalysis.EmbeddedAttribute::.ctor()");
+ }
+ }
+
+ public const string s_VNextPerfMapValid =
+@"FFFFFFFF 00 734D59D6DE0E96AA3C77B3E2ED498097
+FFFFFFFE 00 99
+FFFFFFFD 00 2
+FFFFFFFC 00 3
+FFFFFFFB 00 1
+000115D0 0D Microsoft.CodeAnalysis.EmbeddedAttribute::.ctor()";
+
+ [Fact]
+ public void CheckHeaderVNext()
+ {
+ // Reading the vNext header is valid as long as the signature and fields remain compatible.
+ using (var s = new MemoryStream(Encoding.UTF8.GetBytes(s_VNextPerfMapValid)))
+ {
+ PerfMapFile perfMap = new(s);
+ Assert.True(perfMap.IsValid);
+ Assert.True(perfMap.Header is not null);
+ Assert.True(TestHelpers.TestUtilities.ToHexString(perfMap.Header.Signature) == "734d59d6de0e96aa3c77b3e2ed498097");
+ Assert.True(perfMap.Header.Version == 99);
+ Assert.True(perfMap.Header.OperatingSystem == PerfMapFile.PerfMapOSToken.Linux);
+ Assert.True(perfMap.Header.Architecture == PerfMapFile.PerfMapArchitectureToken.X64);
+ Assert.True(perfMap.Header.Abi == PerfMapFile.PerfMapAbiToken.Default);
+ }
+ }
+
+ [Fact]
+ public void CheckRecordsVNextFail()
+ {
+ // Reading the vNext records is not supported, since the record format may have changed in a newer version.
+ using (var s = new MemoryStream(Encoding.UTF8.GetBytes(s_VNextPerfMapValid)))
+ {
+ PerfMapFile perfMap = new(s);
+ Assert.True(perfMap.IsValid);
+ Assert.True(perfMap.Header is not null);
+ Assert.True(perfMap.Header.Version == 99);
+ Assert.Throws<NotImplementedException>(perfMap.PerfRecords.First);
+ }
+ }
+
+ public static IEnumerable<object[]> InvalidSigPerfMaps() =>
+ new object[][] {
+// Too short
+new object[]{@"FFFFFFFF 00 734D59D6DE0E96AA3C77B3E2ED4980
+FFFFFFFE 00 1
+FFFFFFFD 00 2
+FFFFFFFC 00 3
+FFFFFFFB 00 1"},
+// Not HexString
+new object[]{@"FFFFFFFF 00 734D59D6DE0E96AA3C77B3E2ED4980CG
+FFFFFFFE 00 1
+FFFFFFFD 00 2
+FFFFFFFC 00 3
+FFFFFFFB 00 1"},
+// Too long
+new object[]{@"FFFFFFFF 00 734D59D6DE0E96AA3C77B3E2ED49809701
+FFFFFFFE 00 1
+FFFFFFFD 00 2
+FFFFFFFC 00 3
+FFFFFFFB 00 1"}};
+
+ [Theory]
+ [MemberData(nameof(InvalidSigPerfMaps))]
+ public void CheckInvalidSigsFail(string doc)
+ {
+ using (var s = new MemoryStream(Encoding.UTF8.GetBytes(doc)))
+ {
+ var perfMap = new PerfMapFile(s);
+ Assert.True(!perfMap.IsValid);
+ }
+ }
+
+ public static IEnumerable<object[]> InvalidHeaders() =>
+ new object[][]{
+// Wrong token for sig
+new object[]{ @"FFFFFFFA 00 734D59D6DE0E96AA3C77B3E2ED4980
+FFFFFFFE 00 1
+FFFFFFFD 00 2
+FFFFFFFC 00 3
+FFFFFFFB 00 1" },
+// Out of order
+new object[]{ @"FFFFFFFF 00 734D59D6DE0E96AA3C77B3E2ED4980CG
+FFFFFFFE 00 1
+FFFFFFFC 00 3
+FFFFFFFD 00 2
+FFFFFFFB 00 1"},
+// Missing Entry
+new object[]{ @"FFFFFFFF 00 734D59D6DE0E96AA3C77B3E2ED498097
+FFFFFFFE 00 1
+FFFFFFFC 00 3
+FFFFFFFB 00 1"},
+// Repeated pseudo RVA
+new object[]{ @"FFFFFFFF 00 734D59D6DE0E96AA3C77B3E2ED498097
+FFFFFFFE 00 1
+FFFFFFFD 00 2
+FFFFFFFD 00 2
+FFFFFFFC 00 3
+FFFFFFFB 00 1"},
+// Wrong pseudo offset
+new object[]{ @"FFFFFFFF 02 734D59D6DE0E96AA3C77B3E2ED498097
+FFFFFFFE 00 1
+FFFFFFFD 00 2
+FFFFFFFC 00 3
+FFFFFFFB 00 1"}};
+
+ [Theory]
+ [MemberData(nameof(InvalidHeaders))]
+ public void CheckInvalidHeadersFail(string doc)
+ {
+ using (var s = new MemoryStream(Encoding.UTF8.GetBytes(doc)))
+ {
+ var perfMap = new PerfMapFile(s);
+ Assert.True(!perfMap.IsValid);
+ }
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+using Xunit;
+
+namespace Microsoft.FileFormats.Tests
+{
+ public class PrimitiveTypes
+ {
+ [Fact]
+ public void ByteTest()
+ {
+ MemoryBufferAddressSpace dt = new MemoryBufferAddressSpace(new byte[] { 200, 12, 0 });
+ Assert.Equal(200, (byte)new UInt8Layout(false).Read(dt, 0));
+ Assert.Equal(12, (byte)new UInt8Layout(false).Read(dt, 1));
+ Assert.Equal(0, (byte)new UInt8Layout(false).Read(dt, 2));
+ Assert.Equal(200, (byte)new UInt8Layout(true).Read(dt, 0));
+ Assert.Equal(12, (byte)new UInt8Layout(true).Read(dt, 1));
+ Assert.Equal(0, (byte)new UInt8Layout(true).Read(dt, 2));
+ }
+
+ [Fact]
+ public void SByteTest()
+ {
+ MemoryBufferAddressSpace dt = new MemoryBufferAddressSpace(new byte[] { 200, 12, 0 });
+ Assert.Equal(-56, (sbyte)new Int8Layout(false).Read(dt, 0));
+ Assert.Equal(12, (sbyte)new Int8Layout(false).Read(dt, 1));
+ Assert.Equal(0, (sbyte)new Int8Layout(false).Read(dt, 2));
+ Assert.Equal(-56, (sbyte)new Int8Layout(true).Read(dt, 0));
+ Assert.Equal(12, (sbyte)new Int8Layout(true).Read(dt, 1));
+ Assert.Equal(0, (sbyte)new Int8Layout(true).Read(dt, 2));
+ }
+
+ [Fact]
+ public void UShortTest()
+ {
+ MemoryBufferAddressSpace dt = new MemoryBufferAddressSpace(new byte[] { 200, 12, 0, 0 });
+ Assert.Equal(12 * 256 + 200, (ushort)new UInt16Layout(false).Read(dt, 0));
+ Assert.Equal(0, (ushort)new UInt16Layout(false).Read(dt, 2));
+ Assert.Equal(200 * 256 + 12, (ushort)new UInt16Layout(true).Read(dt, 0));
+ Assert.Equal(0, (ushort)new UInt16Layout(true).Read(dt, 2));
+ }
+
+ [Fact]
+ public void ShortTest()
+ {
+ MemoryBufferAddressSpace dt = new MemoryBufferAddressSpace(new byte[] { 200, 12, 0, 0 });
+ Assert.Equal(12 * 256 + 200, (short)new Int16Layout(false).Read(dt, 0));
+ Assert.Equal(0, (short)new Int16Layout(false).Read(dt, 2));
+ Assert.Equal(-56 * 256 + 12, (short)new Int16Layout(true).Read(dt, 0));
+ Assert.Equal(0, (short)new Int16Layout(true).Read(dt, 2));
+ }
+
+ [Fact]
+ public void UIntTest()
+ {
+ MemoryBufferAddressSpace dt = new MemoryBufferAddressSpace(new byte[] { 200, 12, 19, 139, 0, 0, 0, 0 });
+ Assert.Equal((uint)139 * 256 * 256 * 256 + 19 * 256 * 256 + 12 * 256 + 200, new UInt32Layout(false).Read(dt, 0));
+ Assert.Equal((uint)0, new UInt32Layout(false).Read(dt, 4));
+ Assert.Equal((uint)200 * 256 * 256 * 256 + 12 * 256 * 256 + 19 * 256 + 139, new UInt32Layout(true).Read(dt, 0));
+ Assert.Equal((uint)0, new UInt32Layout(true).Read(dt, 4));
+ }
+
+ [Fact]
+ public void IntTest()
+ {
+ MemoryBufferAddressSpace dt = new MemoryBufferAddressSpace(new byte[] { 200, 12, 19, 139, 0, 0, 0, 0 });
+ Assert.Equal((139 - 256) * 256 * 256 * 256 + 19 * 256 * 256 + 12 * 256 + 200, new Int32Layout(false).Read(dt, 0));
+ Assert.Equal(0, new Int32Layout(false).Read(dt, 4));
+ Assert.Equal((200 - 256) * 256 * 256 * 256 + 12 * 256 * 256 + 19 * 256 + 139, new Int32Layout(true).Read(dt, 0));
+ Assert.Equal(0, new Int32Layout(true).Read(dt, 4));
+ }
+
+ [Fact]
+ public void ULongTest()
+ {
+ MemoryBufferAddressSpace dt = new MemoryBufferAddressSpace(new byte[] { 200, 12, 19, 139, 192, 7, 1, 40, 0, 0, 0, 0, 0, 0, 0, 0 });
+ Assert.Equal((40UL << 56) + (1UL << 48) + (7UL << 40) + (192UL << 32) + (139UL << 24) + (19UL << 16) + (12UL << 8) + 200UL,
+ new UInt64Layout(false).Read(dt, 0));
+ Assert.Equal(0UL, new UInt64Layout(false).Read(dt, 8));
+ Assert.Equal((200UL << 56) + (12UL << 48) + (19UL << 40) + (139UL << 32) + (192UL << 24) + (7UL << 16) + (1UL << 8) + 40UL,
+ new UInt64Layout(true).Read(dt, 0));
+ Assert.Equal(0UL, new UInt64Layout(true).Read(dt, 8));
+ }
+
+ [Fact]
+ public void LongTest()
+ {
+ MemoryBufferAddressSpace dt = new MemoryBufferAddressSpace(new byte[] { 200, 12, 19, 139, 192, 7, 1, 40, 0, 0, 0, 0, 0, 0, 0, 0 });
+ Assert.Equal((40L << 56) + (1L << 48) + (7L << 40) + (192L << 32) + (139L << 24) + (19L << 16) + (12L << 8) + 200L,
+ new Int64Layout(false).Read(dt, 0));
+ Assert.Equal(0L, new Int64Layout(false).Read(dt, 8));
+ Assert.Equal((-56L << 56) + (12L << 48) + (19L << 40) + (139L << 32) + (192L << 24) + (7L << 16) + (1L << 8) + 40L,
+ new Int64Layout(true).Read(dt, 0));
+ Assert.Equal(0L, new Int64Layout(true).Read(dt, 8));
+ }
+ }
+}
--- /dev/null
+FFFFFFFF 00 734D59D6DE0E96AA3C77B3E2ED498097
+FFFFFFFE 00 1
+FFFFFFFD 00 2
+FFFFFFFC 00 3
+FFFFFFFB 00 1
+000115D0 0D Microsoft.CodeAnalysis.EmbeddedAttribute::.ctor()
+000115E0 3F System.Runtime.CompilerServices.NullableAttribute::.ctor(System.Byte)
+00011620 26 System.Runtime.CompilerServices.NullableContextAttribute::.ctor(System.Byte)
+00011650 26 System.Runtime.CompilerServices.NullablePublicOnlyAttribute::.ctor(System.Boolean)
+00011680 10 System.Boolean System.SR::UsingResourceKeys()
+00011690 74 System.String System.SR::GetResourceString(System.String)
+00011710 50 System.Resources.ResourceManager System.SR::get_ResourceManager()
+00011760 17 System.String System.SR::get_Async_NullDelegate()
+00011780 17 System.String System.SR::get_Async_OperationAlreadyCompleted()
+000117A0 17 System.String System.SR::get_Async_OperationCancelled()
+000117C0 17 System.String System.SR::get_Async_ExceptionOccurred()
+000117E0 17 System.String System.SR::get_BackgroundWorker_WorkerAlreadyRunning()
+00011800 17 System.String System.SR::get_BackgroundWorker_WorkerDoesntReportProgress()
+00011820 17 System.String System.SR::get_BackgroundWorker_WorkerDoesntSupportCancellation()
+00011840 3A System.SR::.cctor()
+00011880 50 System.ComponentModel.AsyncCompletedEventArgs::.ctor(System.Exception, System.Boolean, System.Object)
+000118D0 9A System.ComponentModel.AsyncCompletedEventArgs::RaiseExceptionIfNecessary()
+00011970 0D System.Boolean System.ComponentModel.AsyncCompletedEventArgs::get_Cancelled()
+00011980 0C System.Exception System.ComponentModel.AsyncCompletedEventArgs::get_Error()
+00011990 0D System.Object System.ComponentModel.AsyncCompletedEventArgs::get_UserState()
+000119A0 43 System.ComponentModel.AsyncOperation::.ctor(System.Object, System.Threading.SynchronizationContext)
+000119F0 25 System.ComponentModel.AsyncOperation::Finalize()
+00011A20 05 System.Object System.ComponentModel.AsyncOperation::get_UserSuppliedState()
+00011A30 05 System.Threading.SynchronizationContext System.ComponentModel.AsyncOperation::get_SynchronizationContext()
+00011A40 4F System.ComponentModel.AsyncOperation::Post(System.Threading.SendOrPostCallback, System.Object)
+00011A90 5C System.ComponentModel.AsyncOperation::PostOperationCompleted(System.Threading.SendOrPostCallback, System.Object)
+00011AF0 29 System.ComponentModel.AsyncOperation::OperationCompleted()
+00011B20 5A System.ComponentModel.AsyncOperation::PostCore(System.Threading.SendOrPostCallback, System.Object, System.Boolean)
+00011B80 58 System.ComponentModel.AsyncOperation::OperationCompletedCore()
+00011BE0 46 System.ComponentModel.AsyncOperation::VerifyNotCompleted()
+00011C30 4F System.ComponentModel.AsyncOperation::VerifyDelegateNotNull(System.Threading.SendOrPostCallback)
+00011C80 5B System.ComponentModel.AsyncOperation System.ComponentModel.AsyncOperation::CreateOperation(System.Object, System.Threading.SynchronizationContext)
+00011CE0 61 System.ComponentModel.AsyncOperation System.ComponentModel.AsyncOperationManager::CreateOperation(System.Object)
+00011D50 3C System.Threading.SynchronizationContext System.ComponentModel.AsyncOperationManager::get_SynchronizationContext()
+00011D90 0D System.ComponentModel.AsyncOperationManager::set_SynchronizationContext(System.Threading.SynchronizationContext)
+00011DA0 37 System.ComponentModel.ProgressChangedEventArgs::.ctor(System.Int32, System.Object)
+00011DE0 0C System.Int32 System.ComponentModel.ProgressChangedEventArgs::get_ProgressPercentage()
+00011DF0 0C System.Object System.ComponentModel.ProgressChangedEventArgs::get_UserState()
+00011E00 65 System.ComponentModel.BackgroundWorker::.ctor()
+00011E70 42 System.ComponentModel.BackgroundWorker::AsyncOperationCompleted(System.Object)
+00011EC0 0D System.Boolean System.ComponentModel.BackgroundWorker::get_CancellationPending()
+00011ED0 53 System.ComponentModel.BackgroundWorker::CancelAsync()
+00011F30 62 System.ComponentModel.BackgroundWorker::add_DoWork(System.ComponentModel.DoWorkEventHandler)
+00011FA0 62 System.ComponentModel.BackgroundWorker::remove_DoWork(System.ComponentModel.DoWorkEventHandler)
+00012010 0D System.Boolean System.ComponentModel.BackgroundWorker::get_IsBusy()
+00012020 2F System.ComponentModel.BackgroundWorker::OnDoWork(System.ComponentModel.DoWorkEventArgs)
+00012050 2F System.ComponentModel.BackgroundWorker::OnRunWorkerCompleted(System.ComponentModel.RunWorkerCompletedEventArgs)
+00012080 2F System.ComponentModel.BackgroundWorker::OnProgressChanged(System.ComponentModel.ProgressChangedEventArgs)
+000120B0 62 System.ComponentModel.BackgroundWorker::add_ProgressChanged(System.ComponentModel.ProgressChangedEventHandler)
+00012120 62 System.ComponentModel.BackgroundWorker::remove_ProgressChanged(System.ComponentModel.ProgressChangedEventHandler)
+00012190 31 System.ComponentModel.BackgroundWorker::ProgressReporter(System.Object)
+000121D0 0F System.ComponentModel.BackgroundWorker::ReportProgress(System.Int32)
+000121E0 F7 System.ComponentModel.BackgroundWorker::ReportProgress(System.Int32, System.Object)
+000122E0 0F System.ComponentModel.BackgroundWorker::RunWorkerAsync()
+000122F0 11A System.ComponentModel.BackgroundWorker::RunWorkerAsync(System.Object)
+00012410 62 System.ComponentModel.BackgroundWorker::add_RunWorkerCompleted(System.ComponentModel.RunWorkerCompletedEventHandler)
+00012480 62 System.ComponentModel.BackgroundWorker::remove_RunWorkerCompleted(System.ComponentModel.RunWorkerCompletedEventHandler)
+000124F0 0D System.Boolean System.ComponentModel.BackgroundWorker::get_WorkerReportsProgress()
+00012500 0D System.ComponentModel.BackgroundWorker::set_WorkerReportsProgress(System.Boolean)
+00012510 0D System.Boolean System.ComponentModel.BackgroundWorker::get_WorkerSupportsCancellation()
+00012520 0D System.ComponentModel.BackgroundWorker::set_WorkerSupportsCancellation(System.Boolean)
+00012530 15B System.ComponentModel.BackgroundWorker::WorkerThreadStart(System.Object)
+00012690 01 System.ComponentModel.BackgroundWorker::Dispose(System.Boolean)
+000126A0 0D System.ComponentModel.BackgroundWorker::<RunWorkerAsync>b__27_0(System.Object)
+000126B0 30 System.ComponentModel.DoWorkEventArgs::.ctor(System.Object)
+000126E0 0C System.Object System.ComponentModel.DoWorkEventArgs::get_Argument()
+000126F0 0D System.Object System.ComponentModel.DoWorkEventArgs::get_Result()
+00012700 14 System.ComponentModel.DoWorkEventArgs::set_Result(System.Object)
+00012720 5D System.ComponentModel.RunWorkerCompletedEventArgs::.ctor(System.Object, System.Exception, System.Boolean)
+00012780 1A System.Object System.ComponentModel.RunWorkerCompletedEventArgs::get_Result()
+000127A0 0D System.Object System.ComponentModel.RunWorkerCompletedEventArgs::get_UserState()
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using Microsoft.SymbolStore.KeyGenerators;
+using TestHelpers;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace Microsoft.SymbolStore.Tests
+{
+ public class KeyGenerationTests
+ {
+ readonly ITracer _tracer;
+
+ public KeyGenerationTests(ITestOutputHelper output)
+ {
+ _tracer = new Tracer(output);
+ }
+
+ [Fact]
+ public void FileKeyGenerator()
+ {
+ ELFCoreKeyGeneratorInternal(fileGenerator: true);
+ ELFFileKeyGeneratorInternal(fileGenerator: true);
+ //MachCoreKeyGeneratorInternal(fileGenerator: true); // disabled: needs a binary we can no longer acquire (see the skipped MachCoreKeyGenerator test)
+ MachOFileKeyGeneratorInternal(fileGenerator: true);
+ MinidumpKeyGeneratorInternal(fileGenerator: true);
+ PDBFileKeyGeneratorInternal(fileGenerator: true);
+ PEFileKeyGeneratorInternal(fileGenerator: true);
+ PortablePDBFileKeyGeneratorInternal(fileGenerator: true);
+ PerfMapFileKeyGeneratorInternal(fileGenerator: true);
+ }
+
+ [Fact]
+ public void PerfMapFileKeyGenerator()
+ {
+ PerfMapFileKeyGeneratorInternal(fileGenerator: false);
+ }
+
+ private void PerfMapFileKeyGeneratorInternal(bool fileGenerator)
+ {
+ const string LinuxPerfMapPath = "TestBinaries/PerfMapEnabled/System.ComponentModel.EventBasedAsync.ni.r2rmap";
+ using (Stream linuxPerfMapStream = File.OpenRead(LinuxPerfMapPath))
+ {
+ var file = new SymbolStoreFile(linuxPerfMapStream, LinuxPerfMapPath);
+ KeyGenerator generator = fileGenerator ? (KeyGenerator)new FileKeyGenerator(_tracer, file) : new PerfMapFileKeyGenerator(_tracer, file);
+
+ IEnumerable<SymbolStoreKey> identityKey = generator.GetKeys(KeyTypeFlags.IdentityKey);
+ Assert.True(identityKey.Single().Index == "system.componentmodel.eventbasedasync.ni.r2rmap/r2rmap-v1-734d59d6de0e96aa3c77b3e2ed498097/system.componentmodel.eventbasedasync.ni.r2rmap");
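+ // Key indexes follow the <name>/<id>/<name> layout; perf maps use "r2rmap-v<version>-<signature>" as the id.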
+
+ IEnumerable<SymbolStoreKey> symbolKey = generator.GetKeys(KeyTypeFlags.SymbolKey);
+ Assert.True(!symbolKey.Any());
+
+ IEnumerable<SymbolStoreKey> perfMapKey = generator.GetKeys(KeyTypeFlags.PerfMapKeys);
+ Assert.True(!perfMapKey.Any());
+
+ IEnumerable<SymbolStoreKey> clrKeys = generator.GetKeys(KeyTypeFlags.ClrKeys);
+ Assert.True(!clrKeys.Any());
+ }
+ }
+
+ [Fact]
+ public void ELFCoreKeyGenerator()
+ {
+ ELFCoreKeyGeneratorInternal(fileGenerator: false);
+ }
+
+ private void ELFCoreKeyGeneratorInternal(bool fileGenerator)
+ {
+ using (Stream core = TestUtilities.OpenCompressedFile("TestBinaries/triagedump.gz"))
+ {
+ var file = new SymbolStoreFile(core, "triagedump");
+ KeyGenerator generator = fileGenerator ? (KeyGenerator)new FileKeyGenerator(_tracer, file) : new ELFCoreKeyGenerator(_tracer, file);
+
+ Dictionary<string, SymbolStoreKey> identityKeys = generator.GetKeys(KeyTypeFlags.IdentityKey).ToDictionary((key) => key.Index);
+ Dictionary<string, SymbolStoreKey> symbolKeys = generator.GetKeys(KeyTypeFlags.SymbolKey).ToDictionary((key) => key.Index);
+ Dictionary<string, SymbolStoreKey> clrKeys = generator.GetKeys(KeyTypeFlags.ClrKeys).ToDictionary((key) => key.Index);
+ Dictionary<string, SymbolStoreKey> dacdbiKeys = generator.GetKeys(KeyTypeFlags.DacDbiKeys).ToDictionary((key) => key.Index);
+ Dictionary<string, SymbolStoreKey> runtimeKeys = generator.GetKeys(KeyTypeFlags.RuntimeKeys).ToDictionary((key) => key.Index);
+
+ // Program (SymbolTestApp2)
+ Assert.True(identityKeys.ContainsKey("symboltestapp2.dll/DD52998F8000/symboltestapp2.dll"));
+ Assert.True(symbolKeys.ContainsKey("symboltestapp2.pdb/ed4317cbcab24c1fa06d93f8164c74ddFFFFFFFF/symboltestapp2.pdb"));
+
+ // System.IO.dll
+ Assert.True(identityKeys.ContainsKey("system.io.dll/595CD90631400/system.io.dll"));
+ Assert.True(symbolKeys.ContainsKey("system.io.pdb/5e949d2065c746a1b510de28f35d114cFFFFFFFF/system.io.pdb"));
+
+ // System.Native.so
+ Assert.True(identityKeys.ContainsKey("system.native.so/elf-buildid-3c22124b073eeb90746d6f6eab1ae2bf4097eb70/system.native.so"));
+ Assert.True(symbolKeys.ContainsKey("_.debug/elf-buildid-sym-3c22124b073eeb90746d6f6eab1ae2bf4097eb70/_.debug"));
+
+ // libcoreclr.so
+ Assert.True(identityKeys.ContainsKey("libcoreclr.so/elf-buildid-8f39a52a756311ab365090bfe9edef7ee8c44503/libcoreclr.so"));
+ Assert.True(symbolKeys.ContainsKey("_.debug/elf-buildid-sym-8f39a52a756311ab365090bfe9edef7ee8c44503/_.debug"));
+ Assert.True(runtimeKeys.ContainsKey("libcoreclr.so/elf-buildid-8f39a52a756311ab365090bfe9edef7ee8c44503/libcoreclr.so"));
+
+ Assert.True(clrKeys.ContainsKey("libmscordaccore.so/elf-buildid-coreclr-8f39a52a756311ab365090bfe9edef7ee8c44503/libmscordaccore.so"));
+ Assert.True(clrKeys.ContainsKey("libsos.so/elf-buildid-coreclr-8f39a52a756311ab365090bfe9edef7ee8c44503/libsos.so"));
+ Assert.True(clrKeys.ContainsKey("sos.netcore.dll/elf-buildid-coreclr-8f39a52a756311ab365090bfe9edef7ee8c44503/sos.netcore.dll"));
+
+ Assert.True(dacdbiKeys.ContainsKey("libmscordaccore.so/elf-buildid-coreclr-8f39a52a756311ab365090bfe9edef7ee8c44503/libmscordaccore.so"));
+ Assert.False(dacdbiKeys.ContainsKey("libsos.so/elf-buildid-coreclr-8f39a52a756311ab365090bfe9edef7ee8c44503/libsos.so"));
+ Assert.False(dacdbiKeys.ContainsKey("sos.netcore.dll/elf-buildid-coreclr-8f39a52a756311ab365090bfe9edef7ee8c44503/sos.netcore.dll"));
+ }
+ }
+
+ [Fact]
+ public void ELFFileKeyGenerator()
+ {
+ ELFFileKeyGeneratorInternal(fileGenerator: false);
+ }
+
+ private void ELFFileKeyGeneratorInternal(bool fileGenerator)
+ {
+ using (Stream stream = TestUtilities.OpenCompressedFile("TestBinaries/libcoreclr.so.gz"))
+ {
+ var file = new SymbolStoreFile(stream, "libcoreclr.so");
+ KeyGenerator generator = fileGenerator ? (KeyGenerator)new FileKeyGenerator(_tracer, file) : new ELFFileKeyGenerator(_tracer, file);
+
+ IEnumerable<SymbolStoreKey> identityKey = generator.GetKeys(KeyTypeFlags.IdentityKey);
+ Assert.True(identityKey.Count() == 1);
+ Assert.True(identityKey.First().Index == "libcoreclr.so/elf-buildid-ef8f58a0b402d11c68f78342ef4fcc7d23798d4c/libcoreclr.so");
+
+ IEnumerable<SymbolStoreKey> symbolKey = generator.GetKeys(KeyTypeFlags.SymbolKey);
+ Assert.True(symbolKey.Count() == 1);
+ Assert.True(symbolKey.First().Index == "_.debug/elf-buildid-sym-ef8f58a0b402d11c68f78342ef4fcc7d23798d4c/_.debug");
+
+ Dictionary<string, SymbolStoreKey> clrKeys = generator.GetKeys(KeyTypeFlags.ClrKeys).ToDictionary((key) => key.Index);
+ Assert.True(clrKeys.ContainsKey("libmscordaccore.so/elf-buildid-coreclr-ef8f58a0b402d11c68f78342ef4fcc7d23798d4c/libmscordaccore.so"));
+ Assert.True(clrKeys.ContainsKey("libmscordbi.so/elf-buildid-coreclr-ef8f58a0b402d11c68f78342ef4fcc7d23798d4c/libmscordbi.so"));
+ Assert.True(clrKeys.ContainsKey("mscordaccore.dll/elf-buildid-coreclr-ef8f58a0b402d11c68f78342ef4fcc7d23798d4c/mscordaccore.dll"));
+ Assert.True(clrKeys.ContainsKey("mscordbi.dll/elf-buildid-coreclr-ef8f58a0b402d11c68f78342ef4fcc7d23798d4c/mscordbi.dll"));
+ Assert.True(clrKeys.ContainsKey("libsos.so/elf-buildid-coreclr-ef8f58a0b402d11c68f78342ef4fcc7d23798d4c/libsos.so"));
+ Assert.True(clrKeys.ContainsKey("sos.netcore.dll/elf-buildid-coreclr-ef8f58a0b402d11c68f78342ef4fcc7d23798d4c/sos.netcore.dll"));
+
+ Dictionary<string, SymbolStoreKey> dacdbiKeys = generator.GetKeys(KeyTypeFlags.DacDbiKeys).ToDictionary((key) => key.Index);
+ Assert.True(dacdbiKeys.ContainsKey("libmscordaccore.so/elf-buildid-coreclr-ef8f58a0b402d11c68f78342ef4fcc7d23798d4c/libmscordaccore.so"));
+ Assert.True(dacdbiKeys.ContainsKey("libmscordbi.so/elf-buildid-coreclr-ef8f58a0b402d11c68f78342ef4fcc7d23798d4c/libmscordbi.so"));
+ Assert.True(dacdbiKeys.ContainsKey("mscordaccore.dll/elf-buildid-coreclr-ef8f58a0b402d11c68f78342ef4fcc7d23798d4c/mscordaccore.dll"));
+ Assert.True(dacdbiKeys.ContainsKey("mscordbi.dll/elf-buildid-coreclr-ef8f58a0b402d11c68f78342ef4fcc7d23798d4c/mscordbi.dll"));
+ Assert.False(dacdbiKeys.ContainsKey("libsos.so/elf-buildid-coreclr-ef8f58a0b402d11c68f78342ef4fcc7d23798d4c/libsos.so"));
+ Assert.False(dacdbiKeys.ContainsKey("sos.netcore.dll/elf-buildid-coreclr-ef8f58a0b402d11c68f78342ef4fcc7d23798d4c/sos.netcore.dll"));
+
+ Dictionary<string, SymbolStoreKey> runtimeKeys = generator.GetKeys(KeyTypeFlags.RuntimeKeys).ToDictionary((key) => key.Index);
+ Assert.True(runtimeKeys.ContainsKey("libcoreclr.so/elf-buildid-ef8f58a0b402d11c68f78342ef4fcc7d23798d4c/libcoreclr.so"));
+ }
+
+ using (Stream stream = TestUtilities.OpenCompressedFile("TestBinaries/libcoreclrtraceptprovider.so.dbg.gz"))
+ {
+ var file = new SymbolStoreFile(stream, "libcoreclrtraceptprovider.so.dbg");
+ KeyGenerator generator = fileGenerator ? (KeyGenerator)new FileKeyGenerator(_tracer, file) : new ELFFileKeyGenerator(_tracer, file);
+
+ IEnumerable<SymbolStoreKey> identityKey = generator.GetKeys(KeyTypeFlags.IdentityKey);
+ Assert.True(identityKey.Count() == 1);
+ Assert.True(identityKey.First().Index == "_.debug/elf-buildid-sym-ce4ce0558d878a05754dff246ccea2a70a1db3a8/_.debug");
+
+ IEnumerable<SymbolStoreKey> symbolKey = generator.GetKeys(KeyTypeFlags.SymbolKey);
+ Assert.True(symbolKey.Count() == 0);
+
+ IEnumerable<SymbolStoreKey> clrKeys = generator.GetKeys(KeyTypeFlags.ClrKeys);
+ Assert.True(clrKeys.Count() == 0);
+ }
+
+ using (Stream stream = File.OpenRead("TestBinaries/symbolized_executable"))
+ {
+ var file = new SymbolStoreFile(stream, "symbolized_executable");
+ KeyGenerator generator = fileGenerator ? (KeyGenerator)new FileKeyGenerator(_tracer, file) : new ELFFileKeyGenerator(_tracer, file);
+
+ IEnumerable<SymbolStoreKey> identityKey = generator.GetKeys(KeyTypeFlags.IdentityKey);
+ Assert.True(identityKey.Count() == 1);
+ Assert.True(identityKey.First().Index == "_.debug/elf-buildid-sym-126ba1461caf6644cfdd124bfcceeffa81b18897/_.debug");
+
+ IEnumerable<SymbolStoreKey> symbolKey = generator.GetKeys(KeyTypeFlags.SymbolKey);
+ Assert.True(symbolKey.Count() == 0);
+
+ IEnumerable<SymbolStoreKey> clrKeys = generator.GetKeys(KeyTypeFlags.ClrKeys);
+ Assert.True(clrKeys.Count() == 0);
+ }
+
+ using (Stream stream = File.OpenRead("TestBinaries/stripped_executable"))
+ {
+ var file = new SymbolStoreFile(stream, "stripped_executable");
+ KeyGenerator generator = fileGenerator ? (KeyGenerator)new FileKeyGenerator(_tracer, file) : new ELFFileKeyGenerator(_tracer, file);
+
+ IEnumerable<SymbolStoreKey> identityKey = generator.GetKeys(KeyTypeFlags.IdentityKey);
+ Assert.True(identityKey.Count() == 1);
+ Assert.True(identityKey.First().Index == "stripped_executable/elf-buildid-126ba1461caf6644cfdd124bfcceeffa81b18897/stripped_executable");
+
+ IEnumerable<SymbolStoreKey> symbolKey = generator.GetKeys(KeyTypeFlags.SymbolKey);
+ Assert.True(symbolKey.Count() == 1);
+ Assert.True(symbolKey.First().Index == "_.debug/elf-buildid-sym-126ba1461caf6644cfdd124bfcceeffa81b18897/_.debug");
+
+ IEnumerable<SymbolStoreKey> clrKeys = generator.GetKeys(KeyTypeFlags.ClrKeys);
+ Assert.True(clrKeys.Count() == 0);
+ }
+
+ using (Stream stream = File.OpenRead("TestBinaries/md5_build_id"))
+ {
+ var file = new SymbolStoreFile(stream, "md5_build_id");
+ KeyGenerator generator = fileGenerator ? (KeyGenerator)new FileKeyGenerator(_tracer, file) : new ELFFileKeyGenerator(_tracer, file);
+
+ IEnumerable<SymbolStoreKey> identityKey = generator.GetKeys(KeyTypeFlags.IdentityKey);
+ Assert.True(identityKey.Count() == 1);
+ Assert.True(identityKey.First().Index == "md5_build_id/elf-buildid-001ba81f23966cf77e40bcbb0701cd3400000000/md5_build_id");
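+ // The 16-byte MD5 build id is zero-padded to the 20-byte index length (note the trailing 00000000).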
+
+ IEnumerable<SymbolStoreKey> symbolKey = generator.GetKeys(KeyTypeFlags.SymbolKey);
+ Assert.True(symbolKey.Count() == 1);
+ Assert.True(symbolKey.First().Index == "_.debug/elf-buildid-sym-001ba81f23966cf77e40bcbb0701cd3400000000/_.debug");
+
+ IEnumerable<SymbolStoreKey> clrKeys = generator.GetKeys(KeyTypeFlags.ClrKeys);
+ Assert.True(clrKeys.Count() == 0);
+ }
+ }
+
+ [Fact(Skip = "Need an alternate scheme to acquire the binary this test was reading")]
+ public void MachCoreKeyGenerator()
+ {
+ MachCoreKeyGeneratorInternal(fileGenerator: false);
+ }
+
+ private void MachCoreKeyGeneratorInternal(bool fileGenerator)
+ {
+ using (Stream core = TestUtilities.DecompressFile("TestBinaries/core.gz", "TestBinaries/core"))
+ {
+ var file = new SymbolStoreFile(core, "core");
+ KeyGenerator generator = fileGenerator ? (KeyGenerator)new FileKeyGenerator(_tracer, file) : new MachCoreKeyGenerator(_tracer, file);
+
+ Dictionary<string, SymbolStoreKey> identityKeys = generator.GetKeys(KeyTypeFlags.IdentityKey).ToDictionary((key) => key.Index);
+ Dictionary<string, SymbolStoreKey> symbolKeys = generator.GetKeys(KeyTypeFlags.SymbolKey).ToDictionary((key) => key.Index);
+ Dictionary<string, SymbolStoreKey> clrKeys = generator.GetKeys(KeyTypeFlags.ClrKeys).ToDictionary((key) => key.Index);
+ Dictionary<string, SymbolStoreKey> dacdbiKeys = generator.GetKeys(KeyTypeFlags.DacDbiKeys).ToDictionary((key) => key.Index);
+ Dictionary<string, SymbolStoreKey> runtimeKeys = generator.GetKeys(KeyTypeFlags.RuntimeKeys).ToDictionary((key) => key.Index);
+
+ // System.Native.dylib
+ Assert.True(identityKeys.ContainsKey("system.native.dylib/mach-uuid-f7c77509e13a3da18099a2b97e90fade/system.native.dylib"));
+ Assert.True(symbolKeys.ContainsKey("_.dwarf/mach-uuid-sym-f7c77509e13a3da18099a2b97e90fade/_.dwarf"));
+
+ // libcoreclr.dylib
+ Assert.True(identityKeys.ContainsKey("libcoreclr.dylib/mach-uuid-3e0f66c5527338b18141e9d63b8ab415/libcoreclr.dylib"));
+ Assert.True(symbolKeys.ContainsKey("_.dwarf/mach-uuid-sym-3e0f66c5527338b18141e9d63b8ab415/_.dwarf"));
+ Assert.True(runtimeKeys.ContainsKey("libcoreclr.dylib/mach-uuid-3e0f66c5527338b18141e9d63b8ab415/libcoreclr.dylib"));
+
+ Assert.True(clrKeys.ContainsKey("libmscordaccore.dylib/mach-uuid-coreclr-3e0f66c5527338b18141e9d63b8ab415/libmscordaccore.dylib"));
+ Assert.True(clrKeys.ContainsKey("libmscordbi.dylib/mach-uuid-coreclr-3e0f66c5527338b18141e9d63b8ab415/libmscordbi.dylib"));
+ Assert.True(clrKeys.ContainsKey("libsos.dylib/mach-uuid-coreclr-3e0f66c5527338b18141e9d63b8ab415/libsos.dylib"));
+ Assert.True(clrKeys.ContainsKey("sos.netcore.dll/mach-uuid-coreclr-3e0f66c5527338b18141e9d63b8ab415/sos.netcore.dll"));
+
+ Assert.True(dacdbiKeys.ContainsKey("libmscordaccore.dylib/mach-uuid-coreclr-3e0f66c5527338b18141e9d63b8ab415/libmscordaccore.dylib"));
+ Assert.True(dacdbiKeys.ContainsKey("libmscordbi.dylib/mach-uuid-coreclr-3e0f66c5527338b18141e9d63b8ab415/libmscordbi.dylib"));
+ Assert.False(dacdbiKeys.ContainsKey("libsos.dylib/mach-uuid-coreclr-3e0f66c5527338b18141e9d63b8ab415/libsos.dylib"));
+ Assert.False(dacdbiKeys.ContainsKey("sos.netcore.dll/mach-uuid-coreclr-3e0f66c5527338b18141e9d63b8ab415/sos.netcore.dll"));
+
+ }
+ }
+
+ [Fact]
+ public void MachOFileKeyGenerator()
+ {
+ MachOFileKeyGeneratorInternal(fileGenerator: false);
+ }
+
+ private void MachOFileKeyGeneratorInternal(bool fileGenerator)
+ {
+ using (Stream dylib = TestUtilities.OpenCompressedFile("TestBinaries/libcoreclr.dylib.gz"))
+ {
+ var file = new SymbolStoreFile(dylib, "libcoreclr.dylib");
+ KeyGenerator generator = fileGenerator ? (KeyGenerator)new FileKeyGenerator(_tracer, file) : new MachOFileKeyGenerator(_tracer, file);
+
+ IEnumerable<SymbolStoreKey> identityKey = generator.GetKeys(KeyTypeFlags.IdentityKey);
+ Assert.True(identityKey.Count() == 1);
+ Assert.True(identityKey.First().Index == "libcoreclr.dylib/mach-uuid-b5372bdabccd38f8899b6a782ceca847/libcoreclr.dylib");
+
+ IEnumerable<SymbolStoreKey> symbolKey = generator.GetKeys(KeyTypeFlags.SymbolKey);
+ Assert.True(symbolKey.Count() == 1);
+ Assert.True(symbolKey.First().Index == "_.dwarf/mach-uuid-sym-b5372bdabccd38f8899b6a782ceca847/_.dwarf");
+
+ Dictionary<string, SymbolStoreKey> clrKeys = generator.GetKeys(KeyTypeFlags.ClrKeys).ToDictionary((key) => key.Index);
+ Assert.True(clrKeys.ContainsKey("libmscordaccore.dylib/mach-uuid-coreclr-b5372bdabccd38f8899b6a782ceca847/libmscordaccore.dylib"));
+ Assert.True(clrKeys.ContainsKey("libsos.dylib/mach-uuid-coreclr-b5372bdabccd38f8899b6a782ceca847/libsos.dylib"));
+ Assert.True(clrKeys.ContainsKey("sos.netcore.dll/mach-uuid-coreclr-b5372bdabccd38f8899b6a782ceca847/sos.netcore.dll"));
+
+ Dictionary<string, SymbolStoreKey> runtimeKeys = generator.GetKeys(KeyTypeFlags.RuntimeKeys).ToDictionary((key) => key.Index);
+ Assert.True(runtimeKeys.ContainsKey("libcoreclr.dylib/mach-uuid-b5372bdabccd38f8899b6a782ceca847/libcoreclr.dylib"));
+ }
+
+ using (Stream dwarf = TestUtilities.OpenCompressedFile("TestBinaries/libclrjit.dylib.dwarf.gz"))
+ {
+ var file = new SymbolStoreFile(dwarf, "libclrjit.dylib.dwarf");
+ KeyGenerator generator = fileGenerator ? (KeyGenerator)new FileKeyGenerator(_tracer, file) : new MachOFileKeyGenerator(_tracer, file);
+
+ IEnumerable<SymbolStoreKey> identityKey = generator.GetKeys(KeyTypeFlags.IdentityKey);
+ Assert.True(identityKey.Count() == 1);
+ Assert.True(identityKey.First().Index == "_.dwarf/mach-uuid-sym-b35e230c8ee932efb6e6e6ed18a604a8/_.dwarf");
+
+ IEnumerable<SymbolStoreKey> symbolKey = generator.GetKeys(KeyTypeFlags.SymbolKey);
+ Assert.True(symbolKey.Count() == 0);
+
+ IEnumerable<SymbolStoreKey> clrKeys = generator.GetKeys(KeyTypeFlags.ClrKeys);
+ Assert.True(clrKeys.Count() == 0);
+ }
+
+ using (Stream machofat = TestUtilities.OpenCompressedFile("TestBinaries/libSystem.Security.Cryptography.Native.Apple.dylib.gz"))
+ {
+ var file = new SymbolStoreFile(machofat, "libsystem.security.cryptography.native.apple.dylib");
+ KeyGenerator generator = fileGenerator ? (KeyGenerator)new FileKeyGenerator(_tracer, file) : new MachOFatHeaderKeyGenerator(_tracer, file);
+
+ Dictionary<string, SymbolStoreKey> identityKeys = generator.GetKeys(KeyTypeFlags.IdentityKey).ToDictionary((key) => key.Index);
+ Assert.True(identityKeys.ContainsKey("libsystem.security.cryptography.native.apple.dylib/mach-uuid-fad93e41f2e23d11aab75e98d7fe66d6/libsystem.security.cryptography.native.apple.dylib"));
+ Assert.True(identityKeys.ContainsKey("libsystem.security.cryptography.native.apple.dylib/mach-uuid-e5bf8b935f393806a20933aa98adf5b7/libsystem.security.cryptography.native.apple.dylib"));
+
+ Dictionary<string, SymbolStoreKey> symbolKeys = generator.GetKeys(KeyTypeFlags.SymbolKey).ToDictionary((key) => key.Index);
+ Assert.True(symbolKeys.ContainsKey("_.dwarf/mach-uuid-sym-fad93e41f2e23d11aab75e98d7fe66d6/_.dwarf"));
+ Assert.True(symbolKeys.ContainsKey("_.dwarf/mach-uuid-sym-e5bf8b935f393806a20933aa98adf5b7/_.dwarf"));
+ }
+ }
+
+ [Fact]
+ public void MinidumpKeyGenerator()
+ {
+ MinidumpKeyGeneratorInternal(fileGenerator: false);
+ }
+
+ private void MinidumpKeyGeneratorInternal(bool fileGenerator)
+ {
+ using (Stream core = TestUtilities.OpenCompressedFile("TestBinaries/minidump_x64.dmp.gz"))
+ {
+ var file = new SymbolStoreFile(core, "minidump_x64.dmp");
+ KeyGenerator generator = fileGenerator ? (KeyGenerator)new FileKeyGenerator(_tracer, file) : new MinidumpKeyGenerator(_tracer, file);
+
+ Dictionary<string, SymbolStoreKey> identityKeys = generator.GetKeys(KeyTypeFlags.IdentityKey).ToDictionary((key) => key.Index);
+ Dictionary<string, SymbolStoreKey> symbolKeys = generator.GetKeys(KeyTypeFlags.SymbolKey).ToDictionary((key) => key.Index);
+
+ // Program (exception.exe)
+ Assert.True(identityKeys.ContainsKey("exception.exe/57B39FFA6000/exception.exe"));
+ Assert.True(symbolKeys.ContainsKey("exception.pdb/df85e94d63ae4d8992fbf81730a7ac911/exception.pdb"));
+
+ // mscoree.dll
+ Assert.True(identityKeys.ContainsKey("mscoree.dll/57A5832766000/mscoree.dll"));
+ Assert.True(symbolKeys.ContainsKey("mscoree.pdb/4a348372fdff448ab6a1bfc8b93ffb6b1/mscoree.pdb"));
+ }
+ }
+
+ [Fact]
+ public void PDBFileKeyGenerator()
+ {
+ PDBFileKeyGeneratorInternal(fileGenerator: false);
+ }
+
+ private void PDBFileKeyGeneratorInternal(bool fileGenerator)
+ {
+ const string TestBinary = "TestBinaries/HelloWorld.pdb";
+ using (Stream pdb = File.OpenRead(TestBinary))
+ {
+ var file = new SymbolStoreFile(pdb, TestBinary);
+ KeyGenerator generator = fileGenerator ? (KeyGenerator)new FileKeyGenerator(_tracer, file) : new PDBFileKeyGenerator(_tracer, file);
+
+ IEnumerable<SymbolStoreKey> identityKey = generator.GetKeys(KeyTypeFlags.IdentityKey);
+ Assert.True(identityKey.Count() == 1);
+ Assert.True(identityKey.First().Index == "helloworld.pdb/99891b3ed7ae4c3babff8a2b4a9b0c431/helloworld.pdb");
+
+ IEnumerable<SymbolStoreKey> symbolKey = generator.GetKeys(KeyTypeFlags.SymbolKey);
+ Assert.True(symbolKey.Count() == 0);
+
+ IEnumerable<SymbolStoreKey> clrKeys = generator.GetKeys(KeyTypeFlags.ClrKeys);
+ Assert.True(clrKeys.Count() == 0);
+ }
+ }
+
+ [Fact]
+ public void PEFileKeyGenerator()
+ {
+ PEFileKeyGeneratorInternal(fileGenerator: false);
+ }
+
+ private void PEFileKeyGeneratorInternal(bool fileGenerator)
+ {
+ const string TestBinaryExe = "TestBinaries/HelloWorld.exe";
+ using (Stream exe = File.OpenRead(TestBinaryExe))
+ {
+ var file = new SymbolStoreFile(exe, TestBinaryExe);
+ KeyGenerator generator = fileGenerator ? (KeyGenerator)new FileKeyGenerator(_tracer, file) : new PEFileKeyGenerator(_tracer, file);
+
+ IEnumerable<SymbolStoreKey> identityKey = generator.GetKeys(KeyTypeFlags.IdentityKey);
+ Assert.True(identityKey.Count() == 1);
+ Assert.True(identityKey.First().Index == "helloworld.exe/577F59198000/helloworld.exe");
+
+ IEnumerable<SymbolStoreKey> symbolKey = generator.GetKeys(KeyTypeFlags.SymbolKey);
+ Assert.True(symbolKey.Count() == 1);
+ Assert.True(symbolKey.First().Index == "helloworld.pdb/99891b3ed7ae4c3babff8a2b4a9b0c431/helloworld.pdb");
+
+ IEnumerable<SymbolStoreKey> perfMapKey = generator.GetKeys(KeyTypeFlags.PerfMapKeys);
+ Assert.True(perfMapKey.Count() == 0);
+
+ IEnumerable<SymbolStoreKey> clrKeys = generator.GetKeys(KeyTypeFlags.ClrKeys);
+ Assert.True(clrKeys.Count() == 0);
+ }
+
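+        // A ReadyToRun PE produced with perf map support also yields an r2rmap perf map key.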
+ const string LinuxPePath = "TestBinaries/PerfMapEnabled/System.ComponentModel.EventBasedAsync.dll";
+ using (Stream linuxPeStream = File.OpenRead(LinuxPePath))
+ {
+ var file = new SymbolStoreFile(linuxPeStream, LinuxPePath);
+ KeyGenerator generator = fileGenerator ? (KeyGenerator)new FileKeyGenerator(_tracer, file) : new PEFileKeyGenerator(_tracer, file);
+
+ IEnumerable<SymbolStoreKey> identityKey = generator.GetKeys(KeyTypeFlags.IdentityKey);
+ Assert.True(identityKey.Count() == 1);
+ Assert.True(identityKey.First().Index == "system.componentmodel.eventbasedasync.dll/9757F3A636c00/system.componentmodel.eventbasedasync.dll");
+
+ IEnumerable<SymbolStoreKey> symbolKey = generator.GetKeys(KeyTypeFlags.SymbolKey);
+ Assert.True(symbolKey.Single().Index == "system.componentmodel.eventbasedasync.pdb/99d3f272c6a8429ba694647a7912d178FFFFFFFF/system.componentmodel.eventbasedasync.pdb");
+
+ IEnumerable<SymbolStoreKey> perfMapKey = generator.GetKeys(KeyTypeFlags.PerfMapKeys);
+ Assert.True(perfMapKey.Single().Index == "system.componentmodel.eventbasedasync.ni.r2rmap/r2rmap-v1-734d59d6de0e96aa3c77b3e2ed498097/system.componentmodel.eventbasedasync.ni.r2rmap");
+
+ IEnumerable<SymbolStoreKey> clrKeys = generator.GetKeys(KeyTypeFlags.ClrKeys);
+ Assert.True(clrKeys.Count() == 0);
+ }
+
+ const string TestBinaryDll = "TestBinaries/System.Diagnostics.StackTrace.dll";
+ using (Stream dll = File.OpenRead(TestBinaryDll))
+ {
+ var file = new SymbolStoreFile(dll, TestBinaryDll);
+ KeyGenerator generator = fileGenerator ? (KeyGenerator)new FileKeyGenerator(_tracer, file) : new PEFileKeyGenerator(_tracer, file);
+
+ IEnumerable<SymbolStoreKey> identityKey = generator.GetKeys(KeyTypeFlags.IdentityKey);
+ Assert.True(identityKey.Count() == 1);
+ Assert.True(identityKey.First().Index == "system.diagnostics.stacktrace.dll/595CD91B35a00/system.diagnostics.stacktrace.dll");
+
+ IEnumerable<SymbolStoreKey> symbolKey = generator.GetKeys(KeyTypeFlags.SymbolKey);
+ Assert.True(symbolKey.Count() == 2);
+ Assert.True(symbolKey.First().Index == "system.diagnostics.stacktrace.ni.pdb/3cd5a68a9f2cd99b169d074e6e956d4fFFFFFFFF/system.diagnostics.stacktrace.ni.pdb");
+ Assert.True(symbolKey.Last().Index == "system.diagnostics.stacktrace.pdb/8b2e8cf443144806982ab7d904876a50FFFFFFFF/system.diagnostics.stacktrace.pdb");
+
+ IEnumerable<SymbolStoreKey> perfMapKey = generator.GetKeys(KeyTypeFlags.PerfMapKeys);
+ Assert.True(perfMapKey.Count() == 0);
+
+ IEnumerable<SymbolStoreKey> clrKeys = generator.GetKeys(KeyTypeFlags.ClrKeys);
+ Assert.True(clrKeys.Count() == 0);
+ }
+
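+        // coreclr.dll is a runtime module: besides its identity and symbol keys it produces
+        // the CLR special-file keys (DAC and DBI) and a runtime key.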
+ using (Stream coreclr = TestUtilities.OpenCompressedFile("TestBinaries/coreclr.dll.gz"))
+ {
+ var file = new SymbolStoreFile(coreclr, "coreclr.dll");
+ KeyGenerator generator = fileGenerator ? (KeyGenerator)new FileKeyGenerator(_tracer, file) : new PEFileKeyGenerator(_tracer, file);
+
+ IEnumerable<SymbolStoreKey> identityKey = generator.GetKeys(KeyTypeFlags.IdentityKey);
+ Assert.True(identityKey.Count() == 1);
+ Assert.True(identityKey.First().Index == "coreclr.dll/595EBCD5538000/coreclr.dll");
+
+ IEnumerable<SymbolStoreKey> symbolKey = generator.GetKeys(KeyTypeFlags.SymbolKey);
+ Assert.True(symbolKey.Count() == 1);
+ Assert.True(symbolKey.First().Index == "coreclr.pdb/3f3d5a3258e64ae8b86b31ff776949351/coreclr.pdb");
+
+ Dictionary<string, SymbolStoreKey> clrKeys = generator.GetKeys(KeyTypeFlags.ClrKeys).ToDictionary((key) => key.Index);
+ Assert.True(clrKeys.Count() == 3);
+ Assert.True(clrKeys.ContainsKey("mscordaccore.dll/595EBCD5538000/mscordaccore.dll"));
+ Assert.True(clrKeys.ContainsKey("mscordaccore_amd64_amd64_4.6.25505.00.dll/595EBCD5538000/mscordaccore_amd64_amd64_4.6.25505.00.dll"));
+ Assert.True(clrKeys.ContainsKey("mscordbi.dll/595EBCD5538000/mscordbi.dll"));
+
+ Dictionary<string, SymbolStoreKey> dacdbiKeys = generator.GetKeys(KeyTypeFlags.DacDbiKeys).ToDictionary((key) => key.Index);
+ Assert.True(dacdbiKeys.Count() == 3);
+ Assert.True(dacdbiKeys.ContainsKey("mscordaccore.dll/595EBCD5538000/mscordaccore.dll"));
+ Assert.True(dacdbiKeys.ContainsKey("mscordaccore_amd64_amd64_4.6.25505.00.dll/595EBCD5538000/mscordaccore_amd64_amd64_4.6.25505.00.dll"));
+ Assert.True(dacdbiKeys.ContainsKey("mscordbi.dll/595EBCD5538000/mscordbi.dll"));
+
+ IEnumerable<SymbolStoreKey> perfMapKey = generator.GetKeys(KeyTypeFlags.PerfMapKeys);
+ Assert.True(perfMapKey.Count() == 0);
+
+ Dictionary<string, SymbolStoreKey> runtimeKeys = generator.GetKeys(KeyTypeFlags.RuntimeKeys).ToDictionary((key) => key.Index);
+ Assert.True(runtimeKeys.ContainsKey("coreclr.dll/595EBCD5538000/coreclr.dll"));
+ }
+ }
+
+ [Fact]
+ public void PortablePDBFileKeyGenerator()
+ {
+ PortablePDBFileKeyGeneratorInternal(fileGenerator: false);
+ }
+
+ private void PortablePDBFileKeyGeneratorInternal(bool fileGenerator)
+ {
+ const string TestBinary = "TestBinaries/dir1/System.Threading.Thread.pdb";
+ using (Stream pdb = File.OpenRead(TestBinary))
+ {
+ var file = new SymbolStoreFile(pdb, TestBinary);
+ KeyGenerator generator = fileGenerator ? (KeyGenerator)new FileKeyGenerator(_tracer, file) : new PortablePDBFileKeyGenerator(_tracer, file);
+
+ IEnumerable<SymbolStoreKey> identityKey = generator.GetKeys(KeyTypeFlags.IdentityKey);
+ Assert.True(identityKey.Count() == 1);
+ Assert.True(identityKey.First().Index == "system.threading.thread.pdb/a43b38726e6a4b3cb1691f35f0d6cc48FFFFFFFF/system.threading.thread.pdb");
+
+ IEnumerable<SymbolStoreKey> symbolKey = generator.GetKeys(KeyTypeFlags.SymbolKey);
+ Assert.True(symbolKey.Count() == 0);
+
+ IEnumerable<SymbolStoreKey> clrKeys = generator.GetKeys(KeyTypeFlags.ClrKeys);
+ Assert.True(clrKeys.Count() == 0);
+ }
+ }
+
+ [Fact]
+ public void SourceFileKeyGenerator()
+ {
+ using (Stream source = TestUtilities.OpenCompressedFile("TestBinaries/StackTraceSymbols.CoreCLR.cs.gz"))
+ {
+ var file = new SymbolStoreFile(source, "StackTraceSymbols.CoreCLR.cs");
+ var generator = new SourceFileKeyGenerator(_tracer, file);
+
+ IEnumerable<SymbolStoreKey> identityKey = generator.GetKeys(KeyTypeFlags.IdentityKey);
+ Assert.True(identityKey.Count() == 1);
+ Assert.True(identityKey.First().Index == "stacktracesymbols.coreclr.cs/sha1-da39a3ee5e6b4b0d3255bfef95601890afd80709/stacktracesymbols.coreclr.cs");
+
+ IEnumerable<SymbolStoreKey> symbolKey = generator.GetKeys(KeyTypeFlags.SymbolKey);
+ Assert.True(symbolKey.Count() == 0);
+
+ IEnumerable<SymbolStoreKey> clrKeys = generator.GetKeys(KeyTypeFlags.ClrKeys);
+ Assert.True(clrKeys.Count() == 0);
+ }
+ }
+ }
+}
--- /dev/null
+<Project Sdk="Microsoft.NET.Sdk">
+ <PropertyGroup>
+ <TargetFramework>net6.0</TargetFramework>
+    <NoWarn>1591;1701</NoWarn>
+ </PropertyGroup>
+
+ <ItemGroup>
+ <None Remove="TestBinaries\coreclr.dll.gz" />
+ <None Remove="TestBinaries\md5_build_id" />
+ <None Remove="TestBinaries\StackTraceSymbols.CoreCLR.cs.gz" />
+ <None Remove="TestBinaries\stripped_executable" />
+ <None Remove="TestBinaries\symbolized_executable" />
+ </ItemGroup>
+
+ <ItemGroup>
+ <ProjectReference Include="$(MSBuildThisFileDirectory)..\..\Microsoft.SymbolStore\Microsoft.SymbolStore.csproj" />
+ <ProjectReference Include="$(MSBuildThisFileDirectory)..\TestHelpers\TestHelpers.csproj" />
+ </ItemGroup>
+
+ <ItemGroup>
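+    <!-- Registers the project with the Visual Studio test runner (Test Explorer) -->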
+ <Service Include="{82a7f48d-3b50-4b1e-b82e-3ada8210c358}" />
+ </ItemGroup>
+
+ <ItemGroup>
+ <Folder Include="TestBinaries\">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Folder>
+ </ItemGroup>
+
+ <ItemGroup>
+ <Content Include="$(MSBuildThisFileDirectory)..\Microsoft.FileFormats.UnitTests\TestBinaries\PerfMapEnabled\System.ComponentModel.EventBasedAsync.dll" Link="TestBinaries\PerfMapEnabled\System.ComponentModel.EventBasedAsync.dll">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)..\Microsoft.FileFormats.UnitTests\TestBinaries\PerfMapEnabled\System.ComponentModel.EventBasedAsync.ni.r2rmap" Link="TestBinaries\PerfMapEnabled\System.ComponentModel.EventBasedAsync.ni.r2rmap">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)..\Microsoft.FileFormats.UnitTests\TestBinaries\triagedump.gz" Link="TestBinaries\triagedump.gz">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)..\Microsoft.FileFormats.UnitTests\TestBinaries\libcoreclr.so.gz" Link="TestBinaries\libcoreclr.so.gz">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)..\Microsoft.FileFormats.UnitTests\TestBinaries\libcoreclr.dylib.gz" Link="TestBinaries\libcoreclr.dylib.gz">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)..\Microsoft.FileFormats.UnitTests\TestBinaries\libcoreclrtraceptprovider.so.dbg.gz" Link="TestBinaries\libcoreclrtraceptprovider.so.dbg.gz">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)..\Microsoft.FileFormats.UnitTests\TestBinaries\libclrjit.dylib.dwarf.gz" Link="TestBinaries\libclrjit.dylib.dwarf.gz">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)..\Microsoft.FileFormats.UnitTests\TestBinaries\minidump_x64.dmp.gz" Link="TestBinaries\minidump_x64.dmp.gz">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)..\Microsoft.FileFormats.UnitTests\TestBinaries\HelloWorld.pdb" Link="TestBinaries\HelloWorld.pdb">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)..\Microsoft.FileFormats.UnitTests\TestBinaries\HelloWorld.exe" Link="TestBinaries\HelloWorld.exe">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)..\Microsoft.FileFormats.UnitTests\TestBinaries\System.Diagnostics.StackTrace.dll" Link="TestBinaries\System.Diagnostics.StackTrace.dll">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\dir1\System.Threading.Thread.dll">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\dir1\System.Threading.Thread.pdb">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\dir2\System.Threading.Thread.dll">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\dir2\System.Threading.Thread.pdb">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\coreclr.dll.gz">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\mockclr_amd64.dll">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\mockclr_arm64.dll">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\mockclr_i386.dll">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\mockdac.dll">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\mockdbi.dll">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\mocksos.dll">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\libSystem.Security.Cryptography.Native.Apple.dylib.gz">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\md5_build_id">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\StackTraceSymbols.CoreCLR.cs.gz">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\stripped_executable">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)TestBinaries\symbolized_executable">
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ </ItemGroup>
+</Project>
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using Microsoft.SymbolStore.KeyGenerators;
+using TestHelpers;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace Microsoft.SymbolStore.Tests
+{
+ public class PEFileKeyGenerationTests
+ {
+ readonly ITracer _tracer;
+
+ public PEFileKeyGenerationTests(ITestOutputHelper output)
+ {
+ _tracer = new Tracer(output);
+ }
+
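+        /// <summary>
+        /// Describes a mock PE binary on disk together with the index id and the
+        /// DAC/DBI and SOS key sets its key generator is expected to produce.
+        /// </summary>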
+ public class MockPEFile
+ {
+ public string Path { get; }
+ public string FileName { get; }
+ public string Id { get; }
+ public bool IsRuntimeModule { get; }
+ public bool IsSpecialFile { get; }
+ public string[] DacDbiFiles { get; }
+ public string[] SosFiles { get; }
+
+            public MockPEFile(string path, string fileName, string id, bool isRuntimeModule, bool isSpecialFile, string[] dacDbiFiles, string[] sosFiles)
+            {
+                Path = path;
+                FileName = fileName;
+                Id = id;
+ IsRuntimeModule = isRuntimeModule;
+ IsSpecialFile = isSpecialFile;
+ DacDbiFiles = dacDbiFiles;
+ SosFiles = sosFiles;
+ }
+ }
+
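+        // Test data for the mock binaries. Each id is the PE index (timestamp plus size
+        // of image) of the mock file; read as ASCII, the hex digits spell "MOCKCLR",
+        // "MOCKDAC", "MOCKDBI" and "MOCKSOS".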
+ public static IEnumerable<object[]> MockPEFiles()
+ {
+ yield return new object[] { new MockPEFile("TestBinaries/mockclr_amd64.dll", "clr.dll", "4D4F434B434c52", true, false, new string[] { "mscordacwks.dll", "mscordacwks_amd64_amd64_1.2.3.45.dll", "mscordbi.dll" }, new string[] { "sos_amd64_amd64_1.2.3.45.dll" } ) };
+ yield return new object[] { new MockPEFile("TestBinaries/mockclr_arm64.dll", "clr.dll", "4D4F434B434c52", true, false, new string[] { "mscordacwks.dll", "mscordacwks_arm64_arm64_1.2.3.45.dll", "mscordacwks_amd64_arm64_1.2.3.45.dll", "mscordbi.dll" }, new string[] { "sos_arm64_arm64_1.2.3.45.dll", "sos_amd64_arm64_1.2.3.45.dll" }) };
+ yield return new object[] { new MockPEFile("TestBinaries/mockclr_i386.dll", "clr.dll", "4D4F434B434c52", true, false, new string[] { "mscordacwks.dll", "mscordacwks_x86_x86_1.2.3.45.dll", "mscordbi.dll" }, new string[] { "sos_x86_x86_1.2.3.45.dll" }) };
+ yield return new object[] { new MockPEFile("TestBinaries/mockclr_amd64.dll", "coreclr.dll", "4D4F434B434c52", true, false, new string[] { "mscordaccore.dll", "mscordaccore_amd64_amd64_1.2.3.45.dll", "mscordbi.dll" }, []) };
+ yield return new object[] { new MockPEFile("TestBinaries/mockclr_arm64.dll", "coreclr.dll", "4D4F434B434c52", true, false, new string[] { "mscordaccore.dll", "mscordaccore_arm64_arm64_1.2.3.45.dll", "mscordaccore_amd64_arm64_1.2.3.45.dll", "mscordbi.dll" }, []) };
+ yield return new object[] { new MockPEFile("TestBinaries/mockclr_i386.dll", "coreclr.dll", "4D4F434B434c52", true, false, new string[] { "mscordaccore.dll", "mscordaccore_x86_x86_1.2.3.45.dll", "mscordbi.dll" }, []) };
+ yield return new object[] { new MockPEFile("TestBinaries/mockdac.dll", "mscordacwks.dll", "4D4F434B444143", false, true, [], []) };
+ yield return new object[] { new MockPEFile("TestBinaries/mockdac.dll", "mscordacwks_amd64_amd64_1.2.3.45.dll", "4D4F434B444143", false, true, [], []) };
+ yield return new object[] { new MockPEFile("TestBinaries/mockdbi.dll", "mscordbi.dll", "4D4F434B444249", false, true, [], []) };
+ yield return new object[] { new MockPEFile("TestBinaries/mocksos.dll", "sos.dll", "4D4F434B534f53", false, false, [], []) };
+ yield return new object[] { new MockPEFile("TestBinaries/mocksos.dll", "sos_amd64_amd64_1.2.3.45.dll", "4D4F434B534f53", false, true, [], []) };
+ }
+
+ [Theory]
+ [MemberData(nameof(MockPEFiles))]
+ public void PEFileGenerateNoneKeys(MockPEFile mockPEFile)
+ {
+ using var mockFileStream = new FileStream(mockPEFile.Path, FileMode.Open, FileAccess.Read);
+ var mockSymbolStoreFile = new SymbolStoreFile(mockFileStream, mockPEFile.FileName);
+ var generator = new PEFileKeyGenerator(_tracer, mockSymbolStoreFile);
+
+ var noneKeys = generator.GetKeys(KeyTypeFlags.None);
+ Assert.Empty(noneKeys);
+ }
+
+ [Theory]
+ [MemberData(nameof(MockPEFiles))]
+ public void PEFileGenerateIdentityKeys(MockPEFile mockPEFile)
+ {
+ using var mockFileStream = new FileStream(mockPEFile.Path, FileMode.Open, FileAccess.Read);
+ var mockSymbolStoreFile = new SymbolStoreFile(mockFileStream, mockPEFile.FileName);
+ var generator = new PEFileKeyGenerator(_tracer, mockSymbolStoreFile);
+
+ var identityKeys = generator.GetKeys(KeyTypeFlags.IdentityKey);
+ Assert.True(identityKeys.Count() == 1);
+ Assert.True(identityKeys.First().Index == $"{mockPEFile.FileName}/{mockPEFile.Id}/{mockPEFile.FileName}");
+ Assert.True(identityKeys.First().IsClrSpecialFile == mockPEFile.IsSpecialFile);
+ }
+
+ [Theory]
+ [MemberData(nameof(MockPEFiles))]
+ public void PEFileGenerateClrKeys(MockPEFile mockPEFile)
+ {
+ using var mockFileStream = new FileStream(mockPEFile.Path, FileMode.Open, FileAccess.Read);
+ var mockSymbolStoreFile = new SymbolStoreFile(mockFileStream, mockPEFile.FileName);
+ var generator = new PEFileKeyGenerator(_tracer, mockSymbolStoreFile);
+
+ var clrKeys = generator.GetKeys(KeyTypeFlags.ClrKeys).ToDictionary((key) => key.Index);
+ var specialFiles = mockPEFile.DacDbiFiles.Concat(mockPEFile.SosFiles);
+ Assert.True(clrKeys.Count() == specialFiles.Count());
+ foreach (var specialFileName in specialFiles)
+ {
+ Assert.True(clrKeys.ContainsKey($"{specialFileName}/{mockPEFile.Id}/{specialFileName}"));
+ }
+ }
+
+ [Theory]
+ [MemberData(nameof(MockPEFiles))]
+ public void PEFileGenerateDacDbiKeys(MockPEFile mockPEFile)
+ {
+ using var mockFileStream = new FileStream(mockPEFile.Path, FileMode.Open, FileAccess.Read);
+ var mockSymbolStoreFile = new SymbolStoreFile(mockFileStream, mockPEFile.FileName);
+ var generator = new PEFileKeyGenerator(_tracer, mockSymbolStoreFile);
+
+ var dacdbiKeys = generator.GetKeys(KeyTypeFlags.DacDbiKeys).ToDictionary((key) => key.Index);
+ Assert.True(dacdbiKeys.Count() == mockPEFile.DacDbiFiles.Count());
+ foreach (var specialFileName in mockPEFile.DacDbiFiles)
+ {
+ Assert.True(dacdbiKeys.ContainsKey($"{specialFileName}/{mockPEFile.Id}/{specialFileName}"));
+ }
+ }
+
+ [Theory]
+ [MemberData(nameof(MockPEFiles))]
+ public void PEFileGenerateRuntimeKeys(MockPEFile mockPEFile)
+ {
+ using var mockFileStream = new FileStream(mockPEFile.Path, FileMode.Open, FileAccess.Read);
+ var mockSymbolStoreFile = new SymbolStoreFile(mockFileStream, mockPEFile.FileName);
+ var generator = new PEFileKeyGenerator(_tracer, mockSymbolStoreFile);
+
+ var runtimeKeys = generator.GetKeys(KeyTypeFlags.RuntimeKeys);
+ if (mockPEFile.IsRuntimeModule)
+ {
+ Assert.True(runtimeKeys.Count() == 1);
+ Assert.True(runtimeKeys.First().Index == $"{mockPEFile.FileName}/{mockPEFile.Id}/{mockPEFile.FileName}");
+ }
+ else
+ {
+ Assert.Empty(runtimeKeys);
+ }
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.SymbolStore.KeyGenerators;
+using Microsoft.SymbolStore.SymbolStores;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace Microsoft.SymbolStore.Tests
+{
+ public class SymbolStoreTests
+ {
+ readonly ITracer _tracer;
+
+ public SymbolStoreTests(ITestOutputHelper output)
+ {
+ _tracer = new Tracer(output);
+ }
+
+ [Fact]
+ public async Task CacheSymbolStore()
+ {
+            using (Stream pdb = File.OpenRead("TestBinaries/HelloWorld.pdb"))
+            {
+                // Clean up any previous cache directories
+                string cacheDirectory = "TestSymbolCache";
+                try
+                {
+                    Directory.Delete(cacheDirectory, recursive: true);
+                }
+                catch (DirectoryNotFoundException)
+                {
+                }
+ var inputFile = new SymbolStoreFile(pdb, "HelloWorld.pdb");
+ var generator = new PDBFileKeyGenerator(_tracer, inputFile);
+
+ IEnumerable<SymbolStoreKey> keys = generator.GetKeys(KeyTypeFlags.IdentityKey);
+ Assert.True(keys.Count() == 1);
+ SymbolStoreKey key = keys.First();
+
+ var backingStore = new TestSymbolStore(_tracer, key, inputFile);
+ var cacheSymbolStore = new CacheSymbolStore(_tracer, backingStore, cacheDirectory);
+
+ // This should put HelloWorld.pdb into the cache
+ SymbolStoreFile outputFile = await cacheSymbolStore.GetFile(key, CancellationToken.None);
+ Assert.True(outputFile != null);
+
+ // Should be the exact same instance given to TestSymbolStore
+ Assert.True(inputFile == outputFile);
+
+ // This should get it from the cache and not the backingStore
+ backingStore.Dispose();
+ outputFile = await cacheSymbolStore.GetFile(key, CancellationToken.None);
+ Assert.True(outputFile != null);
+
+ // Should NOT be the exact same SymbolStoreFile instance given to TestSymbolStore
+ Assert.True(inputFile != outputFile);
+
+ // Now make sure the output file from the cache is the same as the pdb we opened above
+ CompareStreams(pdb, outputFile.Stream);
+ }
+ }
+
+ [Fact]
+ public async Task DirectorySymbolStore()
+ {
+ using (Stream pdb = File.OpenRead("TestBinaries/dir1/System.Threading.Thread.pdb"))
+ {
+ var inputFile = new SymbolStoreFile(pdb, "System.Threading.Thread.pdb");
+ var generator = new PortablePDBFileKeyGenerator(_tracer, inputFile);
+
+ IEnumerable<SymbolStoreKey> keys = generator.GetKeys(KeyTypeFlags.IdentityKey);
+ Assert.True(keys.Count() == 1);
+ SymbolStoreKey key = keys.First();
+
+ var dir1store = new DirectorySymbolStore(_tracer, null, "TestBinaries/dir1");
+ var dir2store = new DirectorySymbolStore(_tracer, dir1store, "TestBinaries/dir2");
+
+ SymbolStoreFile outputFile = await dir2store.GetFile(key, CancellationToken.None);
+ Assert.True(outputFile != null);
+
+ // Should NOT be the exact same SymbolStoreFile instance
+ Assert.True(inputFile != outputFile);
+
+ CompareStreams(pdb, outputFile.Stream);
+ }
+ }
+
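+        // Note: this test downloads from the live msdl.microsoft.com symbol server,
+        // so it requires network access.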
+ [Fact]
+ public async Task HttpSymbolStore()
+ {
+ using (FileStream downloadStream = File.OpenRead("TestBinaries/dir1/System.Threading.Thread.dll"))
+ {
+ using (Stream compareStream = File.OpenRead("TestBinaries/dir1/System.Threading.Thread.pdb"))
+ {
+ await DownloadFile(downloadStream, compareStream, flags: KeyTypeFlags.SymbolKey);
+ }
+ }
+ }
+
+ private async Task DownloadFile(FileStream downloadStream, Stream compareStream, KeyTypeFlags flags)
+ {
+ SymbolStoreFile file = new SymbolStoreFile(downloadStream, downloadStream.Name);
+
+ Uri.TryCreate("https://msdl.microsoft.com/download/symbols/", UriKind.Absolute, out Uri uri);
+ SymbolStores.SymbolStore store = new HttpSymbolStore(_tracer, backingStore: null, uri);
+
+ var generator = new FileKeyGenerator(_tracer, file);
+
+ IEnumerable<SymbolStoreKey> keys = generator.GetKeys(flags);
+ Assert.True(keys.Count() > 0);
+
+ foreach (SymbolStoreKey key in keys)
+ {
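+                // Skip native image (.ni.pdb) keys; the public symbol server is not
+                // expected to have them.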
+                if (key.FullPathName.Contains(".ni.pdb"))
+                {
+                    continue;
+                }
+ using (SymbolStoreFile symbolFile = await store.GetFile(key, CancellationToken.None))
+ {
+ if (symbolFile != null)
+ {
+ Assert.True(downloadStream != symbolFile.Stream);
+ Assert.True(compareStream != symbolFile.Stream);
+
+ compareStream.Seek(0, SeekOrigin.Begin);
+ CompareStreams(compareStream, symbolFile.Stream);
+ }
+ }
+ }
+ }
+
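+        // Asserts that the two streams have identical length and byte-for-byte
+        // identical contents.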
+ private void CompareStreams(Stream stream1, Stream stream2)
+ {
+ Assert.True(stream1.Length == stream2.Length);
+
+ stream1.Position = 0;
+ stream2.Position = 0;
+
+            for (int i = 0; i < stream1.Length; i++)
+            {
+                int b1 = stream1.ReadByte();
+                int b2 = stream2.ReadByte();
+                Assert.True(b1 == b2);
+            }
+ }
+
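+        // Minimal in-memory backing store that serves a single preloaded file for one
+        // known key.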
+ sealed class TestSymbolStore : Microsoft.SymbolStore.SymbolStores.SymbolStore
+ {
+ readonly SymbolStoreKey _key;
+ SymbolStoreFile _file;
+
+ public TestSymbolStore(ITracer tracer, SymbolStoreKey key, SymbolStoreFile file)
+ : base(tracer)
+ {
+ _key = key;
+ _file = file;
+ }
+
+ protected override Task<SymbolStoreFile> GetFileInner(SymbolStoreKey key, CancellationToken token)
+ {
+ if (_file != null && key.Equals(_key))
+ {
+ _file.Stream.Position = 0;
+ return Task.FromResult(_file);
+ }
+ return Task.FromResult<SymbolStoreFile>(null);
+ }
+
+ public override void Dispose()
+ {
+ _file = null;
+ base.Dispose();
+ }
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using Xunit.Abstractions;
+
+namespace Microsoft.SymbolStore.Tests
+{
+ /// <summary>
+ /// Simple trace/logging support.
+ /// </summary>
+ internal sealed class Tracer : ITracer
+ {
+ private readonly ITestOutputHelper _output;
+
+ public Tracer(ITestOutputHelper output)
+ {
+ _output = output;
+ }
+
+ public void WriteLine(string message)
+ {
+ _output.WriteLine(message);
+ }
+
+ public void WriteLine(string format, params object[] arguments)
+ {
+ _output.WriteLine(format, arguments);
+ }
+
+ public void Information(string message)
+ {
+ _output.WriteLine(message);
+ }
+
+ public void Information(string format, params object[] arguments)
+ {
+ _output.WriteLine(format, arguments);
+ }
+
+ public void Warning(string message)
+ {
+ _output.WriteLine("WARNING: " + message);
+ }
+
+ public void Warning(string format, params object[] arguments)
+ {
+ _output.WriteLine("WARNING: " + format, arguments);
+ }
+
+ public void Error(string message)
+ {
+ _output.WriteLine("ERROR: " + message);
+ }
+
+ public void Error(string format, params object[] arguments)
+ {
+ _output.WriteLine("ERROR: " + format, arguments);
+ }
+
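+        // Verbose tracing is left as a no-op so it does not clutter the test output.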
+ public void Verbose(string message)
+ {
+ }
+
+ public void Verbose(string format, params object[] arguments)
+ {
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using Microsoft.FileFormats;
+
+namespace TestHelpers
+{
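+    /// <summary>
+    /// IAddressSpace wrapper that records the highest address read, allowing tests
+    /// to verify how much of the underlying stream a reader actually touched.
+    /// </summary>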
+ public class MaxStreamReadHelper : IAddressSpace
+ {
+ private readonly IAddressSpace _addressSpace;
+
+ public ulong Max { get; private set; }
+
+ public MaxStreamReadHelper(IAddressSpace address)
+ {
+ _addressSpace = address;
+ }
+
+ public ulong Length
+ {
+ get
+ {
+ return _addressSpace.Length;
+ }
+ }
+
+ public uint Read(ulong position, byte[] buffer, uint bufferOffset, uint count)
+ {
+ ulong max = position + count;
+ if (max > Max)
+ {
+ Max = max;
+ }
+ return _addressSpace.Read(position, buffer, bufferOffset, count);
+ }
+ }
+}
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.IO;
+using System.IO.Compression;
+using System.Linq;
+
+namespace TestHelpers
+{
+ public static class TestUtilities
+ {
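+        /// <summary>
+        /// Decompresses a gzipped test binary fully into memory and returns the
+        /// resulting seekable stream.
+        /// </summary>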
+ public static Stream OpenCompressedFile(string path)
+ {
+ MemoryStream ms = new();
+ using (FileStream fs = File.OpenRead(path))
+ {
+ using (GZipStream gs = new(fs, CompressionMode.Decompress))
+ {
+ gs.CopyTo(ms);
+ }
+ }
+ return ms;
+ }
+
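+        /// <summary>
+        /// Decompresses the gzipped source file to the destination path, reusing the
+        /// destination if a non-empty copy already exists, and returns it opened for
+        /// read/write with the position at the start.
+        /// </summary>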
+ public static Stream DecompressFile(string source, string destination)
+ {
+ bool fileExists = File.Exists(destination);
+ FileStream destStream = File.Open(destination, FileMode.OpenOrCreate, FileAccess.ReadWrite);
+ if (!fileExists || destStream.Length == 0)
+ {
+ using (FileStream s = File.OpenRead(source))
+ {
+ using (GZipStream gs = new(s, CompressionMode.Decompress))
+ {
+ gs.CopyTo(destStream);
+ }
+ destStream.Position = 0;
+ }
+ }
+ return destStream;
+ }
+
+ /// <summary>
+ /// Convert an array of bytes to a lower case hex string.
+ /// </summary>
+ /// <param name="bytes">array of bytes</param>
+ /// <returns>hex string</returns>
+ public static string ToHexString(byte[] bytes)
+ {
+ return string.Concat(bytes.Select(b => b.ToString("x2")));
+ }
+ }
+}
--- /dev/null
+<Project Sdk="Microsoft.NET.Sdk">
+ <PropertyGroup>
+ <TargetFramework>netstandard2.0</TargetFramework>
+    <NoWarn>1591;1701</NoWarn>
+ </PropertyGroup>
+
+ <ItemGroup>
+ <ProjectReference Include="$(MSBuildThisFileDirectory)..\..\Microsoft.FileFormats\Microsoft.FileFormats.csproj" />
+ </ItemGroup>
+</Project>