-TsaPublish $True'
artifactNames:
- 'Packages'
+
+ # Set up the release preparation stage.
+ - template: /eng/prepare-release.yml
--- /dev/null
+stages:
+- stage: PrepareReleaseStage
+ displayName: Release Preparation
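+ # The stage only depends on darc publishing for internal, non-PR builds of release/* branches.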
+ ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), startsWith(variables['Build.SourceBranch'], 'refs/heads/release/')) }}:
+ dependsOn:
+ - publish_using_darc
+ jobs:
+ - job: PrepareReleaseJob
+ displayName: Prepare release with Darc
+ pool:
+ vmImage: windows-latest
+ variables:
+ - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), startsWith(variables['Build.SourceBranch'], 'refs/heads/release/')) }}:
+ - group: DotNet-Diagnostics-Storage
+ - group: DotNet-DotNetStage-Storage
+ - group: Release-Pipeline
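+ # BARBuildId is produced by the publish_using_darc stage and identifies this build in the Build Asset Registry.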
+ - name: BARBuildId
+ value: $[ stageDependencies.publish_using_darc.setupMaestroVars.outputs['setReleaseVars.BARBuildId'] ]
+ steps:
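+ # Pull request builds only verify that the release tool builds and publish its binaries as a pipeline artifact.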
+ - ${{ if in(variables['Build.Reason'], 'PullRequest') }}:
+ - script: '$(Build.Repository.LocalPath)\dotnet.cmd build $(Build.Repository.LocalPath)\eng\release\DiagnosticsReleaseTool\DiagnosticsReleaseTool.csproj -c Release /bl'
+ workingDirectory: '$(System.ArtifactsDirectory)'
+ displayName: 'Build Manifest generation and asset publishing tool'
+ - task: PublishPipelineArtifact@1
+ inputs:
+ targetPath: '$(System.ArtifactsDirectory)'
+ publishLocation: 'pipeline'
+ artifact: 'DiagnosticsReleaseToolBin'
+ - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), startsWith(variables['Build.SourceBranch'], 'refs/heads/release/')) }}:
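+ # Full release preparation: acquire the build drop with Darc, stage the assets, and upload them to Azure Storage.
+ # The acquisition script below is assumed to need the .NET Core 3.1 runtime installed next to the repo-local SDK.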
+ - task: UseDotNet@2
+ displayName: 'Use .NET Core runtime 3.1.x'
+ inputs:
+ packageType: runtime
+ version: 3.1.x
+ installationPath: '$(Build.Repository.LocalPath)\.dotnet'
+ - task: PowerShell@2
+ displayName: 'DARC Gather build'
+ inputs:
+ targetType: filePath
+ filePath: '$(Build.Repository.LocalPath)/eng/release/Scripts/AcquireBuild.ps1'
+ arguments: >-
+ -BarBuildId "$(BARBuildId)"
+ -AzdoToken "$(dn-bot-dotnet-all-scopes)"
+ -MaestroToken "$(MaestroAccessToken)"
+ -GitHubToken "$(BotAccount-dotnet-bot-repo-PAT)"
+ -DownloadTargetPath "$(System.ArtifactsDirectory)\ReleaseTarget"
+ -SasSuffixes "$(dotnetclichecksumsmsrc-dotnet-read-list-sas-token),$(dotnetclimsrc-read-sas-token)"
+ -ReleaseVersion "$(Build.BuildNumber)"
+ workingDirectory: '$(Build.Repository.LocalPath)'
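+ # Run the release tool: lay out the acquired drop, upload the assets to the storage container, and write the publishing manifest.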
+ - script: >-
+ dotnet.cmd run --project $(Build.Repository.LocalPath)\eng\release\DiagnosticsReleaseTool\DiagnosticsReleaseTool.csproj -c Release
+ --
+ prepare-release
+ --input-drop-path "$(System.ArtifactsDirectory)\ReleaseTarget"
+ --tool-manifest "$(Build.Repository.LocalPath)\eng\release\tool-list.json"
+ --staging-directory "$(System.ArtifactsDirectory)\ReleaseStaging"
+ --release-name "$(Build.BuildNumber)"
+ --account-name "$(dotnet-diagnostics-storage-accountname)"
+ --account-key "$(dotnetstage-storage-key)"
+ --container-name "$(dotnet-diagnostics-container-name)"
+ --sas-valid-days "$(dotnet-diagnostics-storage-retentiondays)"
+ -v True
+ workingDirectory: '$(Build.Repository.LocalPath)\'
+ displayName: 'Manifest generation and asset publishing'
+ - task: PublishPipelineArtifact@1
+ inputs:
+ targetPath: '$(System.ArtifactsDirectory)\ReleaseStaging'
+ publishLocation: 'pipeline'
+ artifact: 'DiagnosticsRelease'
\ No newline at end of file
--- /dev/null
+using Azure;
+using Azure.Storage;
+using Azure.Storage.Blobs;
+using Azure.Storage.Blobs.Models;
+using Azure.Storage.Sas;
+using Microsoft.Extensions.Logging;
+using System;
+using System.Buffers;
+using System.IO;
+using System.Linq;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace ReleaseTool.Core
+{
+ public class AzureBlobBublisher : IPublisher
+ {
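+ // Clock-skew allowance applied when computing access policy start and expiry times.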
+ private const int ClockSkewSec = 15 * 60;
+ private const int MaxRetries = 15;
+ private const int MaxFullLoopRetries = 5;
+ private readonly TimeSpan FullLoopRetryDelay = TimeSpan.FromSeconds(1);
+ private const string AccessPolicyDownloadId = "DownloadDrop";
+
+ private readonly string _accountName;
+ private readonly string _accountKey;
+ private readonly string _containerName;
+ private readonly string _releaseName;
+ private readonly int _sasValidDays;
+ private readonly ILogger _logger;
+
+ private BlobContainerClient _client;
+
+ private Uri AccountBlobUri
+ {
+ get
+ {
+ return new Uri(FormattableString.Invariant($"https://{_accountName}.blob.core.windows.net"));
+ }
+ }
+
+ private StorageSharedKeyCredential AccountCredential
+ {
+ get
+ {
+ StorageSharedKeyCredential credential = new StorageSharedKeyCredential(_accountName, _accountKey);
+ return credential;
+ }
+ }
+
+ private BlobClientOptions BlobOptions
+ {
+ get
+ {
+ // The Azure SDK client has its own built-in retry logic.
+ // We allow more and longer retries because this is a publishing
+ // operation that happens once and can be allowed to take a very
+ // long time. We have a high tolerance for slow operations and a
+ // low tolerance for failure.
+ return new BlobClientOptions()
+ {
+ Retry =
+ {
+ MaxRetries = MaxRetries,
+ }
+ };
+ }
+ }
+
+ public AzureBlobBublisher(string accountName, string accountKey, string containerName, string releaseName, int sasValidDays, ILogger logger)
+ {
+ _accountName = accountName;
+ _accountKey = accountKey;
+ _containerName = containerName;
+ _releaseName = releaseName;
+ _sasValidDays = sasValidDays;
+ _logger = logger;
+ }
+
+ public void Dispose()
+ {
+ }
+
+ public async Task<string> PublishFileAsync(FileMapping fileMap, CancellationToken ct)
+ {
+ Uri result = null;
+ int retriesLeft = MaxFullLoopRetries;
+ TimeSpan loopDelay = FullLoopRetryDelay;
+ bool completed = false;
+
+ do
+ {
+ _logger.LogInformation($"Attempting to publish {fileMap.RelativeOutputPath}, {retriesLeft} tries left.");
+ try
+ {
+ BlobContainerClient client = await GetClient(ct);
+ if (client == null)
+ {
+ // client creation failed, return
+ return null;
+ }
+
+ using var srcStream = new FileStream(fileMap.LocalSourcePath, FileMode.Open, FileAccess.Read);
+
+ BlobClient blobClient = client.GetBlobClient(GetBlobName(_releaseName, fileMap.RelativeOutputPath));
+
+ await blobClient.UploadAsync(srcStream, overwrite: true, ct);
+
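+ // Mint a SAS URI that references the container's stored access policy;
+ // the policy (not the token) carries the read permission and expiry.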
+ BlobSasBuilder sasBuilder = new BlobSasBuilder()
+ {
+ BlobContainerName = client.Name,
+ BlobName = blobClient.Name,
+ Identifier = AccessPolicyDownloadId,
+ Protocol = SasProtocol.Https
+ };
+ Uri accessUri = blobClient.GenerateSasUri(sasBuilder);
+
+ using BlobDownloadStreamingResult blobStream = (await blobClient.DownloadStreamingAsync(cancellationToken: ct)).Value;
+ srcStream.Position = 0;
+ completed = await VerifyFileStreamsMatchAsync(srcStream, blobStream, ct);
+ // A verification mismatch throws no exception, so spend a retry to avoid looping forever.
+ if (!completed) { retriesLeft--; }
+
+ result = accessUri;
+ }
+ catch (IOException ioEx) when (!(ioEx is PathTooLongException))
+ {
+ _logger.LogWarning(ioEx, $"Failed to publish {fileMap.LocalSourcePath}, retries remaining: {retriesLeft}.");
+
+ // IO failures are treated as transient; retry with exponential backoff.
+ retriesLeft--;
+ loopDelay *= 2;
+
+ if (retriesLeft > 0)
+ {
+ await Task.Delay(loopDelay, ct);
+ }
+ }
+ catch (Exception ex)
+ {
+ // The Azure SDK calls retry internally, so treat any other exception as fatal and abort publishing.
+ _logger.LogWarning(ex, $"Failed to publish {fileMap.LocalSourcePath}, unexpected error, aborting.");
+ return null;
+ }
+ } while (retriesLeft > 0 && !completed);
+
+ return result?.OriginalString;
+ }
+
+ private static string GetBlobName(string releaseName, string relativeFilePath)
+ {
+ return FormattableString.Invariant($"{releaseName}/{relativeFilePath}");
+ }
+
+ private async Task<BlobContainerClient> GetClient(CancellationToken ct)
+ {
+ if (_client == null)
+ {
+ BlobServiceClient serviceClient = new BlobServiceClient(AccountBlobUri, AccountCredential, BlobOptions);
+ _logger.LogInformation($"Attempting to connect to {serviceClient.Uri} to store blobs.");
+
+ BlobContainerClient newClient;
+ int attemptCt = 0;
+ do
+ {
+ try
+ {
+ newClient = serviceClient.GetBlobContainerClient(_containerName);
+ if (!(await newClient.ExistsAsync(ct)).Value)
+ {
+ newClient = (await serviceClient.CreateBlobContainerAsync(_containerName, PublicAccessType.None, metadata: null, ct));
+ }
+ }
+ catch (Exception ex)
+ {
+ _logger.LogWarning(ex, $"Failed to create or access {_containerName}, retrying with new name.");
+ continue;
+ }
+
+ try
+ {
+ DateTime baseTime = DateTime.UtcNow;
+ // Add the new (or update the existing) "download" access policy on the container.
+ // SAS tokens are minted against this policy instead of embedding their own
+ // permissions and expiry, so access can later be adjusted by editing the policy.
+ BlobSignedIdentifier downloadPolicyIdentifier = new BlobSignedIdentifier()
+ {
+ Id = AccessPolicyDownloadId,
+ AccessPolicy = new BlobAccessPolicy()
+ {
+ Permissions = "r",
+ PolicyStartsOn = new DateTimeOffset(baseTime.AddSeconds(-ClockSkewSec)),
+ PolicyExpiresOn = new DateTimeOffset(baseTime.AddDays(_sasValidDays).AddSeconds(ClockSkewSec)),
+ }
+ };
+ _logger.LogInformation($"Writing download access policy: {AccessPolicyDownloadId} to {_containerName}.");
+ await newClient.SetAccessPolicyAsync(PublicAccessType.None, new BlobSignedIdentifier[] { downloadPolicyIdentifier }, cancellationToken: ct);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogWarning(ex, $"Failed to write access policy for {_containerName}, retrying.");
+ continue;
+ }
+
+ _logger.LogInformation($"Container {_containerName} is ready.");
+ _client = newClient;
+ break;
+ } while (++attemptCt < MaxFullLoopRetries);
+ }
+
+ if (_client == null)
+ {
+ _logger.LogError("Failed to create or access container for publishing drop.");
+ }
+ return _client;
+ }
+
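+ // Compares the uploaded blob against the local file chunk by chunk,
+ // carrying over any bytes left unmatched when the two streams return
+ // reads of different lengths.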
+ private async Task<bool> VerifyFileStreamsMatchAsync(FileStream srcStream, BlobDownloadStreamingResult destBlobDownloadStream, CancellationToken ct)
+ {
+ if (srcStream.Length != destBlobDownloadStream.Details.ContentLength)
+ {
+ return false;
+ }
+
+ using Stream destStream = destBlobDownloadStream.Content;
+
+ using IMemoryOwner<byte> memOwnerSrc = MemoryPool<byte>.Shared.Rent(minBufferSize: 16_384);
+ using IMemoryOwner<byte> memOwnerDest = MemoryPool<byte>.Shared.Rent(minBufferSize: 16_384);
+ Memory<byte> memSrc = memOwnerSrc.Memory;
+ Memory<byte> memDest = memOwnerDest.Memory;
+
+ int bytesProcessed = 0;
+ int srcBytesRemainingFromPrevRead = 0;
+ int destBytesRemainingFromPrevRead = 0;
+
+ while (bytesProcessed != srcStream.Length)
+ {
+ int srcBytesRead = await srcStream.ReadAsync(memSrc.Slice(srcBytesRemainingFromPrevRead), ct);
+ srcBytesRead += srcBytesRemainingFromPrevRead;
+ int destBytesRead = await destStream.ReadAsync(memDest.Slice(destBytesRemainingFromPrevRead), ct);
+ destBytesRead += destBytesRemainingFromPrevRead;
+
+ int bytesToCompare = Math.Min(srcBytesRead, destBytesRead);
+
+ if (bytesToCompare == 0)
+ {
+ return false;
+ }
+
+ bytesProcessed += bytesToCompare;
+ srcBytesRemainingFromPrevRead = srcBytesRead - bytesToCompare;
+ destBytesRemainingFromPrevRead = destBytesRead - bytesToCompare;
+
+ bool isChunkEquals = memDest.Span.Slice(0, bytesToCompare).SequenceEqual(memSrc.Span.Slice(0, bytesToCompare));
+ if (!isChunkEquals)
+ {
+ return false;
+ }
+
+ memSrc.Slice(bytesToCompare, srcBytesRemainingFromPrevRead).CopyTo(memSrc);
+ memDest.Slice(bytesToCompare, destBytesRemainingFromPrevRead).CopyTo(memDest);
+ }
+
+ return true;
+ }
+ }
+}
\ No newline at end of file
public sealed class NugetLayoutWorker : PassThroughLayoutWorker
{
public NugetLayoutWorker(string stagingPath) : base(
- shouldHandleFileFunc: ShouldHandleFile,
- getRelativePublishPathFromFileFunc: GetNugetPublishRelativePath,
- getMetadataForFileFunc: (_) => new FileMetadata(FileClass.Nuget),
+ shouldHandleFileFunc: static file => file.Extension == ".nupkg" && !file.Name.EndsWith(".symbols.nupkg"),
+ getRelativePublishPathFromFileFunc: static file => Helpers.GetDefaultPathForFileCategory(file, FileClass.Nuget),
+ getMetadataForFileFunc: static file => Helpers.GetDefaultFileMetadata(file, FileClass.Nuget),
stagingPath
- ) {}
-
- private static bool ShouldHandleFile(FileInfo file) => file.Extension == ".nupkg" && !file.Name.EndsWith(".symbols.nupkg");
- private static string GetNugetPublishRelativePath(FileInfo file) => FileMetadata.GetDefaultCatgoryForClass(FileClass.Nuget);
+ ) {}
}
}
\ No newline at end of file
string stagingPath)
{
- _shouldHandleFileFunc = shouldHandleFileFunc ?? (_ => true);
+ _shouldHandleFileFunc = shouldHandleFileFunc ?? (static _ => true);
- _getRelativePublishPathFromFileFunc = getRelativePublishPathFromFileFunc ?? (file => Path.Combine(FileMetadata.GetDefaultCatgoryForClass(FileClass.Unknown), file.Name));
+ _getRelativePublishPathFromFileFunc = getRelativePublishPathFromFileFunc ?? (static file => Helpers.GetDefaultPathForFileCategory(file, FileClass.Unknown));
- _getMetadataForFileFunc = getMetadataForFileFunc ?? (_ => new FileMetadata(FileClass.Unknown));
+ _getMetadataForFileFunc = getMetadataForFileFunc ?? (static file => Helpers.GetDefaultFileMetadata(file, FileClass.Unknown));
_stagingPath = stagingPath;
}
return new LayoutWorkerResult(LayoutResultStatus.FileNotHandled);
}
- string publishReleasePath = Path.Combine(_getRelativePublishPathFromFileFunc(file), file.Name);
+ string publishReleasePath = _getRelativePublishPathFromFileFunc(file);
string localPath = file.FullName;
--- /dev/null
+using System;
+using System.IO;
+
+namespace ReleaseTool.Core
+{
+ static class Helpers
+ {
+ internal static string GetDefaultPathForFileCategory(FileInfo file, FileClass fileClass)
+ {
+ string category = FileMetadata.GetDefaultCatgoryForClass(fileClass);
+ return FormattableString.Invariant($"{category}/{file.Name}");
+ }
+
+ internal static FileMetadata GetDefaultFileMetadata(FileInfo fileInfo, FileClass fileClass)
+ {
+ string sha512Hash = GetSha512(fileInfo);
+ FileMetadata result = new FileMetadata(
+ fileClass,
+ FileMetadata.GetDefaultCatgoryForClass(fileClass),
+ sha512: sha512Hash);
+ return result;
+ }
+
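+ // Returns the SHA-512 of the file contents as an uppercase hex string.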
+ internal static string GetSha512(FileInfo fileInfo)
+ {
+ using FileStream fileReadStream = fileInfo.OpenRead();
+ using var sha = System.Security.Cryptography.SHA512.Create();
+ byte[] hashValueBytes = sha.ComputeHash(fileReadStream);
+ return Convert.ToHexString(hashValueBytes);
+ }
+ }
+}
\ No newline at end of file
public class SymbolPackageLayoutWorker : PassThroughLayoutWorker
{
public SymbolPackageLayoutWorker(string stagingPath) : base(
- shouldHandleFileFunc: ShouldHandleFile,
- getRelativePublishPathFromFileFunc: GetSymbolPackagePublishRelativePath,
- getMetadataForFileFunc: (_) => new FileMetadata(FileClass.SymbolPackage),
+ shouldHandleFileFunc: static file => file.Name.EndsWith(".symbols.nupkg"),
+ getRelativePublishPathFromFileFunc: static file => Helpers.GetDefaultPathForFileCategory(file, FileClass.SymbolPackage),
+ getMetadataForFileFunc: static file => Helpers.GetDefaultFileMetadata(file, FileClass.SymbolPackage),
stagingPath
) {}
-
- private static bool ShouldHandleFile(FileInfo file) => file.Name.EndsWith(".symbols.nupkg");
- private static string GetSymbolPackagePublishRelativePath(FileInfo file) => FileMetadata.GetDefaultCatgoryForClass(FileClass.SymbolPackage);
}
}
\ No newline at end of file
_shouldHandleFileFunc = shouldHandleFileFunc ?? (file => file.Extension == ".zip");
- _getRelativePathFromZipAndInnerFileFunc = getRelativePathFromZipAndInnerFileFunc ?? ((zipFile, innerFile) => Path.Combine(zipFile.Name, innerFile.Name));
+ Func<FileInfo, FileInfo, string> defaultgetRelPathFunc = static (zipFile, innerFile) =>
+ FormattableString.Invariant($"{Path.GetFileNameWithoutExtension(zipFile.Name)}/{innerFile.Name}");
- _getMetadataForInnerFileFunc = getMetadataForInnerFileFunc ?? ((_, _) => new FileMetadata(FileClass.Blob));
+ _getRelativePathFromZipAndInnerFileFunc = getRelativePathFromZipAndInnerFileFunc ?? defaultgetRelPathFunc;
+
+ _getMetadataForInnerFileFunc = getMetadataForInnerFileFunc ?? (static (_, innerFile) => Helpers.GetDefaultFileMetadata(innerFile, FileClass.Blob));
_stagingPath = stagingPath;
}
}
string relativePath = _getRelativePathFromZipAndInnerFileFunc(file, extractedFile);
- relativePath = Path.Combine(relativePath, extractedFile.Name);
-
string localPath = extractedFile.FullName;
if (_stagingPath is not null)
public bool ShouldVerifyManifest { get; }
public DirectoryInfo DropPath { get; }
public DirectoryInfo StagingDirectory { get; }
- public string PublishPath { get; }
+ public string ReleaseName { get; }
+ public string AccountName { get; }
+ public string AccountKey { get; }
+ public string ContainerName { get; }
+ public int SasValidDays { get; }
- public Config(FileInfo toolManifest, bool verifyToolManifest,
- DirectoryInfo inputDropPath, DirectoryInfo stagingDirectory, string publishPath)
+ public Config(
+ FileInfo toolManifest,
+ bool verifyToolManifest,
+ DirectoryInfo inputDropPath,
+ DirectoryInfo stagingDirectory,
+ string releaseName,
+ string accountName,
+ string accountKey,
+ string containerName,
+ int sasValidDays)
{
ToolManifest = toolManifest;
ShouldVerifyManifest = verifyToolManifest;
DropPath = inputDropPath;
StagingDirectory = stagingDirectory;
- PublishPath = publishPath;
+ ReleaseName = releaseName;
+ AccountName = accountName;
+ AccountKey = accountKey;
+ ContainerName = containerName;
+ SasValidDays = sasValidDays;
}
}
}
\ No newline at end of file
{
public readonly FileClass Class { get; }
- public readonly string AssetCategory { get; }
+ public readonly string AssetCategory { get; }
public readonly bool ShouldPublishToCdn { get; }
// TODO: Add a metadata bag for Key,Value pairs.
- public FileMetadata(FileClass fileClass)
- : this(fileClass, GetDefaultCatgoryForClass(fileClass)) {}
+ public FileMetadata(FileClass fileClass, string assetCategory, string sha512)
+ : this(fileClass, assetCategory, shouldPublishToCdn: false, rid: "any", sha512: sha512) {}
- public FileMetadata(FileClass fileClass, string assetCategory)
- : this(fileClass, assetCategory, shouldPublishToCdn: false, rid: "any", sha512: null) {}
-
- public FileMetadata(FileClass fileClass, string assetCategory, bool shouldPublishToCdn, string rid, string sha512)
+ public FileMetadata(FileClass fileClass, string assetCategory, bool shouldPublishToCdn, string rid, string sha512)
{
if (string.IsNullOrEmpty(assetCategory))
{
using System;
+using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
}
}
- internal ReleaseMetadata GetDropMetadata(string repoUrl)
+ internal ReleaseMetadata GetDropMetadataForSingleRepoVariants(IEnumerable<string> repoUrls)
{
string releaseVersion;
using (Stream darcReleaseFile = File.OpenRead(ReleaseFilePath))
// TODO: Schema validation.
JsonElement buildList = jsonDoc.RootElement.GetProperty("builds");
- // TODO: This should be using Uri.Compare...
+ // Builds may come from either the public repo or its internal mirror, so compare against each known repo URL.
var repoBuilds = buildList.EnumerateArray()
- .Where(build => build.GetProperty("repo").GetString() == repoUrl);
+ .Where(build =>
+ {
+ var buildUri = new Uri(build.GetProperty("repo").GetString());
+ return repoUrls.Any(repoUrl => buildUri == new Uri(repoUrl));
+ });
if (repoBuilds.Count() != 1)
{
throw new InvalidOperationException(
- $"There's either no build for {repoUrl} or more than one. Can't retrieve metadata.");
+ $"There's either no build for requested repos or more than one. Can't retrieve metadata.");
}
- JsonElement build = repoBuilds.ElementAt(0);
+ JsonElement build = repoBuilds.First();
- // TODO: If any of these were to fail...
var releaseMetadata = new ReleaseMetadata(
releaseVersion: releaseVersion,
- repoUrl: repoUrl,
+ repoUrl: build.GetProperty("repo").GetString(),
branch: build.GetProperty("branch").GetString(),
commit: build.GetProperty("commit").GetString(),
dateProduced: build.GetProperty("produced").GetString(),
}
}
- internal DirectoryInfo GetShippingDirectoryForProject(string projectName)
+ internal DirectoryInfo GetShippingDirectoryForSingleProjectVariants(IEnumerable<string> projectNames)
{
using (Stream darcManifest = File.OpenRead(ReleaseFilePath))
using (JsonDocument jsonDoc = JsonDocument.Parse(darcManifest))
// pretty stable schema.
JsonElement productList = jsonDoc.RootElement[0].GetProperty("products");
- var directoryList = productList.EnumerateArray()
- .Where(prod => prod.GetProperty("name").GetString() == projectName)
- .Select(prod => prod.GetProperty("fileshare"));
+ var matchingProducts = productList.EnumerateArray()
+ .Where(prod => projectNames.Contains(prod.GetProperty("name").GetString()));
- if (directoryList.Count() != 1)
+ if (matchingProducts.Count() != 1)
{
throw new InvalidOperationException(
- $"There's either no product named {projectName} or more than one in the drop.");
+ $"There's either no product under the provided names or more than one in the drop.");
}
- return new DirectoryInfo(directoryList.ElementAt(0).GetString());
+ return new DirectoryInfo(matchingProducts.First().GetProperty("fileshare").GetString());
}
}
}
using System.Collections.Generic;
using System.IO;
using System.Linq;
+using System.Text.Encodings.Web;
using System.Text.Json;
+using System.Text.Json.Serialization;
using System.Text.RegularExpressions;
using DiagnosticsReleaseTool.Util;
using Microsoft.Extensions.Logging;
{
var stream = new MemoryStream();
- using (var writer = new Utf8JsonWriter(stream, new JsonWriterOptions{ Indented = true }))
+ var jro = new JsonWriterOptions
+ {
+ Indented = true,
+ Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping
+ };
+
+ using (var writer = new Utf8JsonWriter(stream, jro))
{
writer.WriteStartObject();
writer.WriteString("Rid", fileToRelease.FileMetadata.Rid);
writer.WriteString("PublishRelativePath", fileToRelease.FileMap.RelativeOutputPath);
writer.WriteString("PublishedPath", fileToRelease.PublishUri);
+ writer.WriteString("Sha512", fileToRelease.FileMetadata.Sha512);
writer.WriteEndObject();
}
writer.WriteStartObject();
writer.WriteString("PublishRelativePath", fileToRelease.FileMap.RelativeOutputPath);
writer.WriteString("PublishedPath", fileToRelease.PublishUri);
+ writer.WriteString("Sha512", fileToRelease.FileMetadata.Sha512);
writer.WriteEndObject();
}
var options = new JsonSerializerOptions
{
- IgnoreNullValues = true,
+ DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
WriteIndented = true
};
private string GenerateSubpath(FileReleaseData fileToRelease)
{
var fi = new FileInfo(fileToRelease.FileMap.LocalSourcePath);
- using var hash = System.Security.Cryptography.SHA256Managed.Create();
+ using var hash = System.Security.Cryptography.SHA256.Create();
var enc = System.Text.Encoding.UTF8;
byte[] hashResult = hash.ComputeHash(enc.GetBytes(fileToRelease.FileMap.RelativeOutputPath));
string pathHash = BitConverter.ToString(hashResult).Replace("-", String.Empty);
{
if(!match.Groups.TryGetValue("metadata", out Group metadataGroup))
{
- // Give up if the catpturing failed
+ // Give up if the capturing failed
return null;
}
{
CommandHandler.Create<Config, bool, CancellationToken>(DiagnosticsReleaseRunner.PrepareRelease),
// Inputs
- InputDropPathOption(), ToolManifestPathOption(),
+ InputDropPathOption(), ToolManifestPathOption(), ReleaseNameOption(),
// Toggles
ToolManifestVerificationOption(), DiagnosticLoggingOption(),
// Outputs
- StagingPathOption(), PublishPathOption()
+ StagingPathOption(),
+ AzureStorageAccountNameOption(), AzureStorageAccountKeyOption(), AzureStorageContainerNameOption(), AzureStorageSasExpirationOption()
};
+
private static Option<bool> DiagnosticLoggingOption() =>
new Option<bool>(
aliases: new[] { "-v", "--verbose" },
IsRequired = true
}.ExistingOnly();
+ private static Option<string> ReleaseNameOption() =>
+ new Option<string>(
+ aliases: new[] { "-r", "--release-name" },
+ description: "Name of this release.")
+ {
+ IsRequired = true,
+ };
+
private static Option StagingPathOption() =>
new Option<DirectoryInfo>(
aliases: new[] { "--staging-directory", "-s" },
Path.Join(Path.GetTempPath(), Path.GetRandomFileName())))
.LegalFilePathsOnly();
- private static Option<string> PublishPathOption() =>
+ private static Option<string> AzureStorageAccountNameOption() =>
new Option<string>(
- aliases: new[] { "-o", "--publish-path" },
- description: "Path to publish the generated layout and publishing manifest to.")
+ aliases: new[] { "-n", "--account-name" },
+ description: "Storage account name, must be in public azure cloud.")
{
- IsRequired = true
+ IsRequired = true,
};
+
+ private static Option<string> AzureStorageAccountKeyOption() =>
+ new Option<string>(
+ aliases: new[] { "-k", "--account-key" },
+ description: "Storage account key, in base 64 format.")
+ {
+ IsRequired = true,
+ };
+
+ private static Option<string> AzureStorageContainerNameOption() =>
+ new Option<string>(
+ aliases: new[] { "-c", "--container-name" },
+ description: "Storage account container name where the files will be uploaded.")
+ {
+ IsRequired = true,
+ };
+
+ private static Option<int> AzureStorageSasExpirationOption() =>
+ new Option<int>(
+ aliases: new[] { "--sas-valid-days" },
+ description: "Number of days to allow access to the blobs via the provided SAS URIs.",
+ getDefaultValue: () => 1);
}
}
var layoutWorkerList = new List<ILayoutWorker>
{
// TODO: We may want to inject a logger.
- new NugetLayoutWorker(stagingPath: null),
- new SymbolPackageLayoutWorker(stagingPath: null),
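+ // Stage handled files under the release staging directory rather than leaving them in the drop.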
+ new NugetLayoutWorker(stagingPath: releaseConfig.StagingDirectory.FullName),
+ new SymbolPackageLayoutWorker(stagingPath: releaseConfig.StagingDirectory.FullName),
new ZipLayoutWorker(
shouldHandleFileFunc: DiagnosticsRepoHelpers.IsBundledToolArchive,
getRelativePathFromZipAndInnerFileFunc: DiagnosticsRepoHelpers.GetToolPublishRelativePath,
getMetadataForInnerFileFunc: DiagnosticsRepoHelpers.GetMetadataForToolFile,
- stagingPath: null
+ stagingPath: releaseConfig.StagingDirectory.FullName
)
};
}
// TODO: Probably should use BAR ID instead as an identifier for the metadata to gather.
- ReleaseMetadata releaseMetadata = darcLayoutHelper.GetDropMetadata(DiagnosticsRepoHelpers.RepositoryName);
- DirectoryInfo basePublishDirectory = darcLayoutHelper.GetShippingDirectoryForProject(DiagnosticsRepoHelpers.ProductName);
+ ReleaseMetadata releaseMetadata = darcLayoutHelper.GetDropMetadataForSingleRepoVariants(DiagnosticsRepoHelpers.RepositoryUrls);
+ DirectoryInfo basePublishDirectory = darcLayoutHelper.GetShippingDirectoryForSingleProjectVariants(DiagnosticsRepoHelpers.ProductNames);
string publishManifestPath = Path.Combine(releaseConfig.StagingDirectory.FullName, ManifestName);
- IPublisher releasePublisher = new FileSharePublisher(releaseConfig.PublishPath);
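+ // Publish to Azure Blob Storage; download access is granted through SAS URIs that are recorded in the manifest.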
+ IPublisher releasePublisher = new AzureBlobBublisher(releaseConfig.AccountName, releaseConfig.AccountKey, releaseConfig.ContainerName, releaseConfig.ReleaseName, releaseConfig.SasValidDays, logger);
IManifestGenerator manifestGenerator = new DiagnosticsManifestGenerator(releaseMetadata, releaseConfig.ToolManifest, logger);
using var diagnosticsRelease = new Release(
<PropertyGroup>
<OutputType>Exe</OutputType>
- <TargetFramework>net5.0</TargetFramework>
+ <TargetFramework>net6.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
</ItemGroup>
<ItemGroup>
- <PackageReference Include="Microsoft.Extensions.Configuration" Version="5.0.0-rc.2.20475.5" />
- <PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="5.0.0-rc.2.20475.5" />
- <PackageReference Include="Microsoft.Extensions.Logging" Version="5.0.0-rc.2.20475.5" />
- <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="5.0.0-rc.2.20475.5" />
- <PackageReference Include="Microsoft.Extensions.Logging.Configuration" Version="5.0.0-rc.2.20475.5" />
- <PackageReference Include="Microsoft.Extensions.Logging.Console" Version="5.0.0-rc.2.20475.5" />
+ <PackageReference Include="Microsoft.Extensions.Configuration" Version="[5.0.0]" />
+ <PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="[5.0.0]" />
+ <PackageReference Include="Microsoft.Extensions.Logging" Version="[5.0.0]" />
+ <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="[5.0.0]" />
+ <PackageReference Include="Microsoft.Extensions.Logging.Configuration" Version="[5.0.0]" />
+ <PackageReference Include="Microsoft.Extensions.Logging.Console" Version="[5.0.0]" />
+ <PackageReference Include="Azure.Storage.Blobs" Version="[12.9.1]" />
<PackageReference Include="System.CommandLine" Version="2.0.0-beta1.20467.2" />
</ItemGroup>
{
public static class DiagnosticsRepoHelpers
{
- public const string ProductName = "diagnostics";
- public const string RepositoryName = "https://github.com/dotnet/diagnostics";
+ public static readonly string[] ProductNames = new []{ "diagnostics", "dotnet-diagnostics" };
+ public static readonly string[] RepositoryUrls = new [] { "https://github.com/dotnet/diagnostics", "https://dev.azure.com/dnceng/internal/_git/dotnet-diagnostics" };
public static string BundleToolsPathInDrop => System.IO.Path.Combine("diagnostics", "bundledtools");
public const string BundledToolsPrefix = "diagnostic-tools-";
public const string BundledToolsCategory = "ToolBundleAssets";
_ => "UnknownAssets"
};
- string sha512 = null;
+ string sha512 = GetSha512(fileInZip.FullName);
string rid = GetRidFromBundleZip(zipFile);
- if (category == BundledToolsCategory)
- {
- sha512 = GetSha512(fileInZip.FullName);
- }
-
return new FileMetadata(
FileClass.Blob,
assetCategory: category,
public static string GetToolPublishRelativePath(FileInfo zipFile, FileInfo fileInZip)
{
- return Path.Combine(BundledToolsCategory, GetRidFromBundleZip(zipFile));
+ return FormattableString.Invariant($"{BundledToolsCategory}/{GetRidFromBundleZip(zipFile)}/{fileInZip.Name}");
}
public static bool IsBundledToolArchive(FileInfo file)
public static string GetSha512(string filePath)
{
- using (FileStream stream = System.IO.File.OpenRead(filePath))
- {
- var sha = new System.Security.Cryptography.SHA512Managed();
- byte[] checksum = sha.ComputeHash(stream);
- return BitConverter.ToString(checksum).Replace("-", String.Empty);
- }
+ using FileStream stream = System.IO.File.OpenRead(filePath);
+ using var sha = System.Security.Cryptography.SHA512.Create();
+ byte[] checksum = sha.ComputeHash(stream);
+ return Convert.ToHexString(checksum);
}
}
}
\ No newline at end of file
Invoke-WebRequest -Uri $nugetPack.PublishedPath -OutFile (New-Item -Path $packagePath -Force)
$progressPreference = 'Continue'
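+ # Verify the download against the Sha512 recorded in the manifest, when present.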
+ if ($nugetPack.PSObject.Properties.Name.Contains("Sha512") -and $(Get-FileHash -Algorithm sha512 $packagePath).Hash -ne $nugetPack.Sha512) {
+ Write-Host "Sha512 verification failed for $($nugetPack.PublishRelativePath)."
+ $failedToPublish++
+ continue
+ }
+
Write-Host "Publishing $packagePath."
& "$PSScriptRoot/../../../dotnet.cmd" nuget push $packagePath --source $FeedEndpoint --api-key $FeedPat
if ($LastExitCode -ne 0)