Add CLR JIT rolling build (#42507)
author     Bruce Forstall <brucefo@microsoft.com>
           Mon, 21 Sep 2020 17:11:08 +0000 (10:11 -0700)
committer  GitHub <noreply@github.com>
           Mon, 21 Sep 2020 17:11:08 +0000 (10:11 -0700)
Build just the JIT, and upload the resulting JIT DLL to
CLR JIT Azure Storage.

This will be used as a baseline JIT for developer use, such
as assembly diffs.

Linux arm and Linux arm64 are disabled for now, until we can
determine how to properly configure Python on those build machines
with the azure-storage-blob package required to upload the JIT
to Azure Blob Storage.
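
As an example of the intended developer workflow (hypothetical hash
and target directory; the subcommands and arguments are those of the
jitrollingbuild.py script added below):

    python3 src/coreclr/scripts/jitrollingbuild.py list -git_hash <baseline_hash>
    python3 src/coreclr/scripts/jitrollingbuild.py download -git_hash <baseline_hash> -target_dir <baseline_jit_dir>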

eng/pipelines/coreclr/jitrollingbuild.yml [new file with mode: 0644]
eng/pipelines/coreclr/templates/build-jit-job.yml [new file with mode: 0644]
src/coreclr/scripts/jitrollingbuild.py [new file with mode: 0644]

diff --git a/eng/pipelines/coreclr/jitrollingbuild.yml b/eng/pipelines/coreclr/jitrollingbuild.yml
new file mode 100644 (file)
index 0000000..68be50e
--- /dev/null
@@ -0,0 +1,32 @@
+trigger:
+  batch: false
+  branches:
+    include:
+    - master
+  paths:
+    include:
+    - src/coreclr/src/jit/*
+
+pr: none
+
+jobs:
+#
+# Checkout repository
+#
+- template: /eng/pipelines/common/checkout-job.yml
+
+- template: /eng/pipelines/common/platform-matrix.yml
+  parameters:
+    jobTemplate: /eng/pipelines/coreclr/templates/build-jit-job.yml
+    buildConfig: checked
+    platforms:
+    - OSX_x64
+    # Currently, the Linux arm/arm64 machines don't have the Python 'pip3' tool, nor the azure-storage-blob package
+    # that is required to upload the JIT to Azure Storage, so these platforms are disabled. Once we figure out how
+    # to get Python properly configured, they can be re-enabled.
+    # - Linux_arm
+    # - Linux_arm64
+    - Linux_x64
+    - Windows_NT_x64
+    - Windows_NT_x86
+    - Windows_NT_arm64
diff --git a/eng/pipelines/coreclr/templates/build-jit-job.yml b/eng/pipelines/coreclr/templates/build-jit-job.yml
new file mode 100644 (file)
index 0000000..e9738aa
--- /dev/null
@@ -0,0 +1,121 @@
+parameters:
+  archType: ''
+  buildConfig: ''
+  container: ''
+  crossrootfsDir: ''
+  osGroup: ''
+  osSubgroup: ''
+  pool: ''
+  stagedBuild: false
+  timeoutInMinutes: ''
+  variables: {}
+
+### Product build
+jobs:
+- template: xplat-pipeline-job.yml
+  parameters:
+    buildConfig: ${{ parameters.buildConfig }}
+    archType: ${{ parameters.archType }}
+    osGroup: ${{ parameters.osGroup }}
+    osSubgroup: ${{ parameters.osSubgroup }}
+    helixType: 'build/product/'
+    enableMicrobuild: true
+    stagedBuild: ${{ parameters.stagedBuild }}
+    pool: ${{ parameters.pool }}
+
+    # Compute job name from template parameters
+    name: ${{ format('coreclr_jit_build_{0}{1}_{2}_{3}', parameters.osGroup, parameters.osSubgroup, parameters.archType, parameters.buildConfig) }}
+    displayName: ${{ format('CoreCLR JIT Build {0}{1} {2} {3}', parameters.osGroup, parameters.osSubgroup, parameters.archType, parameters.buildConfig) }}
+
+    # Run all steps in the container.
+    # Note that the containers are defined in platform-matrix.yml
+    container: ${{ parameters.container }}
+
+    timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+
+    crossrootfsDir: ${{ parameters.crossrootfsDir }}
+
+    gatherAssetManifests: true
+
+    variables:
+    - name: osGroup
+      value: ${{ parameters.osGroup }}
+    - name: osSubgroup
+      value: ${{ parameters.osSubgroup }}
+
+    - name: publishLogsArtifactPrefix
+      value: 'BuildLogs_CoreCLR_JIT'
+
+    - name: compilerArg
+      value: ''
+    - ${{ if ne(parameters.osGroup, 'Windows_NT') }}:
+      - name: compilerArg
+        value: '-clang9'
+      # Building for x64 musl happens on Alpine Linux, so we need to use the stable clang version available there
+      - ${{ if and(eq(parameters.osGroup, 'Linux'), eq(parameters.osSubgroup, '_musl'), eq(parameters.archType, 'x64')) }}:
+        - name: compilerArg
+          value: ''
+      # AppleClang has a different versioning scheme, so we let compiler introspection pick up the available clang from PATH
+      - ${{ if eq(parameters.osGroup, 'OSX') }}:
+        - name: compilerArg
+          value: ''
+
+    - ${{ if eq(parameters.osGroup, 'Windows_NT') }}:
+      - name: PythonScript
+        value: 'py -3'
+      - name: PipScript
+        value: 'py -3 -m pip'
+    - ${{ if ne(parameters.osGroup, 'Windows_NT') }}:
+      - name: PythonScript
+        value: 'python3'
+      - name: PipScript
+        value: 'pip3'
+
+    - ${{ parameters.variables }}
+
+    steps:
+
+    # Install native dependencies
+    # Linux builds use docker images with dependencies preinstalled,
+    # and FreeBSD builds use a build agent with dependencies
+    # preinstalled, so we only need this step for OSX and Windows.
+    - ${{ if eq(parameters.osGroup, 'OSX') }}:
+      - script: $(Build.SourcesDirectory)/eng/install-native-dependencies.sh $(osGroup)
+        displayName: Install native dependencies
+    - ${{ if eq(parameters.osGroup, 'Windows_NT') }}:
+      # Necessary to install python
+      - script: $(Build.SourcesDirectory)\eng\common\init-tools-native.cmd -InstallDirectory $(Build.SourcesDirectory)\native-tools -Force
+        displayName: Install native dependencies
+
+    # Install internal tools on official builds
+    # Since our internal tools are behind an authenticated feed,
+    # we need to use the DotNetCli AzDO task to restore from the feed using a service connection.
+    # We can't do this from within the build, so we need to do this as a separate step.
+    - ${{ if and(eq(variables['System.TeamProject'], 'internal'), ne(variables['Build.Reason'], 'PullRequest')) }}:
+      - template: /eng/pipelines/common/restore-internal-tools.yml
+
+    # Build CoreCLR JIT
+    - ${{ if ne(parameters.osGroup, 'Windows_NT') }}:
+      - script: $(Build.SourcesDirectory)/src/coreclr/build-runtime$(scriptExt) $(buildConfig) $(archType) $(crossArg) -ci $(compilerArg) -skipruntime -skipalljits -nopgooptimize
+        displayName: Build CoreCLR JIT
+    - ${{ if eq(parameters.osGroup, 'Windows_NT') }}:
+      - script: set __TestIntermediateDir=int&&$(Build.SourcesDirectory)/src/coreclr/build-runtime$(scriptExt) $(buildConfig) $(archType) -ci -skipruntime -skipalljits -nopgooptimize -skiprestoreoptdata
+        displayName: Build CoreCLR JIT
+
+    # Ensure the Python azure-storage-blob package is installed before doing the upload.
+    - script: $(PipScript) install --user azure.storage.blob==12.0.0 --force-reinstall
+      displayName: Install azure-storage-blob Python package
+
+    - script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/jitrollingbuild.py upload -build_type $(buildConfig) -arch $(archType) -host_os $(osGroup) -git_hash $(Build.SourceVersion)
+      displayName: Upload JIT to Azure Storage
+      env:
+        CLRJIT_AZ_KEY: $(clrjit_key1) # secret key stored as variable in pipeline
+
+    # Publish Logs
+    - task: PublishPipelineArtifact@1
+      displayName: Publish Logs
+      inputs:
+        targetPath: $(Build.SourcesDirectory)/artifacts/log
+        artifactName: '$(publishLogsArtifactPrefix)_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
+      continueOnError: true
+      condition: always()
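
For reference, the upload step above is roughly equivalent to this local
invocation (placeholder key and git hash; in the pipeline the key comes
from the clrjit_key1 secret variable via CLRJIT_AZ_KEY):

    export CLRJIT_AZ_KEY=<storage key>
    python3 src/coreclr/scripts/jitrollingbuild.py upload -build_type Checked -arch x64 -host_os Linux -git_hash <git hash>
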
diff --git a/src/coreclr/scripts/jitrollingbuild.py b/src/coreclr/scripts/jitrollingbuild.py
new file mode 100644 (file)
index 0000000..0464da8
--- /dev/null
@@ -0,0 +1,539 @@
+#!/usr/bin/env python3
+#
+## Licensed to the .NET Foundation under one or more agreements.
+## The .NET Foundation licenses this file to you under the MIT license.
+#
+##
+# Title               : jitrollingbuild.py
+#
+# Notes:
+#
+# Script to upload rolling build clrjit binaries to Azure Storage, and to list and download them from the rolling build storage location
+#
+################################################################################
+################################################################################
+
+import argparse
+import os
+import platform
+import shutil
+import sys
+import tempfile
+import string
+import urllib
+import urllib.request
+import zipfile
+
+import locale
+locale.setlocale(locale.LC_ALL, '')  # Use '' for auto, or force e.g. to 'en_US.UTF-8'
+
+from collections import defaultdict
+from sys import platform as _platform
+
+from coreclr_arguments import *
+
+################################################################################
+# Azure Storage information
+################################################################################
+
+az_account_name = "clrjit2"
+az_container_name = "jitrollingbuild"
+az_builds_root_folder = "builds"
+az_blob_storage_account_uri = "https://" + az_account_name + ".blob.core.windows.net/"
+az_blob_storage_container_uri = az_blob_storage_account_uri + az_container_name
+
+################################################################################
+# Argument Parser
+################################################################################
+
+description = """\
+Script to upload and manage rolling build clrjit
+"""
+
+upload_description = """\
+Upload clrjit to the rolling build Azure storage location.
+"""
+
+download_description = """\
+Download clrjit from the rolling build Azure storage location.
+"""
+
+list_description = """\
+List clrjit binaries in the rolling build Azure storage location.
+"""
+
+host_os_help = "OS (Windows_NT, OSX, Linux). Default: current OS."
+
+arch_help = "Architecture (x64, x86, arm, arm64). Default: current architecture."
+
+build_type_help = "Build type (Debug, Checked, Release). Default: Checked."
+
+git_hash_help = "git hash"
+
+target_dir_help = "Directory to put the downloaded JIT."
+
+skip_cleanup_help = "Skip intermediate file removal."
+
+# Start of parser object creation.
+
+parser = argparse.ArgumentParser(description=description)
+
+subparsers = parser.add_subparsers(dest='mode', help="Command to invoke")
+
+# Common parser for git_hash/arch/build_type/host_os arguments
+
+common_parser = argparse.ArgumentParser(add_help=False)
+
+common_parser.add_argument("-arch", help=arch_help)
+common_parser.add_argument("-build_type", default="Checked", help=build_type_help)
+common_parser.add_argument("-host_os", help=host_os_help)
+
+# subparser for upload
+upload_parser = subparsers.add_parser("upload", description=upload_description, parents=[common_parser])
+
+upload_parser.add_argument("-git_hash", required=True, help=git_hash_help)
+upload_parser.add_argument("-az_storage_key", help="Key for the clrjit Azure Storage location. Default: use the value of the CLRJIT_AZ_KEY environment variable.")
+upload_parser.add_argument("--skip_cleanup", action="store_true", help=skip_cleanup_help)
+
+# subparser for download
+download_parser = subparsers.add_parser("download", description=download_description, parents=[common_parser])
+
+download_parser.add_argument("-git_hash", required=True, help=git_hash_help)
+download_parser.add_argument("-target_dir", required=True, help=target_dir_help)
+download_parser.add_argument("--skip_cleanup", action="store_true", help=skip_cleanup_help)
+
+# subparser for list
+list_parser = subparsers.add_parser("list", description=list_description, parents=[common_parser])
+
+list_parser.add_argument("-git_hash", help=git_hash_help)
+list_parser.add_argument("--all", action="store_true", help="Show all JITs, not just those for the specified (or default) git hash, OS, architecture, and flavor")
+
+################################################################################
+# Helper classes
+################################################################################
+
+class TempDir:
+    def __init__(self, path=None):
+        self.dir = tempfile.mkdtemp() if path is None else path
+        self.cwd = None
+
+    def __enter__(self):
+        self.cwd = os.getcwd()
+        os.chdir(self.dir)
+        return self.dir
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        os.chdir(self.cwd)
+        # Note: we are using the global `args`, not coreclr_args. This works because
+        # the `skip_cleanup` argument is not processed by CoreclrArguments, but is
+        # just copied there.
+        if not args.skip_cleanup:
+            shutil.rmtree(self.dir)
+
+class ChangeDir:
+    def __init__(self, dir):
+        self.dir = dir
+        self.cwd = None
+
+    def __enter__(self):
+        self.cwd = os.getcwd()
+        os.chdir(self.dir)
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        os.chdir(self.cwd)
+
+def determine_jit_name(coreclr_args):
+    """ Determine the jit based on the OS. If "-altjit" is specified, then use the specified altjit,
+        or an appropriate altjit based on target.
+
+    Args:
+        coreclr_args (CoreclrArguments): parsed args
+
+    Return:
+        jit_name(str) : name of the jit for this os
+    """
+
+    jit_base_name = "clrjit"
+    if coreclr_args.host_os == "OSX":
+        return "lib" + jit_base_name + ".dylib"
+    elif coreclr_args.host_os == "Linux":
+        return "lib" + jit_base_name + ".so"
+    elif coreclr_args.host_os == "Windows_NT":
+        return jit_base_name + ".dll"
+    else:
+        raise RuntimeError("Unknown OS.")
+
+
+def list_az_jits(coreclr_args, filter=lambda unused: True):
+    """ List the JITs in Azure Storage using REST api
+
+    Args:
+        filter (lambda: string -> bool): filter to apply to the list. The filter takes a URL and returns True if this URL is acceptable.
+
+    Returns:
+        urls (list): list of URLs in Azure Storage that match the filter, or None if the listing failed.
+
+    Notes:
+        This method does not require installing the Azure Storage python package.
+    """
+
+    # This URI will return *all* the blobs, for all git_hash/OS/architecture/build_type combinations.
+    # pass "prefix=foo/bar/..." to only show a subset. Or, we can filter later using string search.
+    list_az_container_uri = az_blob_storage_container_uri + "?restype=container&comp=list&prefix=" + az_builds_root_folder + "/"
+
+    try:
+        contents = urllib.request.urlopen(list_az_container_uri).read().decode('utf-8')
+    except Exception as exception:
+        print("Didn't find any collections using {}".format(list_az_container_uri))
+        print("  Error: {}".format(exception))
+        return None
+
+    # Contents is an XML file with contents like:
+    #
+    # <EnumerationResults ContainerName="https://clrjit.blob.core.windows.net/superpmi">
+    #   <Blobs>
+    #     <Blob>
+    #       <Name>jit-ee-guid/Linux/x64/Linux.x64.Checked.frameworks.mch.zip</Name>
+    #       <Url>https://clrjit.blob.core.windows.net/superpmi/jit-ee-guid/Linux/x64/Linux.x64.Checked.frameworks.mch.zip</Url>
+    #       <Properties>
+    #         ...
+    #       </Properties>
+    #     </Blob>
+    #     <Blob>
+    #       <Name>jit-ee-guid/Linux/x64/Linux.x64.Checked.mch.zip</Name>
+    #       <Url>https://clrjit.blob.core.windows.net/superpmi/jit-ee-guid/Linux/x64/Linux.x64.Checked.mch.zip</Url>
+    #     ... etc. ...
+    #   </Blobs>
+    # </EnumerationResults>
+    #
+    # We just want to extract the <Url> entries. We could use an XML parsing package, but we just
+    # use simple string splitting.
+
+    urls_split = contents.split("<Url>")[1:]
+    urls = []
+    for item in urls_split:
+        url = item.split("</Url>")[0].strip()
+        if filter(url):
+            urls.append(url)
+
+    return urls
+
+
+def upload_command(coreclr_args):
+    """ Upload the JIT
+
+    Args:
+        coreclr_args (CoreclrArguments): parsed args
+    """
+
+    print("JIT upload")
+
+    def upload_blob(file, blob_name):
+        blob_client = blob_service_client.get_blob_client(container=az_container_name, blob=blob_name)
+
+        # Check if the blob already exists, and delete it if it does, before uploading / replacing it.
+        try:
+            blob_properties = blob_client.get_blob_properties()
+            # If no exception, then the blob already exists. Delete it!
+            print("Warning: replacing existing blob!")
+            blob_client.delete_blob()
+        except Exception:
+            # Blob doesn't exist already; that's good
+            pass
+
+        with open(file, "rb") as data:
+            blob_client.upload_blob(data)
+
+    # 1. Find the JIT in the product directory
+    # 2. Upload it
+    # 3. optional? upload JIT PDB/symbols
+
+    # Target directory: <root>/git-hash/OS/architecture/build-flavor/
+    # Note that build-flavor will probably always be Checked.
+    # Lower-case all the directory names.
+
+    files = []
+
+    jit_name = determine_jit_name(coreclr_args)
+    jit_path = os.path.join(coreclr_args.product_location, jit_name)
+    if not os.path.isfile(jit_path):
+        print("Error: Couldn't find JIT at {}".format(jit_path))
+        raise RuntimeError("Missing JIT")
+
+    files.append(jit_path)
+
+    print("Uploading:")
+    for item in files:
+        print("  {}".format(item))
+
+    try:
+        from azure.storage.blob import BlobServiceClient, BlobClient
+
+    except ImportError:
+        print("Please install:")
+        print("  pip install azure-storage-blob")
+        print("See also https://docs.microsoft.com/en-us/azure/storage/blobs/storage-quickstart-blobs-python")
+        raise RuntimeError("Missing azure storage package.")
+
+    blob_service_client = BlobServiceClient(account_url=az_blob_storage_account_uri, credential=coreclr_args.az_storage_key)
+    blob_folder_name = "{}/{}/{}/{}/{}".format(az_builds_root_folder, coreclr_args.git_hash, coreclr_args.host_os, coreclr_args.arch, coreclr_args.build_type)
+
+    total_bytes_uploaded = 0
+
+    # Should we compress the JIT on upload? It would save space, but it makes it slightly more complicated to use
+    # because you can't just "wget" or otherwise download the file and use it immediately -- you need to unzip first.
+    # So for now, don't compress it.
+    compress_jit = False
+
+    with TempDir() as temp_location:
+        for file in files:
+            if compress_jit:
+                # Zip compress the file we will upload
+                zip_name = os.path.basename(file) + ".zip"
+                zip_path = os.path.join(temp_location, zip_name)
+                print("Compress {} -> {}".format(file, zip_path))
+                with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zip_file:
+                    zip_file.write(file, os.path.basename(file))
+
+                file_stat_result = os.stat(file)
+                zip_stat_result = os.stat(zip_path)
+                print("Compressed {:n} to {:n} bytes".format(file_stat_result.st_size, zip_stat_result.st_size))
+                total_bytes_uploaded += zip_stat_result.st_size
+
+                blob_name = "{}/{}".format(blob_folder_name, zip_name)
+                print("Uploading: {} ({}) -> {}".format(file, zip_path, az_blob_storage_container_uri + "/" + blob_name))
+                upload_blob(zip_path, blob_name)
+            else:
+                file_stat_result = os.stat(file)
+                total_bytes_uploaded += file_stat_result.st_size
+                file_name = os.path.basename(file)
+                blob_name = "{}/{}".format(blob_folder_name, file_name)
+                print("Uploading: {} -> {}".format(file, az_blob_storage_container_uri + "/" + blob_name))
+                upload_blob(file, blob_name)
+
+    print("Uploaded {:n} bytes".format(total_bytes_uploaded))
+    print("Finished JIT upload")
+
+
+def download_urls(urls, target_dir):
+    """ Download a set of files, specified as URLs, to a target directory.
+        If the URLs are to .ZIP files, then uncompress them and copy all contents
+        to the target directory.
+
+    Args:
+        urls (list): the URLs to download
+        target_dir (str): target directory where files are copied. Directory must exist
+
+    Returns:
+        list of local filenames of downloaded files
+    """
+
+    print("Downloading:")
+    for url in urls:
+        print("  {}".format(url))
+
+    local_files = []
+
+    # In case we'll need a temp directory for ZIP file processing, create it first.
+    with TempDir() as temp_location:
+        for url in urls:
+            item_name = url.split("/")[-1]
+
+            if url.lower().endswith(".zip"):
+                # Delete everything left in temp_location from previous iterations of this loop (i.e., previous URL downloads).
+                temp_location_items = [os.path.join(temp_location, item) for item in os.listdir(temp_location)]
+                for item in temp_location_items:
+                    if os.path.isdir(item):
+                        shutil.rmtree(item)
+                    else:
+                        os.remove(item)
+
+                download_path = os.path.join(temp_location, item_name)
+
+                print("Download: {} -> {}".format(url, download_path))
+                urllib.request.urlretrieve(url, download_path)
+
+                print("Uncompress {}".format(download_path))
+                with zipfile.ZipFile(download_path, "r") as file_handle:
+                    file_handle.extractall(temp_location)
+
+                # Copy everything that was extracted to the target directory.
+                items = [ os.path.join(temp_location, item) for item in os.listdir(temp_location) if not item.endswith(".zip") ]
+                for item in items:
+                    target_path = os.path.join(target_dir, os.path.basename(item))
+                    print("Copy {} -> {}".format(item, target_path))
+                    shutil.copy2(item, target_dir)
+                    local_files.append(target_path)
+            else:
+                # Not a zip file; download directly to the target directory
+                download_path = os.path.join(target_dir, item_name)
+
+                print("Download: {} -> {}".format(url, download_path))
+                urllib.request.urlretrieve(url, download_path)
+
+                local_files.append(download_path)
+
+    return local_files
+
+
+def download_command(coreclr_args):
+    """ Download the JIT
+
+    Args:
+        coreclr_args (CoreclrArguments): parsed args
+    """
+
+    print("JIT download")
+
+    jit_name = determine_jit_name(coreclr_args)
+    blob_folder_name = "{}/{}/{}/{}/{}/{}".format(az_builds_root_folder, coreclr_args.git_hash, coreclr_args.host_os, coreclr_args.arch, coreclr_args.build_type, jit_name)
+    blob_uri = "{}/{}".format(az_blob_storage_container_uri, blob_folder_name)
+    urls = [ blob_uri ]
+    download_urls(urls, coreclr_args.target_dir)
+
+
+def list_command(coreclr_args):
+    """ List the JITs in Azure Storage
+
+    Args:
+        coreclr_args (CoreclrArguments) : parsed args
+    """
+
+    blob_filter_string = "{}/{}/{}/{}".format(coreclr_args.git_hash, coreclr_args.host_os, coreclr_args.arch, coreclr_args.build_type)
+    blob_prefix_filter = "{}/{}/{}".format(az_blob_storage_container_uri, az_builds_root_folder, blob_filter_string).lower()
+
+    # Determine if a URL in Azure Storage should be allowed. The URL looks like:
+    #   https://clrjit2.blob.core.windows.net/jitrollingbuild/builds/git_hash/Linux/x64/Checked/clrjit.dll
+    # By default, filter to just the current git_hash, OS, architecture, and build_flavor.
+    def filter_jits(url):
+        url = url.lower()
+        return coreclr_args.all or url.startswith(blob_prefix_filter)
+
+    urls = list_az_jits(coreclr_args, filter_jits)
+    if urls is None:
+        return
+
+    count = len(urls)
+
+    if coreclr_args.all:
+        print("{} JIT".format(count))
+    else:
+        print("{} JIT for {}".format(count, blob_filter_string))
+    print("")
+
+    for url in urls:
+        print("{}".format(url))
+
+    print("")
+
+
+def setup_args(args):
+    """ Setup the args for SuperPMI to use.
+
+    Args:
+        args (ArgParse): args parsed by arg parser
+
+    Returns:
+        args (CoreclrArguments)
+
+    """
+    coreclr_args = CoreclrArguments(args, require_built_core_root=False, require_built_product_dir=False, require_built_test_dir=False, default_build_type="Checked")
+
+    coreclr_args.verify(args,
+                        "mode", # "mode" is from the `parser.add_subparsers(dest='mode')` call
+                        lambda unused: True,
+                        "Unable to set mode")
+
+    if coreclr_args.mode == "upload":
+
+        coreclr_args.verify(args,
+                            "git_hash",
+                            lambda unused: True,
+                            "Unable to set git_hash")
+
+        coreclr_args.verify(args,
+                            "az_storage_key",
+                            lambda item: item is not None,
+                            "Specify az_storage_key or set environment variable CLRJIT_AZ_KEY to the key to use.",
+                            modify_arg=lambda arg: os.environ["CLRJIT_AZ_KEY"] if arg is None and "CLRJIT_AZ_KEY" in os.environ else arg)
+
+        coreclr_args.verify(args,
+                            "skip_cleanup",
+                            lambda unused: True,
+                            "Unable to set skip_cleanup")
+
+        if not os.path.isdir(coreclr_args.product_location):
+            print("Built product location could not be determined")
+            raise RuntimeError("Error")
+
+    elif coreclr_args.mode == "download":
+
+        coreclr_args.verify(args,
+                            "git_hash",
+                            lambda unused: True,
+                            "Unable to set git_hash")
+
+        coreclr_args.verify(args,
+                            "target_dir",
+                            lambda unused: True,
+                            "Unable to set target_dir")
+
+        coreclr_args.verify(args,
+                            "skip_cleanup",
+                            lambda unused: True,
+                            "Unable to set skip_cleanup")
+
+        if not os.path.isdir(coreclr_args.target_dir):
+            print("--target_dir directory does not exist")
+            raise RuntimeError("Error")
+
+    elif coreclr_args.mode == "list":
+
+        coreclr_args.verify(args,
+                            "git_hash",
+                            lambda unused: True,
+                            "Unable to set git_hash")
+
+        coreclr_args.verify(args,
+                            "all",
+                            lambda unused: True,
+                            "Unable to set all")
+
+    return coreclr_args
+
+################################################################################
+# main
+################################################################################
+
+def main(args):
+    """ Main method
+    """
+
+    if sys.version_info.major < 3:
+        print("Please install python 3 or greater")
+        return 1
+
+    coreclr_args = setup_args(args)
+    success = True
+
+    if coreclr_args.mode == "upload":
+        upload_command(coreclr_args)
+
+    elif coreclr_args.mode == "download":
+        download_command(coreclr_args)
+
+    elif coreclr_args.mode == "list":
+        list_command(coreclr_args)
+
+    else:
+        raise NotImplementedError(coreclr_args.mode)
+
+    return 0 if success else 1
+
+################################################################################
+# __main__
+################################################################################
+
+if __name__ == "__main__":
+    args = parser.parse_args()
+    sys.exit(main(args))
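
For illustration only (not part of the commit): given the blob layout used by
upload_command and download_command above, an uploaded JIT can also be fetched
directly with urllib. A minimal sketch, with all values hypothetical:

    # Compose the rolling build blob URL and download it directly,
    # mirroring download_command above. All values are placeholders.
    import urllib.request

    git_hash   = "0123456789abcdef"   # hypothetical baseline commit hash
    host_os    = "Linux"
    arch       = "x64"
    build_type = "Checked"
    jit_name   = "libclrjit.so"       # per determine_jit_name for Linux

    url = "https://clrjit2.blob.core.windows.net/jitrollingbuild/builds/{}/{}/{}/{}/{}".format(
        git_hash, host_os, arch, build_type, jit_name)
    urllib.request.urlretrieve(url, jit_name)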