diff --git a/eng/pipelines/common/templates/runtimes/send-to-helix-step.yml b/eng/pipelines/common/templates/runtimes/send-to-helix-step.yml
index f1d7f5ac509a28..d32b5a8233d269 100644
--- a/eng/pipelines/common/templates/runtimes/send-to-helix-step.yml
+++ b/eng/pipelines/common/templates/runtimes/send-to-helix-step.yml
@@ -29,6 +29,7 @@ parameters:
runtimeVariant: ''
shouldContinueOnError: false
SuperPmiCollect: ''
+ SuperPmiDiffType: ''
steps:
@@ -62,6 +63,7 @@ steps:
RuntimeFlavor: ${{ parameters.runtimeFlavor }}
_RuntimeVariant: ${{ parameters.runtimeVariant }}
_SuperPmiCollect: ${{ parameters.SuperPmiCollect }}
+ _SuperPmiDiffType: ${{ parameters.SuperPmiDiffType }}
${{ if eq(parameters.publishTestResults, 'true') }}:
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
# TODO: remove NUGET_PACKAGES once https://github.com/dotnet/arcade/issues/1578 is fixed
diff --git a/eng/pipelines/coreclr/superpmi-diffs.yml b/eng/pipelines/coreclr/superpmi-diffs.yml
index 5d992b33a3bbb9..cbfeae2aa714cc 100644
--- a/eng/pipelines/coreclr/superpmi-diffs.yml
+++ b/eng/pipelines/coreclr/superpmi-diffs.yml
@@ -65,4 +65,18 @@ extends:
helixQueueGroup: ci
helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
jobParameters:
- condition: not(eq(dependencies.evaluate_paths.outputs['SetPathVars_jiteeversionguid.containsChange'], true))
\ No newline at end of file
+ condition: not(eq(dependencies.evaluate_paths.outputs['SetPathVars_jiteeversionguid.containsChange'], true))
+ diffType: asmdiffs
+
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/coreclr/templates/superpmi-diffs-job.yml
+ buildConfig: checked
+ platforms:
+ - windows_x64
+ - windows_x86
+ helixQueueGroup: ci
+ helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
+ jobParameters:
+ condition: not(eq(dependencies.evaluate_paths.outputs['SetPathVars_jiteeversionguid.containsChange'], true))
+ diffType: tpdiff
diff --git a/eng/pipelines/coreclr/templates/run-superpmi-diffs-job.yml b/eng/pipelines/coreclr/templates/run-superpmi-diffs-job.yml
index 38b972bb2a7501..ddbb025e03f721 100644
--- a/eng/pipelines/coreclr/templates/run-superpmi-diffs-job.yml
+++ b/eng/pipelines/coreclr/templates/run-superpmi-diffs-job.yml
@@ -16,6 +16,7 @@ parameters:
liveLibrariesBuildConfig: '' # optional -- live-live libraries configuration to use for the run
helixQueues: '' # required -- Helix queues
dependOnEvaluatePaths: false
+ diffType: 'asmdiffs' # required -- 'asmdiffs', 'tpdiff', or 'all'
jobs:
- template: xplat-pipeline-job.yml
@@ -41,6 +42,9 @@ jobs:
- ${{ each variable in parameters.variables }}:
- ${{insert}}: ${{ variable }}
+ - name: diffType
+ value: ${{ parameters.diffType }}
+
- name: PythonScript
value: 'py -3'
- name: PipScript
@@ -49,11 +53,23 @@ jobs:
value: '$(Build.SourcesDirectory)\artifacts\spmi\'
- name: SpmiLogsLocation
value: '$(Build.SourcesDirectory)\artifacts\spmi_logs\'
- - name: SpmiAsmdiffsLocation
- value: '$(Build.SourcesDirectory)\artifacts\spmi_asmdiffs\'
+ - name: SpmiDiffsLocation
+ value: '$(Build.SourcesDirectory)\artifacts\spmi_diffs\'
- name: HelixResultLocation
value: '$(Build.SourcesDirectory)\artifacts\helixresults\'
+ - name: SetupScriptDirs
+ value: ''
+ - ${{ if eq(parameters.diffType, 'asmdiffs') }}:
+ - name: SetupScriptDirs
+ value: '-checked_directory $(buildProductRootFolderPath)'
+ - ${{ if eq(parameters.diffType, 'tpdiff') }}:
+ - name: SetupScriptDirs
+ value: '-release_directory $(releaseProductRootFolderPath)'
+ - ${{ if eq(parameters.diffType, 'all') }}:
+ - name: SetupScriptDirs
+ value: '-checked_directory $(buildProductRootFolderPath) -release_directory $(releaseProductRootFolderPath)'
+
workspace:
clean: all
pool:
@@ -65,11 +81,11 @@ jobs:
- script: |
mkdir $(SpmiCollectionLocation)
mkdir $(SpmiLogsLocation)
- mkdir $(SpmiAsmdiffsLocation)
+ mkdir $(SpmiDiffsLocation)
displayName: Create directories
- - script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi_diffs_setup.py -source_directory $(Build.SourcesDirectory) -checked_directory $(buildProductRootFolderPath) -release_directory $(releaseProductRootFolderPath) -arch $(archType)
- displayName: ${{ format('SuperPMI diffs setup ({0})', parameters.archType) }}
+ - script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi_diffs_setup.py -source_directory $(Build.SourcesDirectory) $(SetupScriptDirs) -type $(diffType) -arch $(archType)
+ displayName: ${{ format('SuperPMI diffs setup ({0} {1})', parameters.diffType, parameters.archType) }}
# Run superpmi-diffs.py script in helix
- template: /eng/pipelines/common/templates/runtimes/send-to-helix-step.yml
@@ -87,6 +103,7 @@ jobs:
BuildConfig: ${{ parameters.buildConfig }}
osGroup: ${{ parameters.osGroup }}
archType: ${{ parameters.archType }}
+ SuperPmiDiffType: ${{ parameters.diffType }}
# Always upload the available logs for diagnostics
- task: CopyFiles@2
@@ -102,34 +119,35 @@ jobs:
inputs:
sourceFolder: '$(HelixResultLocation)'
contents: '**/superpmi_*.md'
- targetFolder: '$(SpmiAsmdiffsLocation)'
+ targetFolder: '$(SpmiDiffsLocation)'
condition: always()
- - task: CopyFiles@2
- displayName: Copying dasm files of all partitions
- inputs:
- sourceFolder: '$(HelixResultLocation)'
- contents: '**/Asmdiffs_*.zip'
- targetFolder: '$(SpmiAsmdiffsLocation)'
- condition: always()
+ - ${{ if eq(parameters.diffType, 'asmdiffs') }}:
+ - task: CopyFiles@2
+ displayName: Copying dasm files of all partitions
+ inputs:
+ sourceFolder: '$(HelixResultLocation)'
+ contents: '**/Asmdiffs_*.zip'
+ targetFolder: '$(SpmiDiffsLocation)'
+ condition: always()
- - script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi_diffs_summarize.py -diff_summary_dir $(SpmiAsmdiffsLocation) -arch $(archType)
- displayName: ${{ format('Summarize ({0})', parameters.archType) }}
+ - script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi_diffs_summarize.py -diff_summary_dir $(SpmiDiffsLocation) -type $(diffType) -arch $(archType)
+ displayName: ${{ format('Summarize ({0} {1})', parameters.diffType, parameters.archType) }}
condition: always()
- task: PublishPipelineArtifact@1
displayName: Publish SuperPMI logs
inputs:
targetPath: $(SpmiLogsLocation)
- artifactName: 'SuperPMI_Logs_$(archType)_$(buildConfig)_Attempt$(System.JobAttempt)'
+ artifactName: 'SuperPMI_Logs_$(diffType)_$(archType)_$(buildConfig)_Attempt$(System.JobAttempt)'
condition: always()
continueOnError: true
- task: PublishPipelineArtifact@1
- displayName: Publish SuperPMI asmdiffs files
+ displayName: Publish SuperPMI diffs files
inputs:
- targetPath: $(SpmiAsmdiffsLocation)
- artifactName: 'SuperPMI_Asmdiffs_$(archType)_$(buildConfig)_Attempt$(System.JobAttempt)'
+ targetPath: $(SpmiDiffsLocation)
+ artifactName: 'SuperPMI_Diffs_$(diffType)_$(archType)_$(buildConfig)_Attempt$(System.JobAttempt)'
condition: always()
continueOnError: true
@@ -137,6 +155,6 @@ jobs:
displayName: Publish SuperPMI build logs
inputs:
targetPath: $(Build.SourcesDirectory)/artifacts/log
- artifactName: 'SuperPMI_BuildLogs_$(archType)_$(buildConfig)_Attempt$(System.JobAttempt)'
+ artifactName: 'SuperPMI_BuildLogs_$(diffType)_$(archType)_$(buildConfig)_Attempt$(System.JobAttempt)'
condition: always()
continueOnError: true
diff --git a/eng/pipelines/coreclr/templates/superpmi-diffs-job.yml b/eng/pipelines/coreclr/templates/superpmi-diffs-job.yml
index 01f048da1f0e32..828947888453c7 100644
--- a/eng/pipelines/coreclr/templates/superpmi-diffs-job.yml
+++ b/eng/pipelines/coreclr/templates/superpmi-diffs-job.yml
@@ -10,12 +10,13 @@ parameters:
helixQueues: ''
dependOnEvaluatePaths: false
runJobTemplate: '/eng/pipelines/coreclr/templates/run-superpmi-diffs-job.yml'
+ diffType: 'asmdiffs' # required -- 'asmdiffs', 'tpdiff', or 'all'
jobs:
- template: ${{ parameters.runJobTemplate }}
parameters:
- jobName: ${{ format('superpmi_diffs_{0}{1}_{2}', parameters.osGroup, parameters.osSubgroup, parameters.archType) }}
- displayName: ${{ format('SuperPMI diffs {0}{1} {2}', parameters.osGroup, parameters.osSubgroup, parameters.archType) }}
+ jobName: ${{ format('superpmi_diffs_{0}_{1}{2}_{3}', parameters.diffType, parameters.osGroup, parameters.osSubgroup, parameters.archType) }}
+ displayName: ${{ format('SuperPMI {0} {1}{2} {3}', parameters.diffType, parameters.osGroup, parameters.osSubgroup, parameters.archType) }}
pool: ${{ parameters.pool }}
buildConfig: ${{ parameters.buildConfig }}
archType: ${{ parameters.archType }}
@@ -25,33 +26,39 @@ jobs:
dependOnEvaluatePaths: ${{ parameters.dependOnEvaluatePaths }}
timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
helixQueues: ${{ parameters.helixQueues }}
+ diffType: ${{ parameters.diffType }}
dependsOn:
- - ${{ format('coreclr_jit_build_{0}{1}_{2}_checked', parameters.osGroup, parameters.osSubgroup, parameters.archType) }}
- - ${{ format('coreclr_jit_build_{0}{1}_{2}_release', parameters.osGroup, parameters.osSubgroup, parameters.archType) }}
+ - ${{ if in(parameters.diffType, 'asmdiffs', 'all') }}:
+ - ${{ format('coreclr_jit_build_{0}{1}_{2}_checked', parameters.osGroup, parameters.osSubgroup, parameters.archType) }}
+ - ${{ if in(parameters.diffType, 'tpdiff', 'all') }}:
+ - ${{ format('coreclr_jit_build_{0}{1}_{2}_release', parameters.osGroup, parameters.osSubgroup, parameters.archType) }}
variables:
- ${{ each variable in parameters.variables }}:
- ${{insert}}: ${{ variable }}
- - name: releaseProductRootFolderPath
- value: '$(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).$(archType).Release'
- - name: releaseProductArtifactName
- value: 'CoreCLRProduct_${{ parameters.pgoType }}_${{ parameters.runtimeVariant }}_$(osGroup)$(osSubgroup)_$(archType)_release'
+ - ${{ if in(parameters.diffType, 'tpdiff', 'all') }}:
+ - name: releaseProductRootFolderPath
+ value: '$(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).$(archType).Release'
+ - name: releaseProductArtifactName
+ value: 'CoreCLRProduct_${{ parameters.pgoType }}_${{ parameters.runtimeVariant }}_$(osGroup)$(osSubgroup)_$(archType)_release'
steps:
- # Download jit checked builds
- - template: /eng/pipelines/common/download-artifact-step.yml
- parameters:
- unpackFolder: $(buildProductRootFolderPath)
- artifactFileName: '$(buildProductArtifactName)$(archiveExtension)'
- artifactName: '$(buildProductArtifactName)'
- displayName: 'JIT checked build'
+ - ${{ if in(parameters.diffType, 'asmdiffs', 'all') }}:
+ # Download jit checked builds
+ - template: /eng/pipelines/common/download-artifact-step.yml
+ parameters:
+ unpackFolder: $(buildProductRootFolderPath)
+ artifactFileName: '$(buildProductArtifactName)$(archiveExtension)'
+ artifactName: '$(buildProductArtifactName)'
+ displayName: 'JIT checked build'
- #Download jit release builds
- - template: /eng/pipelines/common/download-artifact-step.yml
- parameters:
- unpackFolder: $(releaseProductRootFolderPath)
- artifactFileName: '$(releaseProductArtifactName)$(archiveExtension)'
- artifactName: '$(releaseProductArtifactName)'
- displayName: 'JIT release build'
+ - ${{ if in(parameters.diffType, 'tpdiff', 'all') }}:
+ # Download jit release builds
+ - template: /eng/pipelines/common/download-artifact-step.yml
+ parameters:
+ unpackFolder: $(releaseProductRootFolderPath)
+ artifactFileName: '$(releaseProductArtifactName)$(archiveExtension)'
+ artifactName: '$(releaseProductArtifactName)'
+ displayName: 'JIT release build'
diff --git a/src/coreclr/scripts/superpmi-diffs.proj b/src/coreclr/scripts/superpmi-diffs.proj
index cef1cd97a04f66..d1f3e0d2ae2952 100644
--- a/src/coreclr/scripts/superpmi-diffs.proj
+++ b/src/coreclr/scripts/superpmi-diffs.proj
@@ -30,8 +30,6 @@
%HELIX_WORKITEM_UPLOAD_ROOT%
$(BUILD_SOURCESDIRECTORY)\artifacts\helixresults
-    <WorkItemCommand>$(Python) $(ProductDirectory)\superpmi_diffs.py -base_jit_directory $(ProductDirectory)\base -diff_jit_directory $(ProductDirectory)\diff -log_directory $(SuperpmiLogsLocation)</WorkItemCommand>
-    <WorkItemTimeout>2:00</WorkItemTimeout>
@@ -45,6 +43,17 @@
$(_HelixType)
+
+  <PropertyGroup>
+    <SuperPmiDiffType Condition="'$(_SuperPmiDiffType)' == ''">asmdiffs</SuperPmiDiffType>
+    <SuperPmiDiffType Condition="'$(_SuperPmiDiffType)' != ''">$(_SuperPmiDiffType)</SuperPmiDiffType>
+  </PropertyGroup>
+
+  <PropertyGroup>
+    <WorkItemCommand>$(Python) $(ProductDirectory)\superpmi_diffs.py -type $(SuperPmiDiffType) -base_jit_directory $(ProductDirectory)\base -diff_jit_directory $(ProductDirectory)\diff -log_directory $(SuperpmiLogsLocation)</WorkItemCommand>
+    <WorkItemTimeout>2:00</WorkItemTimeout>
+  </PropertyGroup>
+
%(Identity)
@@ -68,7 +77,9 @@
$(WorkItemCommand) -arch %(HelixWorkItem.Architecture) -platform %(HelixWorkItem.Platform)
$(WorkItemTimeout)
-      <DownloadFilesFromResults>superpmi_asmdiffs_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).log;superpmi_tpdiff_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).log;superpmi_download_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).log;superpmi_diff_summary_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).md;superpmi_tpdiff_summary_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).md;Asmdiffs_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).zip</DownloadFilesFromResults>
+      <DownloadFilesFromResults Condition="'$(SuperPmiDiffType)' == 'asmdiffs'">superpmi_download_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).log;superpmi_asmdiffs_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).log;superpmi_asmdiffs_summary_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).md;Asmdiffs_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).zip</DownloadFilesFromResults>
+      <DownloadFilesFromResults Condition="'$(SuperPmiDiffType)' == 'tpdiff'">superpmi_download_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).log;superpmi_tpdiff_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).log;superpmi_tpdiff_summary_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).md</DownloadFilesFromResults>
+      <DownloadFilesFromResults Condition="'$(SuperPmiDiffType)' == 'all'">superpmi_download_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).log;superpmi_asmdiffs_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).log;superpmi_asmdiffs_summary_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).md;Asmdiffs_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).zip;superpmi_tpdiff_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).log;superpmi_tpdiff_summary_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).md</DownloadFilesFromResults>
diff --git a/src/coreclr/scripts/superpmi.py b/src/coreclr/scripts/superpmi.py
index 07405e31e19abf..f3bda09bc3a6f0 100644
--- a/src/coreclr/scripts/superpmi.py
+++ b/src/coreclr/scripts/superpmi.py
@@ -4304,6 +4304,11 @@ def verify_base_diff_args():
lambda arch: arch == "x86" or arch == "x64",
"Throughput measurements not supported on platform {}".format(coreclr_args.arch))
+ coreclr_args.verify(determine_coredis_tools(coreclr_args),
+ "coredistools_location",
+ os.path.isfile,
+ "Unable to find coredistools.")
+
process_base_jit_path_arg(coreclr_args)
download_clrjit_pintool(coreclr_args)
diff --git a/src/coreclr/scripts/superpmi_diffs.py b/src/coreclr/scripts/superpmi_diffs.py
index dcf8dc26e47b27..155c89f171f048 100644
--- a/src/coreclr/scripts/superpmi_diffs.py
+++ b/src/coreclr/scripts/superpmi_diffs.py
@@ -21,6 +21,7 @@
parser = argparse.ArgumentParser(description="description")
parser.add_argument("-arch", help="Architecture")
+parser.add_argument("-type", help="Type of diff (asmdiffs, tpdiff, all)")
parser.add_argument("-platform", help="OS platform")
parser.add_argument("-base_jit_directory", help="path to the directory containing base clrjit binaries")
parser.add_argument("-diff_jit_directory", help="path to the directory containing diff clrjit binaries")
@@ -44,6 +45,11 @@ def setup_args(args):
lambda unused: True,
"Unable to set arch")
+ coreclr_args.verify(args,
+ "type",
+ lambda type: type in ["asmdiffs", "tpdiff", "all"],
+ "Invalid type \"{}\"".format)
+
coreclr_args.verify(args,
"platform",
lambda unused: True,
@@ -66,63 +72,246 @@ def setup_args(args):
return coreclr_args
-def copy_dasm_files(spmi_location, upload_directory, tag_name):
- """Copies .dasm files to a tempDirectory, zip it, and copy the compressed file to the upload directory.
-
- Args:
- spmi_location (string): Location where .dasm files are present
- upload_directory (string): Upload directory
- tag_name (string): tag_name used in zip file name.
+class Diff:
+ """ Class handling asmdiffs and tpdiff invocations
"""
- print("Copy .dasm files")
-
- # Create upload_directory
- if not os.path.isdir(upload_directory):
- os.makedirs(upload_directory)
-
- dasm_file_present = False
- # Create temp directory to copy all issues to upload. We don't want to create a sub-folder
- # inside issues_directory because that will also get included twice.
- with TempDir() as prep_directory:
- for file_path, dirs, files in walk(spmi_location, topdown=True):
- # Credit: https://stackoverflow.com/a/19859907
- dirs[:] = [d for d in dirs]
- for name in files:
- if not name.lower().endswith(".dasm"):
- continue
-
- dasm_src_file = path.join(file_path, name)
- dasm_dst_file = dasm_src_file.replace(spmi_location, prep_directory)
- dst_directory = path.dirname(dasm_dst_file)
- if not os.path.exists(dst_directory):
- os.makedirs(dst_directory)
- try:
- shutil.copy2(dasm_src_file, dasm_dst_file)
- dasm_file_present = True
- except PermissionError as pe_error:
- print('Ignoring PermissionError: {0}'.format(pe_error))
+ def __init__(self, coreclr_args):
+ """ Constructor
+
+ Args:
+ coreclr_args (CoreclrArguments) : parsed args
+ ...
+ """
+ self.coreclr_args = coreclr_args
+
+ self.python_path = sys.executable
+ self.script_dir = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
+
+ # It doesn't really matter where we put the downloaded SPMI artifacts.
+ # Here, they are put in /artifacts/spmi.
+ self.spmi_location = os.path.join(self.script_dir, "artifacts", "spmi")
+
+ self.log_directory = coreclr_args.log_directory
+ self.platform_name = coreclr_args.platform
+ self.arch_name = coreclr_args.arch
+
+ self.os_name = "win" if self.platform_name.lower() == "windows" else "unix"
+ self.host_arch_name = "x64" if self.arch_name.endswith("64") else "x86"
+ self.os_name = "universal" if self.arch_name.startswith("arm") else self.os_name
+
+ # Core_Root is where the superpmi tools (superpmi.exe, mcs.exe) are expected to be found.
+ # We pass the full path of the JITs to use as arguments.
+ self.core_root_dir = self.script_dir
+
+ # Assume everything succeeded. If any step fails, it will change this to True.
+ self.failed = False
+
+ # List of summary MarkDown files
+ self.summary_md_files = []
+
+
+ def download_mch(self):
+ """ Download MCH files for the diff
+ """
+ print("Running superpmi.py download to get MCH files")
+
+ log_file = os.path.join(self.log_directory, "superpmi_download_{}_{}.log".format(self.platform_name, self.arch_name))
+ run_command([
+ self.python_path,
+ os.path.join(self.script_dir, "superpmi.py"),
+ "download",
+ "--no_progress",
+ "-core_root", self.core_root_dir,
+ "-target_os", self.platform_name,
+ "-target_arch", self.arch_name,
+ "-spmi_location", self.spmi_location,
+ "-log_level", "debug",
+ "-log_file", log_file
+ ], _exit_on_fail=True)
+
+
+ def copy_dasm_files(self, upload_directory, tag_name):
+ """ Copies .dasm files to a tempDirectory, zip it, and copy the compressed file to the upload directory.
+
+ Args:
+ upload_directory (string): Upload directory
+ tag_name (string): tag_name used in zip file name.
+ """
+
+ print("Copy .dasm files")
+
+ # Create upload_directory
+ if not os.path.isdir(upload_directory):
+ os.makedirs(upload_directory)
+
+ dasm_file_present = False
+ # Create temp directory to copy all issues to upload. We don't want to create a sub-folder
+ # inside issues_directory because that will also get included twice.
+ with TempDir() as prep_directory:
+ for file_path, dirs, files in walk(self.spmi_location, topdown=True):
+ # Credit: https://stackoverflow.com/a/19859907
+ dirs[:] = [d for d in dirs]
+ for name in files:
+ if not name.lower().endswith(".dasm"):
+ continue
+
+ dasm_src_file = path.join(file_path, name)
+ dasm_dst_file = dasm_src_file.replace(self.spmi_location, prep_directory)
+ dst_directory = path.dirname(dasm_dst_file)
+ if not os.path.exists(dst_directory):
+ os.makedirs(dst_directory)
+ try:
+ shutil.copy2(dasm_src_file, dasm_dst_file)
+ dasm_file_present = True
+ except PermissionError as pe_error:
+ print('Ignoring PermissionError: {0}'.format(pe_error))
+
+        # If there are no diffs, create a zip file with a single file in it.
+ # Otherwise, Azdo considers it as failed job.
+ # See https://github.com/dotnet/arcade/issues/8200
+ if not dasm_file_present:
+ no_diff = os.path.join(prep_directory, "nodiff.txt")
+ with open(no_diff, "w") as no_diff_file:
+ no_diff_file.write("No diffs found!")
+
+ # Zip compress the files we will upload
+ zip_path = os.path.join(prep_directory, "Asmdiffs_" + tag_name)
+ print("Creating archive: " + zip_path)
+ shutil.make_archive(zip_path, 'zip', prep_directory)
+
+ zip_path += ".zip"
+ dst_zip_path = os.path.join(upload_directory, "Asmdiffs_" + tag_name + ".zip")
+ print("Copying {} to {}".format(zip_path, dst_zip_path))
+ try:
+ shutil.copy2(zip_path, dst_zip_path)
+ except PermissionError as pe_error:
+ print('Ignoring PermissionError: {0}'.format(pe_error))
- # If there are no diffs, create an zip file with single file in it.
- # Otherwise, Azdo considers it as failed job.
- # See https://github.com/dotnet/arcade/issues/8200
- if not dasm_file_present:
- no_diff = os.path.join(prep_directory, "nodiff.txt")
- with open(no_diff, "w") as no_diff_file:
- no_diff_file.write("No diffs found!")
- # Zip compress the files we will upload
- zip_path = os.path.join(prep_directory, "Asmdiffs_" + tag_name)
- print("Creating archive: " + zip_path)
- shutil.make_archive(zip_path, 'zip', prep_directory)
+ def do_asmdiffs(self):
+ """ Run asmdiffs
+ """
- zip_path += ".zip"
- dst_zip_path = os.path.join(upload_directory, "Asmdiffs_" + tag_name + ".zip")
- print("Copying {} to {}".format(zip_path, dst_zip_path))
- try:
- shutil.copy2(zip_path, dst_zip_path)
- except PermissionError as pe_error:
- print('Ignoring PermissionError: {0}'.format(pe_error))
+ print("Running superpmi.py asmdiffs")
+
+ # Find the built jit-analyze and put its directory on the PATH
+ jit_analyze_dir = os.path.join(self.script_dir, "jit-analyze")
+ if not os.path.isdir(jit_analyze_dir):
+ print("Error: jit-analyze not found in {} (continuing)".format(jit_analyze_dir))
+ else:
+ # Put the jit-analyze directory on the PATH so superpmi.py can find it.
+ print("Adding {} to PATH".format(jit_analyze_dir))
+ os.environ["PATH"] = jit_analyze_dir + os.pathsep + os.environ["PATH"]
+
+ # Find the portable `git` installation, and put `git.exe` on the PATH, for use by `jit-analyze`.
+ git_directory = os.path.join(self.script_dir, "git", "cmd")
+ git_exe_tool = os.path.join(git_directory, "git.exe")
+ if not os.path.isfile(git_exe_tool):
+ print("Error: `git` not found at {} (continuing)".format(git_exe_tool))
+ else:
+ # Put the git/cmd directory on the PATH so jit-analyze can find it.
+ print("Adding {} to PATH".format(git_directory))
+ os.environ["PATH"] = git_directory + os.pathsep + os.environ["PATH"]
+
+ # Figure out which JITs to use
+ base_checked_jit_path = os.path.join(self.coreclr_args.base_jit_directory, "checked", 'clrjit_{}_{}_{}.dll'.format(self.os_name, self.arch_name, self.host_arch_name))
+ diff_checked_jit_path = os.path.join(self.coreclr_args.diff_jit_directory, "checked", 'clrjit_{}_{}_{}.dll'.format(self.os_name, self.arch_name, self.host_arch_name))
+
+ log_file = os.path.join(self.log_directory, "superpmi_asmdiffs_{}_{}.log".format(self.platform_name, self.arch_name))
+
+        # This is the summary file name and location written by superpmi.py. If the file exists, remove it to ensure superpmi.py doesn't create a numbered version.
+ overall_md_asmdiffs_summary_file = os.path.join(self.spmi_location, "diff_summary.md")
+ if os.path.isfile(overall_md_asmdiffs_summary_file):
+ os.remove(overall_md_asmdiffs_summary_file)
+
+ overall_md_asmdiffs_summary_file_target = os.path.join(self.log_directory, "superpmi_asmdiffs_summary_{}_{}.md".format(self.platform_name, self.arch_name))
+ self.summary_md_files.append((overall_md_asmdiffs_summary_file, overall_md_asmdiffs_summary_file_target))
+
+ _, _, return_code = run_command([
+ self.python_path,
+ os.path.join(self.script_dir, "superpmi.py"),
+ "asmdiffs",
+ "--no_progress",
+ "-core_root", self.core_root_dir,
+ "-target_os", self.platform_name,
+ "-target_arch", self.arch_name,
+ "-arch", self.host_arch_name,
+ "-base_jit_path", base_checked_jit_path,
+ "-diff_jit_path", diff_checked_jit_path,
+ "-spmi_location", self.spmi_location,
+ "-error_limit", "100",
+ "-log_level", "debug",
+ "-log_file", log_file])
+
+ if return_code != 0:
+ print("Failed during asmdiffs. Log file: {}".format(log_file))
+ self.failed = True
+
+ # Prepare .dasm files to upload to AzDO
+ self.copy_dasm_files(self.log_directory, "{}_{}".format(self.platform_name, self.arch_name))
+
+
+ def do_tpdiff(self):
+ """ Run tpdiff
+ """
+
+ print("Running superpmi.py tpdiff")
+
+ # Figure out which JITs to use
+ base_release_jit_path = os.path.join(self.coreclr_args.base_jit_directory, "release", 'clrjit_{}_{}_{}.dll'.format(self.os_name, self.arch_name, self.host_arch_name))
+ diff_release_jit_path = os.path.join(self.coreclr_args.diff_jit_directory, "release", 'clrjit_{}_{}_{}.dll'.format(self.os_name, self.arch_name, self.host_arch_name))
+
+ log_file = os.path.join(self.log_directory, "superpmi_tpdiff_{}_{}.log".format(self.platform_name, self.arch_name))
+
+        # This is the summary file name and location written by superpmi.py. If the file exists, remove it to ensure superpmi.py doesn't create a numbered version.
+ overall_md_tpdiff_summary_file = os.path.join(self.spmi_location, "tpdiff_summary.md")
+ if os.path.isfile(overall_md_tpdiff_summary_file):
+ os.remove(overall_md_tpdiff_summary_file)
+
+ overall_md_tpdiff_summary_file_target = os.path.join(self.log_directory, "superpmi_tpdiff_summary_{}_{}.md".format(self.platform_name, self.arch_name))
+ self.summary_md_files.append((overall_md_tpdiff_summary_file, overall_md_tpdiff_summary_file_target))
+
+ _, _, return_code = run_command([
+ self.python_path,
+ os.path.join(self.script_dir, "superpmi.py"),
+ "tpdiff",
+ "--no_progress",
+ "-core_root", self.core_root_dir,
+ "-target_os", self.platform_name,
+ "-target_arch", self.arch_name,
+ "-arch", self.host_arch_name,
+ "-base_jit_path", base_release_jit_path,
+ "-diff_jit_path", diff_release_jit_path,
+ "-spmi_location", self.spmi_location,
+ "-error_limit", "100",
+ "-log_level", "debug",
+ "-log_file", log_file])
+
+ if return_code != 0:
+ print("Failed during tpdiff. Log file: {}".format(log_file))
+ self.failed = True
+
+
+ def summarize(self):
+ """ Summarize the diffs
+ """
+ # If there are diffs, we'll get summary md files in the spmi_location directory.
+ # If there are no diffs, we still want to create this file and indicate there were no diffs.
+
+ for source, target in self.summary_md_files:
+ if os.path.isfile(source):
+ try:
+ print("Copying summary file {} -> {}".format(source, target))
+ shutil.copy2(source, target)
+ except PermissionError as pe_error:
+ print('Ignoring PermissionError: {0}'.format(pe_error))
+ else:
+ # Write a basic summary file. Ideally, we should not generate a summary.md file. However, Helix will report
+ # errors when the Helix work item fails to upload this specified file if it doesn't exist. We should change the
+ # upload to be conditional, or otherwise not error.
+ with open(target, "a") as f:
+ f.write("")
def main(main_args):
@@ -135,154 +324,31 @@ def main(main_args):
main_args ([type]): Arguments to the script
"""
- python_path = sys.executable
- script_dir = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
coreclr_args = setup_args(main_args)
- # It doesn't really matter where we put the downloaded SPMI artifacts.
- # Here, they are put in /artifacts/spmi.
- spmi_location = os.path.join(script_dir, "artifacts", "spmi")
-
- log_directory = coreclr_args.log_directory
- platform_name = coreclr_args.platform
-
- # Find the built jit-analyze and put its directory on the PATH
- jit_analyze_dir = os.path.join(script_dir, "jit-analyze")
- if not os.path.isdir(jit_analyze_dir):
- print("Error: jit-analyze not found in {} (continuing)".format(jit_analyze_dir))
- else:
- # Put the jit-analyze directory on the PATH so superpmi.py can find it.
- print("Adding {} to PATH".format(jit_analyze_dir))
- os.environ["PATH"] = jit_analyze_dir + os.pathsep + os.environ["PATH"]
-
- # Find the portable `git` installation, and put `git.exe` on the PATH, for use by `jit-analyze`.
- git_directory = os.path.join(script_dir, "git", "cmd")
- git_exe_tool = os.path.join(git_directory, "git.exe")
- if not os.path.isfile(git_exe_tool):
- print("Error: `git` not found at {} (continuing)".format(git_exe_tool))
- else:
- # Put the git/cmd directory on the PATH so jit-analyze can find it.
- print("Adding {} to PATH".format(git_directory))
- os.environ["PATH"] = git_directory + os.pathsep + os.environ["PATH"]
-
- # Figure out which JITs to use
- os_name = "win" if platform_name.lower() == "windows" else "unix"
- arch_name = coreclr_args.arch
- host_arch_name = "x64" if arch_name.endswith("64") else "x86"
- os_name = "universal" if arch_name.startswith("arm") else os_name
- base_checked_jit_path = os.path.join(coreclr_args.base_jit_directory, "checked", 'clrjit_{}_{}_{}.dll'.format(os_name, arch_name, host_arch_name))
- diff_checked_jit_path = os.path.join(coreclr_args.diff_jit_directory, "checked", 'clrjit_{}_{}_{}.dll'.format(os_name, arch_name, host_arch_name))
- base_release_jit_path = os.path.join(coreclr_args.base_jit_directory, "release", 'clrjit_{}_{}_{}.dll'.format(os_name, arch_name, host_arch_name))
- diff_release_jit_path = os.path.join(coreclr_args.diff_jit_directory, "release", 'clrjit_{}_{}_{}.dll'.format(os_name, arch_name, host_arch_name))
-
- # Core_Root is where the superpmi tools (superpmi.exe, mcs.exe) are expected to be found.
- # We pass the full path of the JITs to use as arguments.
- core_root_dir = script_dir
-
- print("Running superpmi.py download to get MCH files")
-
- log_file = os.path.join(log_directory, "superpmi_download_{}_{}.log".format(platform_name, arch_name))
- run_command([
- python_path,
- os.path.join(script_dir, "superpmi.py"),
- "download",
- "--no_progress",
- "-core_root", core_root_dir,
- "-target_os", platform_name,
- "-target_arch", arch_name,
- "-spmi_location", spmi_location,
- "-log_level", "debug",
- "-log_file", log_file
- ], _exit_on_fail=True)
-
- print("Running superpmi.py asmdiffs")
-
- log_file = os.path.join(log_directory, "superpmi_asmdiffs_{}_{}.log".format(platform_name, arch_name))
-
- overall_md_asmdiffs_summary_file = os.path.join(spmi_location, "diff_summary.md")
- if os.path.isfile(overall_md_asmdiffs_summary_file):
- os.remove(overall_md_asmdiffs_summary_file)
-
- _, _, return_code = run_command([
- python_path,
- os.path.join(script_dir, "superpmi.py"),
- "asmdiffs",
- "--no_progress",
- "-core_root", core_root_dir,
- "-target_os", platform_name,
- "-target_arch", arch_name,
- "-arch", host_arch_name,
- "-base_jit_path", base_checked_jit_path,
- "-diff_jit_path", diff_checked_jit_path,
- "-spmi_location", spmi_location,
- "-error_limit", "100",
- "-log_level", "debug",
- "-log_file", log_file])
-
- failed = False
- if return_code != 0:
- print("Failed during asmdiffs")
- failed = True
-
- print("Running superpmi.py tpdiff")
-
- log_file = os.path.join(log_directory, "superpmi_tpdiff_{}_{}.log".format(platform_name, arch_name))
-
- overall_md_tpdiff_summary_file = os.path.join(spmi_location, "tpdiff_summary.md")
- if os.path.isfile(overall_md_tpdiff_summary_file):
- os.remove(overall_md_tpdiff_summary_file)
-
- _, _, return_code = run_command([
- python_path,
- os.path.join(script_dir, "superpmi.py"),
- "tpdiff",
- "--no_progress",
- "-core_root", core_root_dir,
- "-target_os", platform_name,
- "-target_arch", arch_name,
- "-arch", host_arch_name,
- "-base_jit_path", base_release_jit_path,
- "-diff_jit_path", diff_release_jit_path,
- "-spmi_location", spmi_location,
- "-error_limit", "100",
- "-log_level", "debug",
- "-log_file", log_file])
-
- if return_code != 0:
- print("Failed during tpdiff")
- failed = True
-
- # If there are asm diffs, and jit-analyze ran, we'll get a diff_summary.md file in the spmi_location directory.
- # We make sure the file doesn't exist before we run diffs, so we don't need to worry about superpmi.py creating
- # a unique, numbered file. If there are no diffs, we still want to create this file and indicate there were no diffs.
-
- md_files = []
-
- overall_md_asmdiff_summary_file_target = os.path.join(log_directory, "superpmi_diff_summary_{}_{}.md".format(platform_name, arch_name))
- md_files.append((overall_md_asmdiffs_summary_file, overall_md_asmdiff_summary_file_target))
-
- overall_md_tpdiff_summary_file_target = os.path.join(log_directory, "superpmi_tpdiff_summary_{}_{}.md".format(platform_name, arch_name))
- md_files.append((overall_md_tpdiff_summary_file, overall_md_tpdiff_summary_file_target))
-
- for source, target in md_files:
- if os.path.isfile(source):
- try:
- print("Copying summary file {} -> {}".format(source, target))
- shutil.copy2(source, target)
- except PermissionError as pe_error:
- print('Ignoring PermissionError: {0}'.format(pe_error))
- else:
- # Write a basic summary file. Ideally, we should not generate a summary.md file. However, currently I'm seeing
- # errors where the Helix work item fails to upload this specified file if it doesn't exist. We should change the
- # upload to be conditional, or otherwise not error.
- with open(target, "a") as f:
- f.write("")
+ do_asmdiffs = False
+ do_tpdiff = False
+ if coreclr_args.type == 'asmdiffs':
+ do_asmdiffs = True
+ if coreclr_args.type == 'tpdiff':
+ do_tpdiff = True
+ if coreclr_args.type == 'all':
+ do_asmdiffs = True
+ do_tpdiff = True
+
+ diff = Diff(coreclr_args)
+
+ diff.download_mch()
+
+ if do_asmdiffs:
+ diff.do_asmdiffs()
+ if do_tpdiff:
+ diff.do_tpdiff()
- # Finally prepare files to upload from helix.
- copy_dasm_files(spmi_location, log_directory, "{}_{}".format(platform_name, arch_name))
+ diff.summarize()
- if failed:
- print("Failure in {}".format(log_file))
+ if diff.failed:
+ print("Failure")
return 1
return 0
diff --git a/src/coreclr/scripts/superpmi_diffs_setup.py b/src/coreclr/scripts/superpmi_diffs_setup.py
index 452444f2bf3ab1..9fbeb93143cb29 100644
--- a/src/coreclr/scripts/superpmi_diffs_setup.py
+++ b/src/coreclr/scripts/superpmi_diffs_setup.py
@@ -28,6 +28,7 @@
parser = argparse.ArgumentParser(description="description")
parser.add_argument("-arch", help="Architecture")
+parser.add_argument("-type", help="Type of diff (asmdiffs, tpdiff, all)")
parser.add_argument("-source_directory", help="Path to the root directory of the dotnet/runtime source tree")
parser.add_argument("-checked_directory", help="Path to the directory containing built checked binaries (e.g., /artifacts/bin/coreclr/windows.x64.Checked)")
parser.add_argument("-release_directory", help="Path to the directory containing built release binaries (e.g., /artifacts/bin/coreclr/windows.x64.Release)")
@@ -53,6 +54,11 @@ def setup_args(args):
lambda unused: True,
"Unable to set arch")
+ coreclr_args.verify(args,
+ "type",
+ lambda type: type in ["asmdiffs", "tpdiff", "all"],
+ "Invalid type \"{}\"".format)
+
coreclr_args.verify(args,
"source_directory",
os.path.isdir,
@@ -60,13 +66,40 @@ def setup_args(args):
coreclr_args.verify(args,
"checked_directory",
- os.path.isdir,
- "checked_directory doesn't exist")
+ lambda unused: True,
+ "Unable to set checked_directory")
coreclr_args.verify(args,
"release_directory",
- os.path.isdir,
- "release_directory doesn't exist")
+ lambda unused: True,
+ "Unable to set release_directory")
+
+ do_asmdiffs = False
+ do_tpdiff = False
+ if coreclr_args.type == 'asmdiffs':
+ do_asmdiffs = True
+ if coreclr_args.type == 'tpdiff':
+ do_tpdiff = True
+ if coreclr_args.type == 'all':
+ do_asmdiffs = True
+ do_tpdiff = True
+
+ use_checked = False
+ use_release = False
+ if do_asmdiffs:
+ use_checked = True
+ if do_tpdiff:
+ use_release = True
+
+ if use_checked:
+ if not os.path.isdir(coreclr_args.checked_directory):
+ print("checked_directory doesn't exist")
+ sys.exit(1)
+
+ if use_release:
+ if not os.path.isdir(coreclr_args.release_directory):
+ print("release_directory doesn't exist")
+ sys.exit(1)
return coreclr_args
@@ -96,6 +129,60 @@ def match_superpmi_tool_files(full_path):
return False
+
+def build_jit_analyze(coreclr_args, source_directory, jit_analyze_build_directory):
+ """ Build and publish jit-analyze for use by asmdiffs
+ """
+ try:
+ with TempDir() as jitutils_directory:
+ run_command(
+ ["git", "clone", "--quiet", "--depth", "1", "https://github.com/dotnet/jitutils", jitutils_directory])
+
+ # Make sure ".dotnet" directory exists, by running the script at least once
+ dotnet_script_name = "dotnet.cmd" if is_windows else "dotnet.sh"
+ dotnet_script_path = os.path.join(source_directory, dotnet_script_name)
+ run_command([dotnet_script_path, "--info"], jitutils_directory)
+
+ # Build jit-analyze only, and build it as a self-contained app (not framework-dependent).
+ # What target RID are we building? It depends on where we're going to run this code.
+ # The RID catalog is here: https://docs.microsoft.com/en-us/dotnet/core/rid-catalog.
+ # Windows x64 => win-x64
+ # Windows x86 => win-x86
+ # Windows arm32 => win-arm
+ # Windows arm64 => win-arm64
+ # Linux x64 => linux-x64
+ # Linux arm32 => linux-arm
+ # Linux arm64 => linux-arm64
+ # macOS x64 => osx-x64
+
+ # NOTE: we currently only support running on Windows x86/x64 (we don't pass the target OS)
+ RID = None
+ if coreclr_args.arch == "x86":
+ RID = "win-x86"
+ if coreclr_args.arch == "x64":
+ RID = "win-x64"
+
+ # Set dotnet path to run build
+ os.environ["PATH"] = os.path.join(source_directory, ".dotnet") + os.pathsep + os.environ["PATH"]
+
+ run_command([
+ "dotnet",
+ "publish",
+ "-c", "Release",
+ "--runtime", RID,
+ "--self-contained",
+ "--output", jit_analyze_build_directory,
+ os.path.join(jitutils_directory, "src", "jit-analyze", "jit-analyze.csproj")],
+ jitutils_directory)
+ except PermissionError as pe_error:
+ # Details: https://bugs.python.org/issue26660
+ print('Ignoring PermissionError: {0}'.format(pe_error))
+
+ jit_analyze_tool = os.path.join(jit_analyze_build_directory, "jit-analyze.exe")
+ if not os.path.isfile(jit_analyze_tool):
+ print('Error: {} not found'.format(jit_analyze_tool))
+ return 1
+
def main(main_args):
""" Prepare the Helix data for SuperPMI diffs Azure DevOps pipeline.
@@ -109,17 +196,22 @@ def main(main_args):
-- contains the baseline JITs (under checked and release folders)
\payload\diff
-- contains the diff JITs (under checked and release folders)
- \payload\jit-analyze
- -- contains the self-contained jit-analyze build (from dotnet/jitutils)
- \payload\git
- -- contains a Portable ("xcopy installable") `git` tool, downloaded from:
- https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/git/Git-2.32.0-64-bit.zip
- This is needed by jit-analyze to do `git diff` on the generated asm. The `\payload\git\cmd`
- directory is added to the PATH.
- NOTE: this only runs on Windows.
+ For `type == asmdiffs`:
+ \payload\jit-analyze
+ -- contains the self-contained jit-analyze build (from dotnet/jitutils)
+ \payload\git
+ -- contains a Portable ("xcopy installable") `git` tool, downloaded from:
+ https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/git/Git-2.32.0-64-bit.zip
+ This is needed by jit-analyze to do `git diff` on the generated asm. The `\payload\git\cmd`
+ directory is added to the PATH.
+ NOTE: this only runs on Windows.
Then, AzDO pipeline variables are set.
+ Note:
+ 1. asmdiffs uses Checked JITs, tpdiff uses Release JITs. Only the one needed is copied to the payload directory.
+ 2. Only asmdiffs needs jit-analyze and git
+
Args:
main_args ([type]): Arguments to the script
@@ -143,31 +235,54 @@ def main(main_args):
python_path = sys.executable
+ do_asmdiffs = False
+ do_tpdiff = False
+ if coreclr_args.type == 'asmdiffs':
+ do_asmdiffs = True
+ if coreclr_args.type == 'tpdiff':
+ do_tpdiff = True
+ if coreclr_args.type == 'all':
+ do_asmdiffs = True
+ do_tpdiff = True
+
+ use_checked = False
+ use_release = False
+ if do_asmdiffs:
+ use_checked = True
+ if do_tpdiff:
+ use_release = True
+
# CorrelationPayload directories
correlation_payload_directory = os.path.join(source_directory, "payload")
superpmi_scripts_directory = os.path.join(source_directory, 'src', 'coreclr', 'scripts')
base_jit_directory = os.path.join(correlation_payload_directory, "base")
- base_jit_checked_directory = os.path.join(base_jit_directory, "checked")
- base_jit_release_directory = os.path.join(base_jit_directory, "release")
diff_jit_directory = os.path.join(correlation_payload_directory, "diff")
- diff_jit_checked_directory = os.path.join(diff_jit_directory, "checked")
- diff_jit_release_directory = os.path.join(diff_jit_directory, "release")
- jit_analyze_build_directory = os.path.join(correlation_payload_directory, "jit-analyze")
- git_directory = os.path.join(correlation_payload_directory, "git")
+
+ if use_checked:
+ base_jit_checked_directory = os.path.join(base_jit_directory, "checked")
+ diff_jit_checked_directory = os.path.join(diff_jit_directory, "checked")
+ if use_release:
+ base_jit_release_directory = os.path.join(base_jit_directory, "release")
+ diff_jit_release_directory = os.path.join(diff_jit_directory, "release")
+
+ if do_asmdiffs:
+ jit_analyze_build_directory = os.path.join(correlation_payload_directory, "jit-analyze")
+ git_directory = os.path.join(correlation_payload_directory, "git")
######## Get the portable `git` package
- git_url = "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/git/Git-2.32.0-64-bit.zip"
+ if do_asmdiffs:
+ git_url = "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/git/Git-2.32.0-64-bit.zip"
- print('Downloading {} -> {}'.format(git_url, git_directory))
+ print('Downloading {} -> {}'.format(git_url, git_directory))
- urls = [ git_url ]
- # There are too many files to be verbose in the download and copy.
- download_files(urls, git_directory, verbose=False, display_progress=False)
- git_exe_tool = os.path.join(git_directory, "cmd", "git.exe")
- if not os.path.isfile(git_exe_tool):
- print('Error: `git` not found at {}'.format(git_exe_tool))
- return 1
+ urls = [ git_url ]
+ # There are too many files to be verbose in the download and copy.
+ download_files(urls, git_directory, verbose=False, display_progress=False)
+ git_exe_tool = os.path.join(git_directory, "cmd", "git.exe")
+ if not os.path.isfile(git_exe_tool):
+ print('Error: `git` not found at {}'.format(git_exe_tool))
+ return 1
######## Get SuperPMI python scripts
@@ -178,112 +293,75 @@ def main(main_args):
######## Get baseline JITs
- # Figure out which baseline checked JIT to use, and download it.
- if not os.path.exists(base_jit_checked_directory):
- os.makedirs(base_jit_checked_directory)
+ # Note: we only support downloading Windows versions of the JIT currently. To support downloading
+    # non-Windows JITs on a Windows machine, pass `-host_os <os>` to jitrollingbuild.py.
print("Fetching history of `main` branch so we can find the baseline JIT")
run_command(["git", "fetch", "--depth=500", "origin", "main"], source_directory, _exit_on_fail=True)
- # Note: we only support downloading Windows versions of the JIT currently. To support downloading
-    # non-Windows JITs on a Windows machine, pass `-host_os <os>` to jitrollingbuild.py.
- print("Running jitrollingbuild.py download to get baseline checked JIT")
- jit_rolling_build_script = os.path.join(superpmi_scripts_directory, "jitrollingbuild.py")
- _, _, return_code = run_command([
- python_path,
- jit_rolling_build_script,
- "download",
- "-arch", arch,
- "-build_type", "checked",
- "-target_dir", base_jit_checked_directory],
- source_directory)
- if return_code != 0:
- print('{} failed with {}'.format(jit_rolling_build_script, return_code))
- return return_code
-
- # Figure out which baseline release JIT to use, and download it.
- if not os.path.exists(base_jit_release_directory):
- os.makedirs(base_jit_release_directory)
-
- print("Running jitrollingbuild.py download to get baseline release JIT")
- jit_rolling_build_script = os.path.join(superpmi_scripts_directory, "jitrollingbuild.py")
- _, _, return_code = run_command([
- python_path,
- jit_rolling_build_script,
- "download",
- "-arch", arch,
- "-build_type", "release",
- "-target_dir", base_jit_release_directory],
- source_directory)
- if return_code != 0:
- print('{} failed with {}'.format(jit_rolling_build_script, return_code))
- return return_code
+ if use_checked:
+ if not os.path.exists(base_jit_checked_directory):
+ os.makedirs(base_jit_checked_directory)
+
+ print("Running jitrollingbuild.py download to get baseline checked JIT")
+ jit_rolling_build_script = os.path.join(superpmi_scripts_directory, "jitrollingbuild.py")
+ _, _, return_code = run_command([
+ python_path,
+ jit_rolling_build_script,
+ "download",
+ "-arch", arch,
+ "-build_type", "checked",
+ "-target_dir", base_jit_checked_directory],
+ source_directory)
+ if return_code != 0:
+ print('{} failed with {}'.format(jit_rolling_build_script, return_code))
+ return return_code
+
+ if use_release:
+ if not os.path.exists(base_jit_release_directory):
+ os.makedirs(base_jit_release_directory)
+
+ print("Running jitrollingbuild.py download to get baseline release JIT")
+ jit_rolling_build_script = os.path.join(superpmi_scripts_directory, "jitrollingbuild.py")
+ _, _, return_code = run_command([
+ python_path,
+ jit_rolling_build_script,
+ "download",
+ "-arch", arch,
+ "-build_type", "release",
+ "-target_dir", base_jit_release_directory],
+ source_directory)
+ if return_code != 0:
+ print('{} failed with {}'.format(jit_rolling_build_script, return_code))
+ return return_code
######## Get diff JITs
- print('Copying checked diff binaries {} -> {}'.format(checked_directory, diff_jit_checked_directory))
- copy_directory(checked_directory, diff_jit_checked_directory, verbose_copy=True, match_func=match_jit_files)
+ if use_checked:
+ print('Copying checked diff binaries {} -> {}'.format(checked_directory, diff_jit_checked_directory))
+ copy_directory(checked_directory, diff_jit_checked_directory, verbose_copy=True, match_func=match_jit_files)
- print('Copying release diff binaries {} -> {}'.format(release_directory, diff_jit_release_directory))
- copy_directory(release_directory, diff_jit_release_directory, verbose_copy=True, match_func=match_jit_files)
+ if use_release:
+ print('Copying release diff binaries {} -> {}'.format(release_directory, diff_jit_release_directory))
+ copy_directory(release_directory, diff_jit_release_directory, verbose_copy=True, match_func=match_jit_files)
######## Get SuperPMI tools
# Put the SuperPMI tools directly in the root of the correlation payload directory.
- print('Copying SuperPMI tools {} -> {}'.format(checked_directory, correlation_payload_directory))
- copy_directory(checked_directory, correlation_payload_directory, verbose_copy=True, match_func=match_superpmi_tool_files)
-
- ######## Clone and build jitutils: we only need jit-analyze
+ # If both use_checked and use_release are set, use the checked SuperPMI tools.
- try:
- with TempDir() as jitutils_directory:
- run_command(
- ["git", "clone", "--quiet", "--depth", "1", "https://github.com/dotnet/jitutils", jitutils_directory])
+ if use_checked:
+ print('Copying SuperPMI tools {} -> {}'.format(checked_directory, correlation_payload_directory))
+ copy_directory(checked_directory, correlation_payload_directory, verbose_copy=True, match_func=match_superpmi_tool_files)
- # Make sure ".dotnet" directory exists, by running the script at least once
- dotnet_script_name = "dotnet.cmd" if is_windows else "dotnet.sh"
- dotnet_script_path = os.path.join(source_directory, dotnet_script_name)
- run_command([dotnet_script_path, "--info"], jitutils_directory)
+ elif use_release:
+ print('Copying SuperPMI tools {} -> {}'.format(release_directory, correlation_payload_directory))
+ copy_directory(release_directory, correlation_payload_directory, verbose_copy=True, match_func=match_superpmi_tool_files)
- # Build jit-analyze only, and build it as a self-contained app (not framework-dependent).
- # What target RID are we building? It depends on where we're going to run this code.
- # The RID catalog is here: https://docs.microsoft.com/en-us/dotnet/core/rid-catalog.
- # Windows x64 => win-x64
- # Windows x86 => win-x86
- # Windows arm32 => win-arm
- # Windows arm64 => win-arm64
- # Linux x64 => linux-x64
- # Linux arm32 => linux-arm
- # Linux arm64 => linux-arm64
- # macOS x64 => osx-x64
-
- # NOTE: we currently only support running on Windows x86/x64 (we don't pass the target OS)
- RID = None
- if arch == "x86":
- RID = "win-x86"
- if arch == "x64":
- RID = "win-x64"
-
- # Set dotnet path to run build
- os.environ["PATH"] = os.path.join(source_directory, ".dotnet") + os.pathsep + os.environ["PATH"]
-
- run_command([
- "dotnet",
- "publish",
- "-c", "Release",
- "--runtime", RID,
- "--self-contained",
- "--output", jit_analyze_build_directory,
- os.path.join(jitutils_directory, "src", "jit-analyze", "jit-analyze.csproj")],
- jitutils_directory)
- except PermissionError as pe_error:
- # Details: https://bugs.python.org/issue26660
- print('Ignoring PermissionError: {0}'.format(pe_error))
+ ######## Clone and build jitutils: we only need jit-analyze
- jit_analyze_tool = os.path.join(jit_analyze_build_directory, "jit-analyze.exe")
- if not os.path.isfile(jit_analyze_tool):
- print('Error: {} not found'.format(jit_analyze_tool))
- return 1
+ if do_asmdiffs:
+ build_jit_analyze(coreclr_args, source_directory, jit_analyze_build_directory)
######## Set pipeline variables
diff --git a/src/coreclr/scripts/superpmi_diffs_summarize.py b/src/coreclr/scripts/superpmi_diffs_summarize.py
index d692cb48ce4568..0d4c80e1077127 100644
--- a/src/coreclr/scripts/superpmi_diffs_summarize.py
+++ b/src/coreclr/scripts/superpmi_diffs_summarize.py
@@ -22,6 +22,7 @@
parser.add_argument("-diff_summary_dir", help="Path to diff summary directory")
parser.add_argument("-arch", help="Architecture")
+parser.add_argument("-type", help="Type of diff (asmdiffs, tpdiff, all)")
def setup_args(args):
""" Setup the args.
@@ -46,10 +47,15 @@ def setup_args(args):
lambda unused: True,
"Unable to set arch")
+ coreclr_args.verify(args,
+ "type",
+ lambda type: type in ["asmdiffs", "tpdiff", "all"],
+ "Invalid type \"{}\"".format)
+
return coreclr_args
-def append_diff_file(f, arch, file_name, full_file_path, asmdiffs):
+def append_diff_file(f, arch, file_name, full_file_path):
""" Append a single summary file to the consolidated diff file.
Args:
@@ -57,7 +63,6 @@ def append_diff_file(f, arch, file_name, full_file_path, asmdiffs):
arch (string): architecture we ran on
file_name (string): base file name of file to append (not including path components)
full_file_path (string): full path to file to append
- asmdiffs (bool): whether this is asm diffs
Returns:
True if diffs were found in the file, False otherwise
@@ -67,11 +72,12 @@ def append_diff_file(f, arch, file_name, full_file_path, asmdiffs):
print("Appending {}".format(full_file_path))
# What platform is this file summarizing? We parse the filename itself, which is of the form:
-    # superpmi_diff_summary_<os>_<arch>.md
+    # superpmi_asmdiffs_summary_<os>_<arch>.md
+    # superpmi_tpdiff_summary_<os>_<arch>.md
diff_os = "unknown"
diff_arch = "unknown"
- match_obj = re.search(r'^superpmi_(tpdiff|diff)_summary_(.*)_(.*).md', file_name)
+ match_obj = re.search(r'^superpmi_(tpdiff|asmdiffs)_summary_(.*)_(.*).md', file_name)
if match_obj is not None:
diff_os = match_obj.group(2)
diff_arch = match_obj.group(3)
@@ -79,7 +85,7 @@ def append_diff_file(f, arch, file_name, full_file_path, asmdiffs):
with open(full_file_path, "r") as current_superpmi_md:
contents = current_superpmi_md.read()
- # Were there actually any asm diffs? We currently look to see if the file contains the text "No diffs found",
+        # Were there actually any asm diffs? We currently look to see if the file contains the text "<empty>",
# inserted by `superpmi_diffs.py`, instead of just not having a diff summary .md file.
# (A missing file has the same effect.)
        match_obj = re.search(r'^<empty>', contents)
@@ -107,7 +113,18 @@ def main(main_args):
diff_summary_dir = coreclr_args.diff_summary_dir
arch = coreclr_args.arch
- # Consolidate all superpmi_diff_summary_*.md in overall_diff_summary__.md
+ do_asmdiffs = False
+ do_tpdiff = False
+ if coreclr_args.type == 'asmdiffs':
+ do_asmdiffs = True
+ if coreclr_args.type == 'tpdiff':
+ do_tpdiff = True
+ if coreclr_args.type == 'all':
+ do_asmdiffs = True
+ do_tpdiff = True
+
+ # Consolidate all superpmi_asmdiffs_summary_*.md and superpmi_tpdiff_summary_*.md
+    # into overall_<type>_summary_windows_<arch>.md.
# (Don't name it "superpmi_xxx.md" or we might consolidate it into itself.)
# If there are no summary files found, add a "No diffs found" text to be explicit about that.
#
@@ -116,36 +133,40 @@ def main(main_args):
# We should create a job that depends on all the diff jobs, downloads all the .md file artifacts,
# and consolidates everything together in one file.
- any_asmdiffs_found = False
-
- final_md_path = os.path.join(diff_summary_dir, "overall_diff_summary_windows_{}.md".format(arch))
+ final_md_path = os.path.join(diff_summary_dir, "overall_{}_summary_windows_{}.md".format(coreclr_args.type, arch))
print("Consolidating final {}".format(final_md_path))
with open(final_md_path, "a") as f:
- f.write("""\
-# ASM diffs generated on Windows {}
-""".format(arch))
-
- for dirpath, _, files in os.walk(diff_summary_dir):
- for file_name in files:
- if file_name.startswith("superpmi_diff") and file_name.endswith(".md"):
- full_file_path = os.path.join(dirpath, file_name)
- if append_diff_file(f, arch, file_name, full_file_path, True):
- any_asmdiffs_found = True
-
- if not any_asmdiffs_found:
- f.write("No diffs found\n")
-
- f.write("\n\n#Throughput impact on Windows {}\n\n".format(arch))
- f.write("The following shows the impact on throughput " +
- "in terms of number of instructions executed inside the JIT. " +
- "Negative percentages/lower numbers are better.\n\n")
-
- for dirpath, _, files in os.walk(diff_summary_dir):
- for file_name in files:
- if file_name.startswith("superpmi_tpdiff") and file_name.endswith(".md"):
- full_file_path = os.path.join(dirpath, file_name)
- append_diff_file(f, arch, file_name, full_file_path, False)
+ if do_asmdiffs:
+ f.write("# ASM diffs generated on Windows {}\n\n".format(arch))
+
+ any_asmdiffs_found = False
+ for dirpath, _, files in os.walk(diff_summary_dir):
+ for file_name in files:
+ if file_name.startswith("superpmi_asmdiffs") and file_name.endswith(".md"):
+ full_file_path = os.path.join(dirpath, file_name)
+ if append_diff_file(f, arch, file_name, full_file_path):
+ any_asmdiffs_found = True
+
+ if not any_asmdiffs_found:
+ f.write("No asmdiffs found\n")
+
+ if do_tpdiff:
+ f.write("# Throughput impact on Windows {}\n\n".format(arch))
+ f.write("The following shows the impact on throughput " +
+ "in terms of number of instructions executed inside the JIT. " +
+ "Negative percentages/lower numbers are better.\n\n")
+
+ any_tpdiff_found = False
+ for dirpath, _, files in os.walk(diff_summary_dir):
+ for file_name in files:
+ if file_name.startswith("superpmi_tpdiff") and file_name.endswith(".md"):
+ full_file_path = os.path.join(dirpath, file_name)
+ if append_diff_file(f, arch, file_name, full_file_path):
+ any_tpdiff_found = True
+
+ if not any_tpdiff_found:
+ f.write("No throughput diffs found\n")
with open(final_md_path, "r") as f:
print(f.read())