mirror of
https://github.com/saymrwulf/onnxruntime.git
synced 2026-05-14 20:48:00 +00:00
Add helper to run CIs for a branch using az pipelines. (#16843)
### Description Add helper to run CIs for a branch using `az pipelines`. This can be used to easily kick off multiple CIs for a branch prior to creating a PR. Update run_CIs_for_external_pr.py so the CI list can be shared. Request JSON output from `gh pr view` so the current state is more easily parsed.
This commit is contained in:
parent
46c4d7fe4a
commit
c12a20bef9
2 changed files with 185 additions and 59 deletions
116
tools/python/run_CIs_for_branch.py
Normal file
116
tools/python/run_CIs_for_branch.py
Normal file
|
|
@ -0,0 +1,116 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License.
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import typing
|
||||
|
||||
from run_CIs_for_external_pr import get_pipeline_names
|
||||
from util.platform_helpers import is_windows
|
||||
|
||||
|
||||
def _parse_args():
|
||||
parser = argparse.ArgumentParser(
|
||||
os.path.basename(__file__),
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
description="""Run the CIs used to validate PRs for the specified branch.
|
||||
|
||||
If specified, the `--include` filter is applied first, followed by any `--exclude` filter.
|
||||
|
||||
Requires the Azure CLI with DevOps extension to be installed.
|
||||
Azure CLI: https://learn.microsoft.com/en-us/cli/azure/install-azure-cli
|
||||
DevOps extension: https://github.com/Azure/azure-devops-cli-extension
|
||||
|
||||
Configuration:
|
||||
Login:`az login`
|
||||
Configure ORT repo as default:
|
||||
`az devops configure --defaults organization=https://dev.azure.com/onnxruntime project=onnxruntime`
|
||||
|
||||
Example usage:
|
||||
List all CIs
|
||||
`python run_CIs_for_branch.py --dry-run my/BranchName`
|
||||
Run all CIs
|
||||
`python run_CIs_for_branch.py my/BranchName`
|
||||
Run only Linux CIs
|
||||
`python run_CIs_for_branch.py --include linux my/BranchName`
|
||||
Exclude training CIs
|
||||
`python run_CIs_for_branch.py --exclude training my/BranchName`
|
||||
Run non-training Linux CIs
|
||||
`python run_CIs_for_branch.py --include linux --exclude training my/BranchName`
|
||||
""",
|
||||
)
|
||||
|
||||
parser.add_argument("-i", "--include", type=str, help="Include CIs that match this string. Case insensitive.")
|
||||
parser.add_argument("-e", "--exclude", type=str, help="Exclude CIs that match this string. Case insensitive.")
|
||||
parser.add_argument("--dry-run", action="store_true", help="Print selected CIs but do not run them.")
|
||||
parser.add_argument("branch", type=str, help="Specify the branch to run.")
|
||||
|
||||
args = parser.parse_args()
|
||||
return args
|
||||
|
||||
|
||||
def _run_az_pipelines_command(command: typing.List[str]):
    """Run `az pipelines <command>` and return the CompletedProcess.

    Exits the script with a message if the Azure CLI is not installed or the
    command fails; stdout/stderr are captured as text on the returned object.
    """
    # On Windows the CLI entry point is a .cmd shim, which subprocess must be given explicitly.
    az = "az.cmd" if is_windows() else "az"
    try:
        az_output = subprocess.run([az, "pipelines", *command], capture_output=True, text=True, check=True)
    except FileNotFoundError:
        # Previously an uninstalled CLI produced a raw traceback; give a helpful message instead.
        print(
            "Could not find the Azure CLI. "
            "See https://learn.microsoft.com/en-us/cli/azure/install-azure-cli for installation instructions."
        )
        sys.exit(-1)
    except subprocess.CalledProcessError as cpe:
        print(cpe)
        print(cpe.stderr)
        sys.exit(-1)

    return az_output
|
||||
|
||||
|
||||
def main():
    """Queue the PR-validation CI pipelines for the branch given on the command line.

    The `--include` filter is applied first, then `--exclude`. With `--dry-run`
    the selected pipelines are printed and the script exits without queuing.
    """
    args = _parse_args()
    branch = args.branch

    # To debug available pipelines:
    # az_out = az_pipelines = _run_az_pipelines_command(["list"])
    # pipeline_info = json.loads(az_out.stdout)
    # print(pipeline_info)

    pipelines = get_pipeline_names()
    pipelines_to_run = []
    if args.include:
        value = args.include.lower().strip()
        for p in pipelines:
            if value in p.lower():
                print(f"Including {p}")
                pipelines_to_run.append(p)
    else:
        pipelines_to_run = pipelines

    if args.exclude:
        value = args.exclude.lower().strip()
        cur_pipelines = pipelines_to_run
        pipelines_to_run = []
        for p in cur_pipelines:
            if value in p.lower():
                print(f"Excluding {p}")
            else:
                pipelines_to_run.append(p)

    print("Pipelines to run:")
    for p in pipelines_to_run:
        print(f"\t{p}")

    if args.dry_run:
        sys.exit(0)

    for pipeline in pipelines_to_run:
        az_out = _run_az_pipelines_command(["run", "--branch", branch, "--name", pipeline])
        run_output = json.loads(az_out.stdout)
        if "id" in run_output:
            build_url = f"https://dev.azure.com/onnxruntime/onnxruntime/_build/results?buildId={run_output['id']}"
            print(f"{pipeline} build results: {build_url}&view=results")
        else:
            # BUG FIX: run_output is a dict, so `"..." + run_output` raised TypeError
            # instead of the intended ValueError. Report the raw az output instead.
            raise ValueError(f"Build id was not found in az output:\n{az_out.stdout}")
|
||||
|
||||
|
||||
# Script entry point: run only when executed directly, not when imported
# (run_CIs_for_external_pr.py imports helpers from this module's sibling).
if __name__ == "__main__":
    main()
|
||||
|
|
@ -3,71 +3,18 @@
|
|||
# Licensed under the MIT License.
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import typing
|
||||
|
||||
|
||||
def parse_args():
    """Parse and return the command-line arguments (the PR id to trigger CIs for)."""
    description = """Trigger CIs running for the specified pull request.

Requires the GitHub CLI to be installed. See https://github.com/cli/cli#installation for details.
After installation you will also need to setup an auth token to access the ONNX Runtime repository by running
`gh auth login`. Easiest is to run that from a directory in your local copy of the repo.
"""
    parser = argparse.ArgumentParser(
        os.path.basename(__file__),
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=description,
    )
    parser.add_argument("pr", help="Specify the pull request ID.")
    return parser.parse_args()
|
||||
|
||||
|
||||
def run_gh_pr_command(command: typing.List[str], check=True):
    """Execute `gh pr <command>` and return the CompletedProcess.

    When `check` is True a failing command prints the error and exits the script.
    """
    gh_args = ["gh", "pr", *command]
    try:
        completed = subprocess.run(gh_args, capture_output=True, text=True, check=check)
    except subprocess.CalledProcessError as cpe:
        print(cpe)
        print(cpe.stderr)
        sys.exit(-1)
    return completed
|
||||
|
||||
|
||||
def main():
|
||||
args = parse_args()
|
||||
pr_id = args.pr
|
||||
|
||||
# validate PR
|
||||
gh_out = run_gh_pr_command(["view", pr_id])
|
||||
info = gh_out.stdout.split("\n")
|
||||
for line in info:
|
||||
pieces = line.split("\t")
|
||||
if len(pieces) != 2:
|
||||
continue
|
||||
|
||||
if pieces[0] == "state:":
|
||||
if pieces[1] != "OPEN":
|
||||
print(f"PR {pr_id} is not OPEN. Currently in state {pieces[1]}.")
|
||||
sys.exit(-1)
|
||||
|
||||
print("Check passed pipelines")
|
||||
gh_out = run_gh_pr_command(["checks", pr_id, "--required"], check=False)
|
||||
# output format is a tab separated list of columns:
|
||||
# (pipeline name) "\t" (status) "\t" (ran time) "\t" (url)
|
||||
checked_pipelines = [
|
||||
columns[0]
|
||||
for columns in (line.strip().split("\t") for line in gh_out.stdout.split("\n"))
|
||||
if len(columns) == 4 and columns[1] == "pass"
|
||||
]
|
||||
|
||||
print("Adding azp run commands")
|
||||
|
||||
def get_pipeline_names():
|
||||
# Current pipelines. These change semi-frequently and may need updating.
|
||||
#
|
||||
# Note: there is no easy way to get the list for azp "required" pipelines before they starts.
|
||||
# we need to maintain this list manually.
|
||||
#
|
||||
# There is no easy way to get the list of "required" pipelines using `azp` before they are run,
|
||||
# so we need to maintain this list manually.
|
||||
# NOTE: This list is also used by run_CIs_for_branch.py
|
||||
pipelines = [
|
||||
# windows
|
||||
"Windows ARM64 QNN CI Pipeline",
|
||||
|
|
@ -91,7 +38,6 @@ def main():
|
|||
"orttraining-linux-gpu-ci-pipeline",
|
||||
"orttraining-ortmodule-distributed",
|
||||
# checks
|
||||
"onnxruntime-python-checks-ci-pipeline",
|
||||
"onnxruntime-binary-size-checks-ci-pipeline",
|
||||
# big models
|
||||
"Big Models",
|
||||
|
|
@ -101,14 +47,78 @@ def main():
|
|||
"ONNX Runtime React Native CI Pipeline",
|
||||
]
|
||||
|
||||
return pipelines
|
||||
|
||||
|
||||
def _parse_args():
|
||||
parser = argparse.ArgumentParser(
|
||||
os.path.basename(__file__),
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
description="""Trigger CIs running for the specified pull request.
|
||||
|
||||
Requires the GitHub CLI to be installed. See https://github.com/cli/cli#installation for details.
|
||||
After installation you will also need to setup an auth token to access the ONNX Runtime repository by running
|
||||
`gh auth login`. Easiest is to run that from a directory in your local copy of the repo.
|
||||
""",
|
||||
)
|
||||
parser.add_argument("pr", help="Specify the pull request ID.")
|
||||
args = parser.parse_args()
|
||||
return args
|
||||
|
||||
|
||||
def run_gh_pr_command(command: typing.List[str], check: bool = True):
    """Run `gh pr <command>` and return the CompletedProcess.

    Exits the script with a message if the GitHub CLI is not installed, or if
    `check` is True and the command fails. stdout/stderr are captured as text.
    """
    try:
        return subprocess.run(["gh", "pr", *command], capture_output=True, text=True, check=check)
    except FileNotFoundError:
        # Previously an uninstalled CLI produced a raw traceback; give a helpful message instead.
        print("Could not find the GitHub CLI. See https://github.com/cli/cli#installation for details.")
        sys.exit(-1)
    except subprocess.CalledProcessError as cpe:
        print(cpe)
        print(cpe.stderr)
        sys.exit(-1)
|
||||
|
||||
|
||||
def main():
|
||||
args = _parse_args()
|
||||
pr_id = args.pr
|
||||
|
||||
# validate PR
|
||||
print("Checking PR is open")
|
||||
gh_out = run_gh_pr_command(["view", "--json", "state", pr_id])
|
||||
info = json.loads(gh_out.stdout)
|
||||
if "state" not in info:
|
||||
print(f"Could not get current state from `gh pr view` response of\n{gh_out.stdout}")
|
||||
sys.exit(-1)
|
||||
|
||||
if info["state"] != "OPEN":
|
||||
print(f"PR {pr_id} is not OPEN. Currently in state {info['state']}.")
|
||||
sys.exit(0)
|
||||
|
||||
# This will return CIs that have run previously but not passed. We filter the CIs to run based on this, so it's
|
||||
# fine for the initial response to have no info in it.
|
||||
# `gh pr checks` exits with non-zero exit code when failures in pipeline exist, so we set `check` to False.
|
||||
print("Checking for pipelines that have passed.")
|
||||
gh_out = run_gh_pr_command(["checks", pr_id, "--required"], check=False)
|
||||
# output format is a tab separated list of columns:
|
||||
# (pipeline name) "\t" (status) "\t" (ran time) "\t" (url)
|
||||
checked_pipelines = [
|
||||
columns[0]
|
||||
for columns in (line.strip().split("\t") for line in gh_out.stdout.split("\n"))
|
||||
if len(columns) == 4 and columns[1] == "pass"
|
||||
]
|
||||
|
||||
pipelines = get_pipeline_names()
|
||||
|
||||
# remove pipelines that have already run successfully
|
||||
pipelines = [p for p in pipelines if p not in checked_pipelines]
|
||||
|
||||
print("Pipelines to run:")
|
||||
for p in pipelines:
|
||||
print("\t" + p)
|
||||
|
||||
# azp run is limited to 10 pipelines at a time
|
||||
max_pipelines_per_comment = 10
|
||||
start = 0
|
||||
num_pipelines = len(pipelines)
|
||||
|
||||
print("Adding azp run commands")
|
||||
while start < num_pipelines:
|
||||
end = start + max_pipelines_per_comment
|
||||
if end > num_pipelines:
|
||||
|
|
|
|||
Loading…
Reference in a new issue