Pull Nightly Wheel File and Cleanup Perf (#11164)

* delete unused files

* only use one dockerfile, otherwise install

* Update pipeline file

* get other changes

* minimal packages

* update pull nightly variable

* try logical boolean

* test boolean

* have build ort as boolean

* case sensitive

* use the current head not the previous commit

* add helpful note
This commit is contained in:
Olivia Jain 2022-04-11 11:41:11 -07:00 committed by GitHub
parent 749c0ddd1e
commit ae243c2bb5
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
9 changed files with 69 additions and 163 deletions

View file

@ -1,18 +0,0 @@
#!/bin/bash
# Copy the ONNX Runtime wheel out of a built Docker image into ../dist and
# rename it to embed the TensorRT container version.
#   -t  TensorRT container version tag (e.g. 21.12)
#   -i  Docker image that contains the built wheel
while getopts t:i: parameter
do case "${parameter}"
in
t) TRT_CONTAINER=${OPTARG};;
i) ANUBIS_IMAGE=${OPTARG};;
esac
done
# copying wheel over: create a stopped container so we can docker-cp out of it.
# Expansions are quoted (SC2086) so image names/ids with unusual characters
# cannot be word-split or glob-expanded.
id=$(docker create "$ANUBIS_IMAGE")
docker cp "$id":/code/onnxruntime/build/Linux/Release/dist/ ../
docker rm -v "$id"
# adding trt container version into the wheel file name.
# ../dist/* is deliberately left unquoted so the glob expands; this assumes
# exactly one wheel was copied out of the image.
wheel_name=$(echo ../dist/*|sed 's/-cp/.'"$TRT_CONTAINER"'-cp/')
mv ../dist/* "$wheel_name"

View file

@ -1,38 +0,0 @@
# Upload-side driver for an Anubis TRT perf-comparison job:
#  1. acquires an AAD token via the OAuth2 client-credentials flow,
#  2. generates a read-only SAS URI for the uploaded wheel blob with the az CLI,
#  3. POSTs a TRT_PERF_COMPARE job referencing that SAS URI to the Anubis service.
# Parameters:
#   $file_folder   folder containing exactly one wheel file (only the first
#                  Get-ChildItem result is used)
#   $account_key   storage account key for anubiscustomerstorage
#   $trt_container TensorRT container version passed through as TRT_VERSION
#   $csc           AAD client secret for the client-credentials request
param([string]$file_folder, [string]$account_key, [string]$trt_container, [string]$csc)
Add-Type -AssemblyName System.Web
$wheel_file = [System.IO.Path]::GetFileName((Get-ChildItem $file_folder))
# Blob path layout: ort-trt-ep/<pipeline build number>/<wheel file name>
$ort_trt_ep_pkg_blob_path = 'ort-trt-ep/' + $env:BUILD_BUILDNUMBER + '/' + $wheel_file
# SAS expiry: 1 day from now.
$expiredays = New-TimeSpan -Days 1
$end = (Get-Date) + $expiredays
# OAuth2 client-credentials request body; the client secret comes in via $csc.
$body = @{grant_type='client_credentials'
client_id='bcb87687-5d9d-4c21-801e-317980c8b1d5'
client_secret=$csc
scope='api://2227e307-9325-4dbe-9894-5c3b25d62a2d/.default'}
$contentType = 'application/x-www-form-urlencoded'
$res = Invoke-WebRequest -Method POST -Uri https://login.microsoftonline.com/cc38825a-ff99-423f-bdde-dd14d00e33b8/oauth2/v2.0/token -body $body -ContentType $contentType | ConvertFrom-Json
Write-Host "Before send"
$token = $res.access_token
$headers = @{Authorization = "Bearer $token"}
# Read-only (r) HTTPS-only SAS for the wheel blob, valid until $end.
$ort_trt_ep_pkg_sas_uri = az storage blob generate-sas -c upload -n $ort_trt_ep_pkg_blob_path --account-name anubiscustomerstorage --account-key $account_key --full-uri --permissions r --expiry $end.ToString("yyyy-MM-ddTHH:mmZ") --https-only
# Strips the first and last characters — presumably the surrounding quotes in
# the az CLI output. TODO(review): confirm az output format here.
$ort_trt_ep_pkg_sas_uri = $ort_trt_ep_pkg_sas_uri.Substring(1, $ort_trt_ep_pkg_sas_uri.Length-2)
# Job payload understood by the Anubis mlperf endpoint.
$body_trt_perf_compare = @{
"Name"="TRT_PERF_COMPARE";
"Parameters" = @{
TRT_VERSION=$trt_container;
BUILD_NUMBER=$env:BUILD_BUILDNUMBER;
"ORT_TRT_EP_PKG_SAS_URI"=$ort_trt_ep_pkg_sas_uri};
}
$anubissvctesturl = "https://anubistest.azurewebsites.net/api/mlperf/jobs"
Write-Host ($body_trt_perf_compare|ConvertTo-Json)
Invoke-RestMethod -Method 'Post' -Uri $anubissvctesturl -Body ($body_trt_perf_compare|ConvertTo-Json) -ContentType "application/json" -Headers $headers -UseBasicParsing
# Echo the token-request parameters for the build log.
$body.Parameters

View file

@ -1,10 +0,0 @@
# Rebuild the ONNX Runtime TensorRT-EP wheel under Python 3.8 on top of an
# existing build image supplied via --build-arg IMAGE=<base image>.
ARG IMAGE
FROM ${IMAGE}
# Create a py38 conda env, discard the previous build tree, install the build
# dependencies, then rebuild the Release wheel with the TensorRT EP enabled.
# NOTE(review): assumes the base image provides conda, CUDA at /usr/local/cuda,
# TensorRT at /workspace/tensorrt, and the onnxruntime checkout — confirm.
RUN conda create -y -n py38 python=3.8 &&\
source activate py38 &&\
rm -rf onnxruntime/build &&\
pip install numpy coloredlogs flake8 onnx Cython onnxmltools sympy packaging psutil &&\
cd onnxruntime &&\
./build.sh --parallel --skip_tests --build_wheel --config Release --cuda_home /usr/local/cuda --use_tensorrt --tensorrt_home /workspace/tensorrt --cudnn_home /usr/lib/x86_64-linux-gnu/ --build --update --cmake_extra_defines ONNXRUNTIME_VERSION=$(cat ./VERSION_NUMBER) &&\
cd ..

View file

@ -1,12 +0,0 @@
# --------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# --------------------------------------------------------------
# Perf-test layer: adds the benchmarking dependencies on top of a base image
# (default "onnxruntime", overridable via --build-arg IMAGE=<base image>).
ARG IMAGE=onnxruntime
FROM ${IMAGE}
# protobuf for ONNX model handling, pciutils for GPU/device inspection, plus
# the Python packages the perf scripts import.
RUN apt-get install -y --no-install-recommends libprotobuf-dev protobuf-compiler pciutils &&\
pip install --upgrade pip &&\
pip install pandas coloredlogs numpy flake8 onnx Cython onnxmltools sympy packaging psutil

View file

@ -1,19 +1,14 @@
#!/bin/bash
# NOTE(review): this span is a unified-diff render with the +/- column lost.
# Adjacent near-duplicate lines are old-vs-new versions of the same line, not
# a runnable sequence: the first getopts line is the old option string, the
# second the new one; the p)/v) option lines were removed; of the three
# docker build commands, the first two are the removed two-stage build and the
# last is the new single build. Verify against the real build_image.sh.
while getopts o:p:b:i:t:v:c: parameter
while getopts o:b:i:t:c: parameter
do case "${parameter}"
in
o) TRT_DOCKERFILE_PATH=${OPTARG};;
p) PERF_DOCKERFILE_PATH=${OPTARG};;
b) ORT_BRANCH=${OPTARG};;
i) IMAGE_NAME=${OPTARG};;
t) TRT_CONTAINER=${OPTARG};;
v) TRT_VERSION=${OPTARG};;
c) CMAKE_CUDA_ARCHITECTURES=${OPTARG};;
esac
done
IMAGE=onnxruntime
docker build --no-cache -t $IMAGE --build-arg CMAKE_CUDA_ARCHITECTURES=$CMAKE_CUDA_ARCHITECTURES --build-arg TRT_CONTAINER_VERSION=$TRT_CONTAINER --build-arg ONNXRUNTIME_BRANCH=$ORT_BRANCH -f $TRT_DOCKERFILE_PATH .
docker build --no-cache --build-arg IMAGE=$IMAGE --build-arg CMAKE_CUDA_ARCHITECTURES=$CMAKE_CUDA_ARCHITECTURES --build-arg ONNXRUNTIME_BRANCH=$ORT_BRANCH --build-arg TRT_VERSION=$TRT_VERSION -t $IMAGE_NAME -f $PERF_DOCKERFILE_PATH .
docker build --no-cache -t $IMAGE_NAME --build-arg CMAKE_CUDA_ARCHITECTURES=$CMAKE_CUDA_ARCHITECTURES --build-arg TRT_CONTAINER_VERSION=$TRT_CONTAINER --build-arg ONNXRUNTIME_BRANCH=$ORT_BRANCH -f $TRT_DOCKERFILE_PATH .

View file

@ -1,12 +1,13 @@
#!/bin/bash
# NOTE(review): unified-diff render, +/- column lost. The two getopts lines
# are old vs new (the new one adds -b BUILD_ORT); the final four lines are the
# removed python3 invocations followed by the added python invocations. The
# embedded "@ -50,10 +51,19 @@" line is a diff hunk marker, not script text.
# Verify against the real perf.sh before treating this as runnable.
while getopts d:o:m:e:a: parameter
while getopts d:o:m:e:b:a: parameter
do case "${parameter}"
in
d) PERF_DIR=${OPTARG};;
o) OPTION=${OPTARG};;
m) MODEL_PATH=${OPTARG};;
e) EP_LIST=${OPTARG};;
b) BUILD_ORT=${OPTARG};;
a) OPTIONAL_ARGS=${OPTARG};;
esac
done
@ -50,10 +51,19 @@ download_files() {
}
# Install benchmark prerequisites; when BUILD_ORT is the literal string
# "False", install the pre-built nightly wheel(s) from Release/dist instead of
# using a locally built ORT.
setup() {
apt update
apt-get install -y --no-install-recommends pciutils
pip install --upgrade pip
pip install -r requirements.txt
if [ "$BUILD_ORT" = "False" ]
then
echo 'installing the nightly wheel file'
ls Release/dist/* | xargs -n 1 pip install
fi
cleanup_files
download_files
}
setup
python3 benchmark_wrapper.py -r validate -m $MODEL_PATH -o result/$OPTION $OPTIONAL_ARGS
python3 benchmark_wrapper.py -r benchmark -t 1200 -m $MODEL_PATH -o result/$OPTION $OPTIONAL_ARGS
python benchmark_wrapper.py -r validate -m $MODEL_PATH -o result/$OPTION $OPTIONAL_ARGS
python benchmark_wrapper.py -r benchmark -t 1200 -m $MODEL_PATH -o result/$OPTION $OPTIONAL_ARGS

View file

@ -0,0 +1,3 @@
onnxmltools
pandas
coloredlogs

View file

@ -1,7 +1,7 @@
#!/bin/bash
# NOTE(review): unified-diff render, +/- column lost. The two getopts lines
# are old vs new (-b BUILD_ORT added); "a) PERF_ARGUMENTS" was replaced by
# "a) BENCHMARK_ARGS"; of the two docker run lines, the first is the removed
# version and the second the added one (it forwards -b $BUILD_ORT to perf.sh).
# The embedded "@ -10,11 +10,12 @@" line is a diff hunk marker.
# Parse Arguments
while getopts d:o:m:p:e:v:a: parameter
while getopts d:o:m:p:e:v:b:a: parameter
do case "${parameter}"
in
d) DOCKER_IMAGE=${OPTARG};;
@ -10,11 +10,12 @@ m) MODEL_PATH=${OPTARG};;
p) PERF_DIR=${OPTARG};;
e) EP_LIST=${OPTARG};;
v) MODEL_VOLUME=${OPTARG};;
a) PERF_ARGUMENTS=${OPTARG};;
b) BUILD_ORT=${OPTARG};;
a) BENCHMARK_ARGS=${OPTARG};;
esac
done
# Variables
DOCKER_PERF_DIR='/perf/'
docker run --gpus all -v $PERF_DIR:$DOCKER_PERF_DIR -v $MODEL_VOLUME/$OPTION:$DOCKER_PERF_DIR$OPTION $DOCKER_IMAGE /bin/bash $DOCKER_PERF_DIR'perf.sh' -d $DOCKER_PERF_DIR -o $OPTION -m $MODEL_PATH -e "$EP_LIST" "$PERF_ARGUMENTS"
docker run --gpus all -v $PERF_DIR:$DOCKER_PERF_DIR -v $MODEL_VOLUME/$OPTION:$DOCKER_PERF_DIR$OPTION $DOCKER_IMAGE /bin/bash $DOCKER_PERF_DIR'perf.sh' -d $DOCKER_PERF_DIR -o $OPTION -m $MODEL_PATH -b $BUILD_ORT -e "$EP_LIST" "$BENCHMARK_ARGS"

View file

@ -3,7 +3,7 @@ parameters:
- name: BuildORT
displayName: Build ORT
type: boolean
default: true
default: false
- name: PostToDashboard
displayName: Post to Dashboard
@ -15,11 +15,6 @@ parameters:
type: boolean
default: true
- name: RunDocker
displayName: Run on Docker
type: boolean
default: true
- name: TrtVersion
displayName: TensorRT Version
type: string
@ -34,11 +29,6 @@ parameters:
type: boolean
default: false
- name: PublishWheel
displayName: Publish Wheel
type: boolean
default: false
- name: ModelGroups
type: object
default:
@ -58,79 +48,64 @@ jobs:
pool: 'Onnxruntime-Linux-GPU-TensorRT-Perf'
variables:
- name: trt_dockerfile
${{ if eq(parameters.TrtVersion, '8.2.1.8') }}:
value: Dockerfile.ubuntu_cuda11_4_tensorrt8_2
${{ if eq(parameters.TrtVersion, '8.0.1.6') }}:
value: Dockerfile.ubuntu_cuda11_4_tensorrt8_0
${{ if eq(parameters.TrtVersion, '7.2.3.4') }}:
value: Dockerfile.ubuntu_cuda11_4_tensorrt7_2
- name: trtContainer
${{ if eq(parameters.TrtVersion, '8.2.1.8') }}:
value: 21.12
${{ if eq(parameters.TrtVersion, '8.0.1.6') }}:
value: 21.07
${{ if eq(parameters.TrtVersion, '7.2.3.4') }}:
value: 20.12
- name: anubis_image
value: ort-master-py38
- name: trtContainerVersion
${{ if eq(parameters.RunNvidiaContainer, true)}}:
${{ if eq(parameters.TrtVersion, '8.2.1.8') }}:
value: 21.12
${{ if eq(parameters.TrtVersion, '8.0.1.6') }}:
value: 21.07
${{ if eq(parameters.TrtVersion, '7.2.3.4') }}:
value: 20.12
- name: build_args
${{ if eq(parameters.RunNvidiaContainer, true) }}:
value: Dockerfile.tensorrt-perf -t $(trtContainer) -v ${{parameters.TrtVersion}} -o $(Build.SourcesDirectory)/dockerfiles/Dockerfile.tensorrt
${{ if ne(parameters.RunNvidiaContainer, true) }}:
value: Dockerfile.custom-trt-perf -t ${{ parameters.TrtVersion }} -o $(Build.SourcesDirectory)/tools/ci_build/github/linux/docker/$(trt_dockerfile)
- name: dockerfile
${{ if eq(parameters.RunNvidiaContainer, true)}}:
value: nvcr.io/nvidia/tensorrt:$(trtContainerVersion)-py3
${{ if eq(parameters.RunNvidiaContainer, false)}}:
${{ if eq(parameters.TrtVersion, '8.2.1.8') }}:
value: Dockerfile.ubuntu_cuda11_4_tensorrt8_2
${{ if eq(parameters.TrtVersion, '8.0.1.6') }}:
value: Dockerfile.ubuntu_cuda11_4_tensorrt8_0
${{ if eq(parameters.TrtVersion, '7.2.3.4') }}:
value: Dockerfile.ubuntu_cuda11_4_tensorrt7_2
- name: image
${{ if eq(parameters.BuildORT, true) }}:
value: ort-$(branch)
${{ if eq(parameters.BuildORT, false) }}:
value: $(dockerfile)
- name: environment
${{ if eq(parameters.RunDocker, true) }}:
value: docker.sh -d ort-$(branch) -p $(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf -v $(modelVolume)
${{ if ne(parameters.RunDocker, true) }}:
value: machine.sh
- name: with_arguments
value: $(environment) -e "$(epList)"
- name: optional_arguments
value: -a "-a -g $(optimizeGraph) -b $(bindInputs) -n $(enableCache)"
steps:
- ${{ if eq(parameters.BuildORT, false) }}:
- task: DownloadPipelineArtifact@2
inputs:
buildType: 'specific'
project: 'Lotus'
preferTriggeringPipeline: true
pipeline: '841'
runBranch: 'ref/heads/master'
buildVersionToDownload: 'latest'
allowPartiallySucceededBuilds: false
artifact: 'onnxruntime_gpu'
targetPath: '$(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/'
- script: 'docker pull $(dockerfile)'
displayName: 'Pull Nvidia Dockerfile'
- ${{ if eq(parameters.BuildORT, true) }}:
- ${{ if eq(parameters.RunDocker, true) }}:
- script: '$(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/build/build_image.sh -p $(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/build/$(build_args) -b $(branch) -i ort-$(branch) -c 75'
- ${{ if eq(parameters.RunNvidiaContainer, true) }}:
- script: '$(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/build/build_image.sh -o $(Build.SourcesDirectory)/dockerfiles/Dockerfile.tensorrt -b $(branch) -t $(trtContainerVersion) -i ort-$(branch) -c 75 '
displayName: 'Build latest ORT Image in Nvidia Container'
workingDirectory: '$(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/build'
- ${{ if eq(parameters.RunNvidiaContainer, false) }}:
- script: '$(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/build/build_image.sh -o $(Build.SourcesDirectory)/tools/ci_build/github/linux/docker/$(dockerfile) -b $(branch) -t $(trtContainerVersion) -i ort-$(branch) -c 75 '
displayName: 'Build latest ORT Image'
workingDirectory: '$(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/build'
- ${{ if ne(parameters.RunDocker, true) }}:
- script: 'python3 $(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/build/ort_build_latest.py -b $(branch) -c /usr/local/cuda -o ~/repos/onnxruntime/ -t ${{ parameters.TrtVersion }}'
displayName: 'Build latest ORT'
workingDirectory: '$(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/build'
- ${{ if eq(parameters.PublishWheel, true) }}:
- script: 'docker build --build-arg IMAGE=ort-master -t $(anubis_image) -f $(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/build/Dockerfile.python38 .'
displayName: 'Build Python 3.8 Images'
workingDirectory: '$(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/build'
- script: '$(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/anubis/copy_wheel.sh -t $(trtContainer) -i $(anubis_image)'
displayName: 'Copy Wheel from Docker'
workingDirectory: '$(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/anubis'
- script: 'OUTPUT=$(ls dist) && az storage blob upload --account-name anubiscustomerstorage --account-key $(account-key) --container-name upload --file $(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/dist/*.whl --name ort-trt-ep/$(Build.BuildNumber)/$OUTPUT'
displayName: 'Upload Wheel File'
workingDirectory: '$(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/'
- task: PowerShell@2
inputs:
filePath: '$(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/anubis/start_job.ps1'
arguments: '-file_folder $(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/dist -account_key $(account-key) -trt_container $(trtContainer) -csc $(csc)'
displayName: 'Start Anubis Job'
- ${{ each option in parameters.ModelGroups }}:
- script: '$(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/run_perf_$(with_arguments) -o ${{option}} -m $(${{option}}) $(optional_arguments)'
- script: '$(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/run_perf_docker.sh -d $(image) -p $(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf -v $(modelVolume) -b ${{ parameters.BuildORT }} -o ${{option}} -m $(${{option}}) -e "$(epList)" $(optional_arguments)'
displayName: '${{option}} perf'
workingDirectory: '$(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/'
@ -174,7 +149,7 @@ jobs:
scriptLocation: inlineScript
scriptType: bash
inlineScript: |
short_hash=$(git rev-parse --short HEAD^) &&
short_hash=$(git rev-parse --short HEAD) &&
commit_date=$(git log -1 --date=short --pretty=format:%cd) &&
python3 $(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/post.py -r $(Build.SourcesDirectory)/Artifact/result -c $short_hash -d $commit_date -u "$(reportUrl)?buildId=$(Build.BuildId)" -t ${{ parameters.TrtVersion }} -b $(branch)