onnxruntime/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-packaging-pipeline.yml
Chi Lo 32ecbf4691
Create combined GPU tarball and zip file package (#8827)
* Add onnxruntime_providers_shared.dll into gpu nuget package

* Modify for test

* Temporarily remove for test

* Modify for test

* Modify for test

* Test packaging Windows combined GPU

* Test packaging Windows combined GPU

* Test packaging Windows combined GPU

* Test packaging Windows combined GPU

* modify for test

* modify for test

* fix bug

* Modify for test

* Modify for test

* Modify for test

* Modify for test

* Modify for test

* Modify for test

* Modify for test

* Modify for test

* Prepare for PR

* Prepare for PR

* Code refactor

* Rename proper Artifact name

* Rename intermediate Artifact names

* Revert Artifact Names

* Rename Artifact Names

* Modify Artifact name

* Modify Artifact name

* Modify Artifact name

* Update Java package

* Update Java package

* fix bug to change artifact name

* Fix bug for the wrong file path

* Fix no fetching correct artifact and test

* temporarily modify for test

* undo the change for test
2021-08-25 13:51:18 -07:00

66 lines
3.6 KiB
YAML

# Template inputs for the Linux GPU (TensorRT) C-API packaging pipeline.
parameters:
# Full artifact name, including the runtime version variable.
- name: artifactName
  type: string
  default: 'onnxruntime-linux-x64-gpu-tensorrt-$(OnnxRuntimeVersion)'
# Artifact name without the version suffix (used for intermediate staging).
- name: artifactNameNoVersionString
  type: string
  default: 'onnxruntime-linux-x64-gpu-tensorrt'
# When true, the Java bindings are packaged and published as well.
- name: buildJava
  type: boolean
  default: false
# Extra build.py flag(s) to enable the Java build (e.g. '--build_java');
# empty by default so the Java toolchain is not required.
- name: buildJavaOption
  type: string
  default: ''
jobs:
# Builds the Linux x64 GPU TensorRT C API package inside a
# manylinux2014 CUDA 11.4 / TensorRT 8.0 docker image, then packages
# and publishes the C (and optionally Java) artifacts.
- job: Linux_C_API_Packaging_GPU_TensorRT_x64
  workspace:
    clean: all
  timeoutInMinutes: 120
  pool: 'Onnxruntime-Linux-GPU'
  variables:
    # Quoted so the version is kept as a string, not parsed as a float.
    CUDA_VERSION: '11.4'
  steps:
  # Sets $(OnnxRuntimeVersion) and related version variables.
  - template: templates/set-version-number-variables-step.yml
  # Builds (or pulls) the manylinux2014 CUDA/TensorRT build image.
  - template: templates/get-docker-image-steps.yml
    parameters:
      Dockerfile: tools/ci_build/github/linux/docker/Dockerfile.manylinux2014_cuda11_4_tensorrt8_0
      Context: tools/ci_build/github/linux/docker
      DockerBuildArgs: "--network=host --build-arg POLICY=manylinux2014 --build-arg PLATFORM=x86_64 --build-arg DEVTOOLSET_ROOTPATH=/opt/rh/devtoolset-10/root --build-arg PREPEND_PATH=/opt/rh/devtoolset-10/root/usr/bin: --build-arg LD_LIBRARY_PATH_ARG=/opt/rh/devtoolset-10/root/usr/lib64:/opt/rh/devtoolset-10/root/usr/lib:/opt/rh/devtoolset-10/root/usr/lib64/dyninst:/opt/rh/devtoolset-10/root/usr/lib/dyninst:/usr/local/lib64 --build-arg BUILD_UID=$( id -u )"
      Repository: onnxruntimecuda114xtrt80build
  # Runs the Release build with --use_tensorrt inside the docker image.
  - task: CmdLine@2
    inputs:
      script: |
        mkdir -p $HOME/.onnx
        docker run --gpus all -e CC=/opt/rh/devtoolset-10/root/usr/bin/cc -e CXX=/opt/rh/devtoolset-10/root/usr/bin/c++ -e CFLAGS="-Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fstack-protector-strong -fstack-clash-protection -fcf-protection -O3 -Wl,--strip-all" -e CXXFLAGS="-Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fstack-protector-strong -fstack-clash-protection -fcf-protection -O3 -Wl,--strip-all" -e NVIDIA_VISIBLE_DEVICES=all --rm --volume /data/onnx:/data/onnx:ro --volume $(Build.SourcesDirectory):/onnxruntime_src --volume $(Build.BinariesDirectory):/build \
          --volume /data/models:/build/models:ro --volume $HOME/.onnx:/home/onnxruntimedev/.onnx -e NIGHTLY_BUILD onnxruntimecuda114xtrt80build \
          /opt/python/cp37-cp37m/bin/python3 /onnxruntime_src/tools/ci_build/build.py --build_dir /build --config Release \
          --skip_submodule_sync --parallel --build_shared_lib ${{ parameters.buildJavaOption }} --use_tensorrt --cuda_version=$(CUDA_VERSION) --cuda_home=/usr/local/cuda-$(CUDA_VERSION) --cudnn_home=/usr --tensorrt_home=/usr --cmake_extra_defines CMAKE_CUDA_HOST_COMPILER=/opt/rh/devtoolset-10/root/usr/bin/cc 'CMAKE_CUDA_ARCHITECTURES=37;50;52;60;61;70;75;80'
      workingDirectory: $(Build.SourcesDirectory)
  # Java packaging runs only when the buildJava parameter is true.
  - ${{ if eq(parameters.buildJava, true) }}:
    - template: templates/java-api-artifacts-package-and-publish-steps-posix.yml
      parameters:
        arch: 'linux-x64'
        buildConfig: 'Release'
        artifactName: 'onnxruntime-java-linux-x64-tensorrt'
        version: '$(OnnxRuntimeVersion)'
        libraryName: 'libonnxruntime.so'
        nativeLibraryName: 'libonnxruntime4j_jni.so'
  # Packages and publishes the C API shared library artifact.
  - template: templates/c-api-artifacts-package-and-publish-steps-posix.yml
    parameters:
      buildConfig: 'Release'
      artifactName: ${{ parameters.artifactName }}
      artifactNameNoVersionString: ${{ parameters.artifactNameNoVersionString }}
      libraryName: 'libonnxruntime.so.$(OnnxRuntimeVersion)'
      commitId: $(OnnxRuntimeGitCommitHash)
  # Component-governance scan; was 'parameters :'/'condition :' with a
  # space before the colon (yamllint `colons` violation) — now fixed.
  - template: templates/component-governance-component-detection-steps.yml
    parameters:
      condition: 'succeeded'
  - template: templates/clean-agent-build-directory-step.yml