mirror of
https://github.com/saymrwulf/onnxruntime.git
synced 2026-05-14 20:48:00 +00:00
Run Final_Jar_Testing_Linux_GPU in docker (#17533)
### Description 1. Create a package test image based on [RedHat UBI](https://www.redhat.com/en/blog/introducing-red-hat-universal-base-image) 2. Install TensorRT 8.6.1.6 in RedHat. (Ref. https://docs.nvidia.com/deeplearning/tensorrt/install-guide/index.html#maclearn-net-repo-install-rpm) 3. Run Final_Jar_Testing_Linux_GPU in docker (base image: nvidia/cuda:11.8.0-cudnn8-devel-ubi8) ### Motivation and Context [AB#18470](https://aiinfra.visualstudio.com/6a833879-cd9b-44a4-a9de-adc2d818f13c/_workitems/edit/18470) ### Verification https://dev.azure.com/aiinfra/Lotus/_build/results?buildId=354004&view=logs&j=8939b564-1402-57b5-92dc-510eba75e069&t=8939b564-1402-57b5-92dc-510eba75e069
This commit is contained in:
parent
a5302fec93
commit
377f959c69
4 changed files with 82 additions and 12 deletions
|
|
@@ -424,19 +424,32 @@ stages:
|
|||
- checkout: self
|
||||
submodules: false
|
||||
- template: templates/set-version-number-variables-step.yml
|
||||
- task: DownloadPipelineArtifact@2
|
||||
displayName: 'Download Final Jar'
|
||||
inputs:
|
||||
buildType: 'current'
|
||||
artifactName: 'onnxruntime-java-gpu'
|
||||
targetPath: '$(Build.BinariesDirectory)/final-jar'
|
||||
|
||||
- task: Bash@3
|
||||
- template: templates/flex-downloadPipelineArtifact.yml
|
||||
parameters:
|
||||
StepName: 'Download Final Jar'
|
||||
ArtifactName: onnxruntime-java-gpu
|
||||
TargetPath: '$(Build.BinariesDirectory)/final-jar'
|
||||
SpecificArtifact: ${{ parameters.specificArtifact }}
|
||||
BuildId: ${{ parameters.BuildId }}
|
||||
|
||||
- template: templates/get-docker-image-steps.yml
|
||||
parameters:
|
||||
Dockerfile: tools/ci_build/github/linux/docker/Dockerfile.package_ubi8_cuda11_8_tensorrt8_6
|
||||
Context: tools/ci_build/github/linux/docker/
|
||||
DockerBuildArgs: "--build-arg BUILD_UID=$( id -u )"
|
||||
Repository: onnxruntimeubi8packagestest
|
||||
UpdateDepsTxt: false
|
||||
|
||||
- bash: |
|
||||
docker run --rm \
|
||||
--gpus all \
|
||||
--volume $(Build.SourcesDirectory):/onnxruntime_src \
|
||||
--volume $(Build.BinariesDirectory):/build \
|
||||
--volume /data/models:/build/models:ro \
|
||||
onnxruntimeubi8packagestest \
|
||||
/bin/bash /onnxruntime_src/tools/ci_build/github/linux/java_linux_final_test.sh -r /build -v $(OnnxRuntimeVersion)
|
||||
displayName: 'Test'
|
||||
inputs:
|
||||
targetType: filePath
|
||||
filePath: 'tools/ci_build/github/linux/java_linux_final_test.sh'
|
||||
arguments: '-r $(Build.BinariesDirectory) -v $(OnnxRuntimeVersion)'
|
||||
|
||||
- template: templates/component-governance-component-detection-steps.yml
|
||||
parameters:
|
||||
|
|
|
|||
|
|
@@ -18,7 +18,7 @@ parameters:
|
|||
|
||||
steps:
|
||||
- task: DownloadPipelineArtifact@2
|
||||
displayName: ${{ parameters.StepName }}}
|
||||
displayName: ${{ parameters.StepName }}
|
||||
inputs:
|
||||
artifactName: ${{ parameters.ArtifactName}}
|
||||
targetPath: '${{ parameters.TargetPath }}'
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,45 @@
|
|||
# --------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# --------------------------------------------------------------
# Dockerfile to Test ONNX Runtime on UBI8 with CUDA 11.8 and TensorRT 8.6

# Build base image with required system packages
FROM nvidia/cuda:11.8.0-cudnn8-devel-ubi8 AS base

ENV PATH /usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/src/tensorrt/bin:${PATH}

RUN dnf install -y bash wget &&\
    dnf clean dbcache

# Install python3 and expose it under the versioned names the build scripts expect.
RUN dnf install -y \
    python3.8 \
    python38-pip \
    python38-wheel &&\
    cd /usr/local/bin &&\
    ln -s /usr/bin/python3 python3.8 &&\
    ln -s /usr/bin/pip3 pip3.8;

RUN pip3 install --upgrade pip
# Quote the requirement specifier: an unquoted '>' is shell output redirection,
# so `pip3 install setuptools>=41.0.0` would install an unpinned setuptools and
# dump pip's stdout into a file literally named '=41.0.0'.
RUN pip3 install "setuptools>=41.0.0"

# Install TensorRT, force the runtime libraries down to the tested 8.6.1.6
# build, and versionlock them so later dnf runs cannot silently upgrade past it.
# (Ref. https://docs.nvidia.com/deeplearning/tensorrt/install-guide/index.html#maclearn-net-repo-install-rpm)
RUN dnf install -y libnvinfer8 libnvonnxparsers8 libnvparsers8 libnvinfer-plugin8 libnvinfer-lean8 libnvinfer-vc-plugin8 libnvinfer-dispatch8
RUN v="8.6.1.6-1+cuda11.8" &&\
    dnf downgrade -y libnvinfer8-${v} libnvonnxparsers8-${v} libnvparsers8-${v} libnvinfer-plugin8-${v} libnvinfer-lean8-${v} libnvinfer-vc-plugin8-${v} libnvinfer-dispatch8-${v} &&\
    dnf install -y dnf-plugin-versionlock &&\
    dnf versionlock libnvinfer8 libnvonnxparsers8 libnvparsers8 libnvinfer-plugin8 libnvinfer-lean8 libnvinfer-vc-plugin8 libnvinfer-dispatch8
RUN dnf clean dbcache

# Run the shared provisioning scripts (dotnet + java), then remove them so
# they do not linger in the image.
ADD scripts /tmp/scripts
RUN cd /tmp/scripts && /tmp/scripts/install_dotnet.sh && /tmp/scripts/install_java.sh && rm -rf /tmp/scripts

# Build final image from base: run tests as a non-root user whose UID matches
# the CI agent (BUILD_UID is passed in via --build-arg).
FROM base as final
ARG BUILD_USER=onnxruntimedev
ARG BUILD_UID=1000
RUN adduser --uid $BUILD_UID $BUILD_USER
WORKDIR /home/$BUILD_USER
USER $BUILD_USER
|
||||
12
tools/ci_build/github/linux/docker/scripts/install_java.sh
Executable file
12
tools/ci_build/github/linux/docker/scripts/install_java.sh
Executable file
|
|
@@ -0,0 +1,12 @@
|
|||
#!/bin/bash
# Install OpenJDK 11 on either an RPM-based (RHEL/UBI, via dnf) or a
# Debian-based (Ubuntu, via apt-get) system; fail loudly anywhere else.
set -e -x

if [ -f /etc/redhat-release ]; then
    # RHEL/UBI and derivatives: install via dnf, then trim the package
    # cache to keep the image small.
    dnf install -y java-11-openjdk-devel \
        && dnf clean dbcache
elif command -v apt-get >/dev/null 2>&1; then
    # Debian/Ubuntu. Checking for apt-get itself (rather than the original
    # `[ -f /etc/os-release ]`, which is true on virtually every modern
    # Linux) guarantees we only take this branch where apt-get exists,
    # instead of dying with "command not found" on other distros.
    apt-get update && apt-get install -y openjdk-11-jdk
else
    echo "Unsupported OS"
    exit 1
fi
|
||||
Loading…
Reference in a new issue