Replace references to onnxruntime 'master' with 'main' in Dockerfiles. (#12550)

* Replace references to onnxruntime 'master' with 'main' in Dockerfiles.

* update dockerfiles/README.md
This commit is contained in:
Edward Chen 2022-08-16 14:13:05 -07:00 committed by GitHub
parent 9d10badc55
commit caabfcd920
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
10 changed files with 27 additions and 29 deletions

View file

@ -8,7 +8,7 @@
FROM ubuntu:18.04
ARG ONNXRUNTIME_REPO=https://github.com/Microsoft/onnxruntime
ARG ONNXRUNTIME_BRANCH=master
ARG ONNXRUNTIME_BRANCH=main
ENV DEBIAN_FRONTEND noninteractive
ENV MIGRAPHX_DISABLE_FAST_GELU=1
@ -28,7 +28,7 @@ RUN apt-get update &&\
rocblas half aria2 libnuma-dev
RUN aria2c -q -d /tmp -o cmake-3.21.0-linux-x86_64.tar.gz \
https://github.com/Kitware/CMake/releases/download/v3.21.0/cmake-3.21.0-linux-x86_64.tar.gz &&\
https://github.com/Kitware/CMake/releases/download/v3.21.0/cmake-3.21.0-linux-x86_64.tar.gz &&\
tar -zxf /tmp/cmake-3.21.0-linux-x86_64.tar.gz --strip=1 -C /usr
# Install rbuild
@ -56,4 +56,3 @@ RUN git clone --single-branch --branch ${ONNXRUNTIME_BRANCH} --recursive ${ONNXR
/bin/sh ./build.sh --cmake_extra_defines ONNXRUNTIME_VERSION=`cat ./VERSION_NUMBER` --config Release --parallel \
--skip_tests --build_wheel --use_rocm --rocm_version=4.5.2 --rocm_home /opt/rocm --use_migraphx &&\
pip install /code/onnxruntime/build/Linux/Release/dist/*.whl

View file

@ -7,7 +7,7 @@ FROM ubuntu:18.04
ARG PYTHON_VERSION=3.6
ARG ONNXRUNTIME_REPO=https://github.com/Microsoft/onnxruntime
ARG ONNXRUNTIME_SERVER_BRANCH=master
ARG ONNXRUNTIME_SERVER_BRANCH=main
ENV DEBIAN_FRONTEND noninteractive

View file

@ -13,21 +13,21 @@ ENV WORKDIR_PATH=/home/openvino
WORKDIR $WORKDIR_PATH
ENV DEBIAN_FRONTEND noninteractive
ARG DEVICE=CPU_FP32
ARG DEVICE=CPU_FP32
ARG ONNXRUNTIME_REPO=https://github.com/microsoft/onnxruntime.git
ARG ONNXRUNTIME_BRANCH=master
ARG ONNXRUNTIME_BRANCH=main
ENV InferenceEngine_DIR=${INTEL_OPENVINO_DIR}/runtime/cmake
USER root
RUN apt update; apt install -y git protobuf-compiler libprotobuf-dev
RUN git clone --recursive -b ${ONNXRUNTIME_BRANCH} ${ONNXRUNTIME_REPO}
RUN git clone --recursive -b ${ONNXRUNTIME_BRANCH} ${ONNXRUNTIME_REPO}
RUN /bin/sh onnxruntime/dockerfiles/scripts/install_common_deps.sh
RUN ln -s cmake-* cmake-dir
RUN python3 -m pip install wheel
ENV PATH=${WORKDIR_PATH}/cmake-dir/bin:$PATH
RUN pip3 install onnx
RUN cd onnxruntime && ./build.sh --config Release --update --build --parallel --use_openvino ${DEVICE} --build_shared_lib --build_wheel
RUN cd onnxruntime && ./build.sh --config Release --update --build --parallel --use_openvino ${DEVICE} --build_shared_lib --build_wheel
#Steps to download sources
RUN cat /etc/apt/sources.list | sed 's/^# deb-src/deb-src/g' > ./temp; mv temp /etc/apt/sources.list

View file

@ -11,7 +11,7 @@ ARG MY_ROOT=/code
ARG YUM_OV_PACKAGE=intel-openvino-runtime-centos7-2021.4.752.x86_64
ARG DEVICE=CPU_FP32
ARG ONNXRUNTIME_REPO=https://github.com/microsoft/onnxruntime
ARG ONNXRUNTIME_BRANCH=master
ARG ONNXRUNTIME_BRANCH=main
ENV INTEL_OPENVINO_DIR=/opt/intel/openvino_2021.4.752
ENV InferenceEngine_DIR=${INTEL_OPENVINO_DIR}/deployment_tools/inference_engine/share
@ -24,7 +24,7 @@ ENV HDDL_INSTALL_DIR=${INTEL_OPENVINO_DIR}/deployment_tools/inference_engine/ext
ENV LD_LIBRARY_PATH=${INTEL_OPENVINO_DIR}/deployment_tools/inference_engine/external/hddl/lib:$LD_LIBRARY_PATH
ENV LD_LIBRARY_PATH=/usr/local/lib:/usr/lib:/usr/local/lib64:/usr/lib64:/lib64:$LD_LIBRARY_PATH
# Install packages
# Install packages
RUN yum update -y && \
yum groupinstall "Development Tools" -y && \
yum install -y yum-utils autoconf automake libtool unzip udev wget zlib-devel libffi-devel openssl-devel boost-devel-1.53.0 && \
@ -42,7 +42,7 @@ RUN yum update -y && \
cd /opt/ && wget https://github.com/libusb/libusb/archive/v1.0.22.zip && \
unzip v1.0.22.zip && rm -rf v1.0.22.zip && cd /opt/libusb-1.0.22 && \
# bootstrap steps
./bootstrap.sh && \
./bootstrap.sh && \
./configure --disable-udev --enable-shared && \
make -j4 && \
# configure libusb1.0.22
@ -64,7 +64,7 @@ RUN yum update -y && \
cp /opt/intel/openvino_2021/deployment_tools/inference_engine/external/97-myriad-usbboot.rules /etc/udev/rules.d/ && \
ldconfig && \
# Install GPU runtime and drivers
cd ${MY_ROOT} && \
cd ${MY_ROOT} && \
mkdir /tmp/opencl && \
cd /tmp/opencl && \
yum install -y epel-release && \
@ -87,8 +87,8 @@ RUN yum update -y && \
cd /code/ && \
wget https://www.python.org/ftp/python/3.8.3/Python-3.8.3.tgz && tar xvf Python-3.8.3.tgz && \
cd Python-3.8*/ && ./configure && make && make install && \
cd ../ && mkdir -p /usr/bin/Python38 && ln -s Python-3.8.3/ /usr/bin/Python38 && \
# installing dependencies
cd ../ && mkdir -p /usr/bin/Python38 && ln -s Python-3.8.3/ /usr/bin/Python38 && \
# installing dependencies
yum install -y python3-lxml python3-six libusb.x86_64 && \
yum clean packages && yum clean all && rm -rf /var/cache/yum && \
# Build onnxruntime
@ -102,4 +102,4 @@ RUN yum update -y && \
cd $MY_ROOT && rm -rf onnxruntime Python-3* && \
cd ${MY_ROOT}/ && rm -rf cmake* && \
cd /usr/share/ && rm -rf gcc* && cd /usr/lib/ && rm -rf gcc cd && rm -rf .cache && \
cd ${INTEL_OPENVINO_DIR}/ && rm -rf documentation data_processing && cd deployment_tools/ && rm -rf tools
cd ${INTEL_OPENVINO_DIR}/ && rm -rf documentation data_processing && cd deployment_tools/ && rm -rf tools

View file

@ -7,7 +7,7 @@ FROM ubuntu:18.04
ARG DEVICE=CPU_FP32
ARG ONNXRUNTIME_REPO=https://github.com/microsoft/onnxruntime.git
ARG ONNXRUNTIME_BRANCH=master
ARG ONNXRUNTIME_BRANCH=main
WORKDIR /code
ARG MY_ROOT=/code

View file

@ -8,7 +8,7 @@
FROM ubuntu:18.04
ARG ONNXRUNTIME_REPO=https://github.com/Microsoft/onnxruntime
ARG ONNXRUNTIME_BRANCH=master
ARG ONNXRUNTIME_BRANCH=main
ENV DEBIAN_FRONTEND noninteractive
RUN apt-get clean && apt-get update && apt-get install -y locales
@ -23,7 +23,7 @@ RUN apt-get update && apt-get install -y gnupg2 --no-install-recommends curl &&
sh -c 'echo deb [arch=amd64] http://repo.radeon.com/rocm/apt/4.5.2/ ubuntu main > /etc/apt/sources.list.d/rocm.list'
RUN apt-get update &&\
apt-get install -y sudo git bash build-essential rocm-dev libpython3.6-dev python3-pip miopen-hip rocblas half aria2 libnuma-dev
apt-get install -y sudo git bash build-essential rocm-dev libpython3.6-dev python3-pip miopen-hip rocblas half aria2 libnuma-dev
RUN aria2c -q -d /tmp -o cmake-3.20.0-linux-x86_64.tar.gz \
https://github.com/Kitware/CMake/releases/download/v3.20.0/cmake-3.20.0-linux-x86_64.tar.gz &&\
@ -48,4 +48,3 @@ RUN git clone --single-branch --branch ${ONNXRUNTIME_BRANCH} --recursive ${ONNXR
ONNXRUNTIME_VERSION=$(cat ./VERSION_NUMBER) --use_rocm --rocm_home=/opt/rocm &&\
pip install /code/onnxruntime/build/Linux/Release/dist/*.whl &&\
cd ..

View file

@ -9,7 +9,7 @@ ARG TRT_CONTAINER_VERSION=21.12
FROM nvcr.io/nvidia/tensorrt:${TRT_CONTAINER_VERSION}-py3
ARG ONNXRUNTIME_REPO=https://github.com/Microsoft/onnxruntime
ARG ONNXRUNTIME_BRANCH=master
ARG ONNXRUNTIME_BRANCH=main
ARG CMAKE_CUDA_ARCHITECTURES=37;50;52;60;61;70;75;80
RUN apt-get update &&\
@ -27,4 +27,4 @@ RUN git clone --single-branch --branch ${ONNXRUNTIME_BRANCH} --recursive ${ONNXR
cd onnxruntime &&\
/bin/sh build.sh --parallel --build_shared_lib --cuda_home /usr/local/cuda --cudnn_home /usr/lib/x86_64-linux-gnu/ --use_tensorrt --tensorrt_home /usr/lib/x86_64-linux-gnu/ --config Release --build_wheel --skip_tests --skip_submodule_sync --cmake_extra_defines '"CMAKE_CUDA_ARCHITECTURES='${CMAKE_CUDA_ARCHITECTURES}'"' &&\
pip install /code/onnxruntime/build/Linux/Release/dist/*.whl &&\
cd ..
cd ..

View file

@ -7,7 +7,7 @@
FROM xilinx/vitis-ai-cpu:1.3.598
ARG ONNXRUNTIME_REPO=https://github.com/Microsoft/onnxruntime
ARG ONNXRUNTIME_BRANCH=master
ARG ONNXRUNTIME_BRANCH=main
ARG PYXIR_REPO=https://github.com/Xilinx/pyxir
ARG PYXIR_BRANCH=v0.2.0

View file

@ -10,7 +10,7 @@
**Platforms**
- ARM 32v7: [Dockerfile](Dockerfile.arm32v7), [Instructions](#arm-3264)
- ARM 64: [Dockerfile](Dockerfile.arm64), [Instructions](#arm-3264)
- ARM 64: [Dockerfile](Dockerfile.arm64), [Instructions](#arm-3264)
- NVIDIA Jetson TX1/TX2/Nano/Xavier: [Dockerfile](Dockerfile.jetson), [Instructions](#nvidia-jetson-tx1tx2nanoxavier)
**Other**
@ -88,7 +88,7 @@ git submodule update --init
### **1. Using pre-built container images for Python API**
The unified container image from [Dockerhub](https://hub.docker.com/repository/docker/openvino/onnxruntime_ep_ubuntu18) can be used to run an application on any of the target accelerators. In order to select the target accelerator, the application should explicitly specify the choice using the `device_type` configuration option for OpenVINO Execution provider. Refer to [OpenVINO EP runtime configuration documentation](https://www.onnxruntime.ai/docs/reference/execution-providers/OpenVINO-ExecutionProvider.html#summary-of-options) for details on specifying this option in the application code.
The unified container image from [Dockerhub](https://hub.docker.com/repository/docker/openvino/onnxruntime_ep_ubuntu18) can be used to run an application on any of the target accelerators. In order to select the target accelerator, the application should explicitly specify the choice using the `device_type` configuration option for OpenVINO Execution provider. Refer to [OpenVINO EP runtime configuration documentation](https://www.onnxruntime.ai/docs/reference/execution-providers/OpenVINO-ExecutionProvider.html#summary-of-options) for details on specifying this option in the application code.
If the `device_type` runtime config option is not explicitly specified, CPU will be chosen as the hardware target execution.
### **2. Building from Dockerfile**
@ -96,7 +96,7 @@ If the `device_type` runtime config option is not explicitly specified, CPU will
Retrieve your docker image in one of the following ways.
- Choose Dockerfile.openvino for Python API or Dockerfile.openvino-csharp for C# API as <Dockerfile> for building an OpenVINO 2021.3 based Docker image. Providing the docker build argument DEVICE enables the onnxruntime build for that particular device. You can also provide arguments ONNXRUNTIME_REPO and ONNXRUNTIME_BRANCH to test that particular repo and branch. Default repository is https://github.com/microsoft/onnxruntime and default branch is master.
- Choose Dockerfile.openvino for Python API or Dockerfile.openvino-csharp for C# API as <Dockerfile> for building an OpenVINO 2021.3 based Docker image. Providing the docker build argument DEVICE enables the onnxruntime build for that particular device. You can also provide arguments ONNXRUNTIME_REPO and ONNXRUNTIME_BRANCH to test that particular repo and branch. Default repository is https://github.com/microsoft/onnxruntime and default branch is main.
```
docker build --rm -t onnxruntime --build-arg DEVICE=$DEVICE -f <Dockerfile> .
```
@ -223,7 +223,7 @@ If the `device_type` runtime config option is not explicitly specified, CPU will
## ARM 32/64
The build instructions are similar to x86 CPU. But if you want to build them on a x86 machine, you need to install qemu-user-static system package (outside of docker instances) first. Then
1. Update submodules
```
git submodule update --init
@ -241,7 +241,7 @@ git submodule update --init
```
For ARM32, please use Dockerfile.arm32v7 instead of Dockerfile.arm64.
## NVIDIA Jetson TX1/TX2/Nano/Xavier:
These instructions are for [JetPack SDK 4.4](https://developer.nvidia.com/embedded/jetpack).
@ -299,7 +299,7 @@ git submodule update --init
docker run -it onnxruntime-nuphar
```
## MIGraphX
## MIGraphX
**Ubuntu 18.04, rocm4.5, AMDMIGraphX v1.2**
1. Build the docker image from the Dockerfile in this repository.

View file

@ -68,6 +68,6 @@ WORKDIR /workspace
# get ORT repo
ARG ONNXRUNTIME_REPO=https://github.com/microsoft/onnxruntime.git
ARG ONNXRUNTIME_BRANCH_OR_TAG=master
ARG ONNXRUNTIME_BRANCH_OR_TAG=main
RUN git clone --single-branch --branch=${ONNXRUNTIME_BRANCH_OR_TAG} --recurse-submodules ${ONNXRUNTIME_REPO} \
/workspace/onnxruntime