diff --git a/dockerfiles/Dockerfile.migraphx b/dockerfiles/Dockerfile.migraphx index 60d8eee6cf..19dac911a7 100644 --- a/dockerfiles/Dockerfile.migraphx +++ b/dockerfiles/Dockerfile.migraphx @@ -8,7 +8,7 @@ FROM ubuntu:18.04 ARG ONNXRUNTIME_REPO=https://github.com/Microsoft/onnxruntime -ARG ONNXRUNTIME_BRANCH=master +ARG ONNXRUNTIME_BRANCH=main ENV DEBIAN_FRONTEND noninteractive ENV MIGRAPHX_DISABLE_FAST_GELU=1 @@ -28,7 +28,7 @@ RUN apt-get update &&\ rocblas half aria2 libnuma-dev RUN aria2c -q -d /tmp -o cmake-3.21.0-linux-x86_64.tar.gz \ -https://github.com/Kitware/CMake/releases/download/v3.21.0/cmake-3.21.0-linux-x86_64.tar.gz &&\ +https://github.com/Kitware/CMake/releases/download/v3.21.0/cmake-3.21.0-linux-x86_64.tar.gz &&\ tar -zxf /tmp/cmake-3.21.0-linux-x86_64.tar.gz --strip=1 -C /usr # Install rbuild @@ -56,4 +56,3 @@ RUN git clone --single-branch --branch ${ONNXRUNTIME_BRANCH} --recursive ${ONNXR /bin/sh ./build.sh --cmake_extra_defines ONNXRUNTIME_VERSION=`cat ./VERSION_NUMBER` --config Release --parallel \ --skip_tests --build_wheel --use_rocm --rocm_version=4.5.2 --rocm_home /opt/rocm --use_migraphx &&\ pip install /code/onnxruntime/build/Linux/Release/dist/*.whl - diff --git a/dockerfiles/Dockerfile.nuphar b/dockerfiles/Dockerfile.nuphar index 2677f08267..8f2de96246 100644 --- a/dockerfiles/Dockerfile.nuphar +++ b/dockerfiles/Dockerfile.nuphar @@ -7,7 +7,7 @@ FROM ubuntu:18.04 ARG PYTHON_VERSION=3.6 ARG ONNXRUNTIME_REPO=https://github.com/Microsoft/onnxruntime -ARG ONNXRUNTIME_SERVER_BRANCH=master +ARG ONNXRUNTIME_SERVER_BRANCH=main ENV DEBIAN_FRONTEND noninteractive diff --git a/dockerfiles/Dockerfile.openvino b/dockerfiles/Dockerfile.openvino index a3647747e5..524cfeb86c 100644 --- a/dockerfiles/Dockerfile.openvino +++ b/dockerfiles/Dockerfile.openvino @@ -13,21 +13,21 @@ ENV WORKDIR_PATH=/home/openvino WORKDIR $WORKDIR_PATH ENV DEBIAN_FRONTEND noninteractive -ARG DEVICE=CPU_FP32 +ARG DEVICE=CPU_FP32 ARG 
ONNXRUNTIME_REPO=https://github.com/microsoft/onnxruntime.git -ARG ONNXRUNTIME_BRANCH=master +ARG ONNXRUNTIME_BRANCH=main ENV InferenceEngine_DIR=${INTEL_OPENVINO_DIR}/runtime/cmake USER root RUN apt update; apt install -y git protobuf-compiler libprotobuf-dev -RUN git clone --recursive -b ${ONNXRUNTIME_BRANCH} ${ONNXRUNTIME_REPO} +RUN git clone --recursive -b ${ONNXRUNTIME_BRANCH} ${ONNXRUNTIME_REPO} RUN /bin/sh onnxruntime/dockerfiles/scripts/install_common_deps.sh RUN ln -s cmake-* cmake-dir RUN python3 -m pip install wheel ENV PATH=${WORKDIR_PATH}/cmake-dir/bin:$PATH RUN pip3 install onnx -RUN cd onnxruntime && ./build.sh --config Release --update --build --parallel --use_openvino ${DEVICE} --build_shared_lib --build_wheel +RUN cd onnxruntime && ./build.sh --config Release --update --build --parallel --use_openvino ${DEVICE} --build_shared_lib --build_wheel #Steps to download sources RUN cat /etc/apt/sources.list | sed 's/^# deb-src/deb-src/g' > ./temp; mv temp /etc/apt/sources.list diff --git a/dockerfiles/Dockerfile.openvino-centos7 b/dockerfiles/Dockerfile.openvino-centos7 index e500f3ce5a..4644187e53 100755 --- a/dockerfiles/Dockerfile.openvino-centos7 +++ b/dockerfiles/Dockerfile.openvino-centos7 @@ -11,7 +11,7 @@ ARG MY_ROOT=/code ARG YUM_OV_PACKAGE=intel-openvino-runtime-centos7-2021.4.752.x86_64 ARG DEVICE=CPU_FP32 ARG ONNXRUNTIME_REPO=https://github.com/microsoft/onnxruntime -ARG ONNXRUNTIME_BRANCH=master +ARG ONNXRUNTIME_BRANCH=main ENV INTEL_OPENVINO_DIR=/opt/intel/openvino_2021.4.752 ENV InferenceEngine_DIR=${INTEL_OPENVINO_DIR}/deployment_tools/inference_engine/share @@ -24,7 +24,7 @@ ENV HDDL_INSTALL_DIR=${INTEL_OPENVINO_DIR}/deployment_tools/inference_engine/ext ENV LD_LIBRARY_PATH=${INTEL_OPENVINO_DIR}/deployment_tools/inference_engine/external/hddl/lib:$LD_LIBRARY_PATH ENV LD_LIBRARY_PATH=/usr/local/lib:/usr/lib:/usr/local/lib64:/usr/lib64:/lib64:$LD_LIBRARY_PATH -# Install packages +# Install packages RUN yum update -y && \ yum groupinstall 
"Development Tools" -y && \ yum install -y yum-utils autoconf automake libtool unzip udev wget zlib-devel libffi-devel openssl-devel boost-devel-1.53.0 && \ @@ -42,7 +42,7 @@ RUN yum update -y && \ cd /opt/ && wget https://github.com/libusb/libusb/archive/v1.0.22.zip && \ unzip v1.0.22.zip && rm -rf v1.0.22.zip && cd /opt/libusb-1.0.22 && \ # bootstrap steps - ./bootstrap.sh && \ + ./bootstrap.sh && \ ./configure --disable-udev --enable-shared && \ make -j4 && \ # configure libusb1.0.22 @@ -64,7 +64,7 @@ RUN yum update -y && \ cp /opt/intel/openvino_2021/deployment_tools/inference_engine/external/97-myriad-usbboot.rules /etc/udev/rules.d/ && \ ldconfig && \ # Install GPU runtime and drivers - cd ${MY_ROOT} && \ + cd ${MY_ROOT} && \ mkdir /tmp/opencl && \ cd /tmp/opencl && \ yum install -y epel-release && \ @@ -87,8 +87,8 @@ RUN yum update -y && \ cd /code/ && \ wget https://www.python.org/ftp/python/3.8.3/Python-3.8.3.tgz && tar xvf Python-3.8.3.tgz && \ cd Python-3.8*/ && ./configure && make && make install && \ - cd ../ && mkdir -p /usr/bin/Python38 && ln -s Python-3.8.3/ /usr/bin/Python38 && \ -# installing dependancies + cd ../ && mkdir -p /usr/bin/Python38 && ln -s Python-3.8.3/ /usr/bin/Python38 && \ +# installing dependancies yum install -y python3-lxml python3-six libusb.x86_64 && \ yum clean packages && yum clean all && rm -rf /var/cache/yum && \ # Build onnxruntime @@ -102,4 +102,4 @@ RUN yum update -y && \ cd $MY_ROOT && rm -rf onnxruntime Python-3* && \ cd ${MY_ROOT}/ && rm -rf cmake* && \ cd /usr/share/ && rm -rf gcc* && cd /usr/lib/ && rm -rf gcc cd && rm -rf .cache && \ - cd ${INTEL_OPENVINO_DIR}/ && rm -rf documentation data_processing && cd deployment_tools/ && rm -rf tools \ No newline at end of file + cd ${INTEL_OPENVINO_DIR}/ && rm -rf documentation data_processing && cd deployment_tools/ && rm -rf tools diff --git a/dockerfiles/Dockerfile.openvino-csharp b/dockerfiles/Dockerfile.openvino-csharp index 1daaa7c945..6f98b156a7 100644 --- 
a/dockerfiles/Dockerfile.openvino-csharp +++ b/dockerfiles/Dockerfile.openvino-csharp @@ -7,7 +7,7 @@ FROM ubuntu:18.04 ARG DEVICE=CPU_FP32 ARG ONNXRUNTIME_REPO=https://github.com/microsoft/onnxruntime.git -ARG ONNXRUNTIME_BRANCH=master +ARG ONNXRUNTIME_BRANCH=main WORKDIR /code ARG MY_ROOT=/code diff --git a/dockerfiles/Dockerfile.rocm b/dockerfiles/Dockerfile.rocm index 68412b0104..19b65d3e38 100644 --- a/dockerfiles/Dockerfile.rocm +++ b/dockerfiles/Dockerfile.rocm @@ -8,7 +8,7 @@ FROM ubuntu:18.04 ARG ONNXRUNTIME_REPO=https://github.com/Microsoft/onnxruntime -ARG ONNXRUNTIME_BRANCH=master +ARG ONNXRUNTIME_BRANCH=main ENV DEBIAN_FRONTEND noninteractive RUN apt-get clean && apt-get update && apt-get install -y locales @@ -23,7 +23,7 @@ RUN apt-get update && apt-get install -y gnupg2 --no-install-recommends curl && sh -c 'echo deb [arch=amd64] http://repo.radeon.com/rocm/apt/4.5.2/ ubuntu main > /etc/apt/sources.list.d/rocm.list' RUN apt-get update &&\ - apt-get install -y sudo git bash build-essential rocm-dev libpython3.6-dev python3-pip miopen-hip rocblas half aria2 libnuma-dev + apt-get install -y sudo git bash build-essential rocm-dev libpython3.6-dev python3-pip miopen-hip rocblas half aria2 libnuma-dev RUN aria2c -q -d /tmp -o cmake-3.20.0-linux-x86_64.tar.gz \ https://github.com/Kitware/CMake/releases/download/v3.20.0/cmake-3.20.0-linux-x86_64.tar.gz &&\ @@ -48,4 +48,3 @@ RUN git clone --single-branch --branch ${ONNXRUNTIME_BRANCH} --recursive ${ONNXR ONNXRUNTIME_VERSION=$(cat ./VERSION_NUMBER) --use_rocm --rocm_home=/opt/rocm &&\ pip install /code/onnxruntime/build/Linux/Release/dist/*.whl &&\ cd .. 
- diff --git a/dockerfiles/Dockerfile.tensorrt b/dockerfiles/Dockerfile.tensorrt index 47a47fc61d..8c31b999b6 100644 --- a/dockerfiles/Dockerfile.tensorrt +++ b/dockerfiles/Dockerfile.tensorrt @@ -9,7 +9,7 @@ ARG TRT_CONTAINER_VERSION=21.12 FROM nvcr.io/nvidia/tensorrt:${TRT_CONTAINER_VERSION}-py3 ARG ONNXRUNTIME_REPO=https://github.com/Microsoft/onnxruntime -ARG ONNXRUNTIME_BRANCH=master +ARG ONNXRUNTIME_BRANCH=main ARG CMAKE_CUDA_ARCHITECTURES=37;50;52;60;61;70;75;80 RUN apt-get update &&\ @@ -27,4 +27,4 @@ RUN git clone --single-branch --branch ${ONNXRUNTIME_BRANCH} --recursive ${ONNXR cd onnxruntime &&\ /bin/sh build.sh --parallel --build_shared_lib --cuda_home /usr/local/cuda --cudnn_home /usr/lib/x86_64-linux-gnu/ --use_tensorrt --tensorrt_home /usr/lib/x86_64-linux-gnu/ --config Release --build_wheel --skip_tests --skip_submodule_sync --cmake_extra_defines '"CMAKE_CUDA_ARCHITECTURES='${CMAKE_CUDA_ARCHITECTURES}'"' &&\ pip install /code/onnxruntime/build/Linux/Release/dist/*.whl &&\ - cd .. + cd .. 
diff --git a/dockerfiles/Dockerfile.vitisai b/dockerfiles/Dockerfile.vitisai index 7dcbaa403a..1cc73bb1e2 100644 --- a/dockerfiles/Dockerfile.vitisai +++ b/dockerfiles/Dockerfile.vitisai @@ -7,7 +7,7 @@ FROM xilinx/vitis-ai-cpu:1.3.598 ARG ONNXRUNTIME_REPO=https://github.com/Microsoft/onnxruntime -ARG ONNXRUNTIME_BRANCH=master +ARG ONNXRUNTIME_BRANCH=main ARG PYXIR_REPO=https://github.com/Xilinx/pyxir ARG PYXIR_BRANCH=v0.2.0 diff --git a/dockerfiles/README.md b/dockerfiles/README.md index 534b4bb668..7d8d661ad4 100644 --- a/dockerfiles/README.md +++ b/dockerfiles/README.md @@ -10,7 +10,7 @@ **Platforms** - ARM 32v7: [Dockerfile](Dockerfile.arm32v7), [Instructions](#arm-3264) -- ARM 64: [Dockerfile](Dockerfile.arm64), [Instructions](#arm-3264) +- ARM 64: [Dockerfile](Dockerfile.arm64), [Instructions](#arm-3264) - NVIDIA Jetson TX1/TX2/Nano/Xavier: [Dockerfile](Dockerfile.jetson), [Instructions](#nvidia-jetson-tx1tx2nanoxavier) **Other** @@ -88,7 +88,7 @@ git submodule update --init ### **1. Using pre-built container images for Python API** -The unified container image from [Dockerhub](https://hub.docker.com/repository/docker/openvino/onnxruntime_ep_ubuntu18) can be used to run an application on any of the target accelerators. In order to select the target accelerator, the application should explicitly specifiy the choice using the `device_type` configuration option for OpenVINO Execution provider. Refer to [OpenVINO EP runtime configuration documentation](https://www.onnxruntime.ai/docs/reference/execution-providers/OpenVINO-ExecutionProvider.html#summary-of-options) for details on specifying this option in the application code. +The unified container image from [Dockerhub](https://hub.docker.com/repository/docker/openvino/onnxruntime_ep_ubuntu18) can be used to run an application on any of the target accelerators. 
In order to select the target accelerator, the application should explicitly specify the choice using the `device_type` configuration option for OpenVINO Execution provider. Refer to [OpenVINO EP runtime configuration documentation](https://www.onnxruntime.ai/docs/reference/execution-providers/OpenVINO-ExecutionProvider.html#summary-of-options) for details on specifying this option in the application code. If the `device_type` runtime config option is not explicitly specified, CPU will be chosen as the hardware target execution. ### **2. Building from Dockerfile** @@ -96,7 +96,7 @@ If the `device_type` runtime config option is not explicitly specified, CPU will Retrieve your docker image in one of the following ways. - - Choose Dockerfile.openvino for Python API or Dockerfile.openvino-csharp for C# API as for building an OpenVINO 2021.3 based Docker image. Providing the docker build argument DEVICE enables the onnxruntime build for that particular device. You can also provide arguments ONNXRUNTIME_REPO and ONNXRUNTIME_BRANCH to test that particular repo and branch. Default repository is http://github.com/microsoft/onnxruntime and default branch is master. + - Choose Dockerfile.openvino for Python API or Dockerfile.openvino-csharp for C# API as for building an OpenVINO 2021.3 based Docker image. Providing the docker build argument DEVICE enables the onnxruntime build for that particular device. You can also provide arguments ONNXRUNTIME_REPO and ONNXRUNTIME_BRANCH to test that particular repo and branch. Default repository is http://github.com/microsoft/onnxruntime and default branch is main. ``` docker build --rm -t onnxruntime --build-arg DEVICE=$DEVICE -f . ``` @@ -223,7 +223,7 @@ If the `device_type` runtime config option is not explicitly specified, CPU will ## ARM 32/64 The build instructions are similar to x86 CPU. But if you want to build them on a x86 machine, you need to install qemu-user-static system package (outside of docker instances) first. 
Then - + 1. Update submodules ``` git submodule update --init @@ -241,7 +241,7 @@ git submodule update --init ``` For ARM32, please use Dockerfile.arm32v7 instead of Dockerfile.arm64. - + ## NVIDIA Jetson TX1/TX2/Nano/Xavier: These instructions are for [JetPack SDK 4.4](https://developer.nvidia.com/embedded/jetpack). @@ -299,7 +299,7 @@ git submodule update --init docker run -it onnxruntime-nuphar ``` -## MIGraphX +## MIGraphX **Ubuntu 18.04, rocm4.5, AMDMIGraphX v1.2** 1. Build the docker image from the Dockerfile in this repository. diff --git a/tools/android_custom_build/Dockerfile b/tools/android_custom_build/Dockerfile index d119514c92..23ce6f8fc7 100644 --- a/tools/android_custom_build/Dockerfile +++ b/tools/android_custom_build/Dockerfile @@ -68,6 +68,6 @@ WORKDIR /workspace # get ORT repo ARG ONNXRUNTIME_REPO=https://github.com/microsoft/onnxruntime.git -ARG ONNXRUNTIME_BRANCH_OR_TAG=master +ARG ONNXRUNTIME_BRANCH_OR_TAG=main RUN git clone --single-branch --branch=${ONNXRUNTIME_BRANCH_OR_TAG} --recurse-submodules ${ONNXRUNTIME_REPO} \ /workspace/onnxruntime