mirror of
https://github.com/saymrwulf/onnxruntime.git
synced 2026-05-16 21:00:14 +00:00
* Enabling fasterrcnn variant and vehicle detector * changes for 2021_2 branch * yolov3_pytorch commit * fixed braces in basic_backend.cc * ci information added * faster rcnn variant and vehicle detector changes were made in 2021.1 and not in 2021.2 * some changes to support unit tests * disable some tests which are failing * fix myriad tests for vehicle detector * Did some cleanup *cleaned up comments *Disabled Add_Broadcast_0x1 and Add_Broadcast_1x0 tests on MYRIAD_FP16 backend due to a bug *cleaned up capability_2021_2.cc file *Removed extra conditions which were added for some validation in backend_utils Signed-off-by: MaajidKhan <n.maajidkhan@gmail.com> * yolov3 pytorch workaround to ensure that the output names are matched * gemmoptest fixed on myriad * Fixed MYRIADX CPP Test Failures *Expand,GatherND,Range,Round op's are only supported in model *where op with float input data types are not supported and fixed *Scatter and ScatterElements op's with negative axis are fixed *Reshape op with 0 dim value are not supported and fixed *Disabled InstanceNorm_2 test on MYRIADX Signed-off-by: MaajidKhan <n.maajidkhan@gmail.com> * make changes to yolov3 pytorch * Fixed python unit tests *Fixed failing python tests on vpu, GPU and CPU Signed-off-by: MaajidKhan <n.maajidkhan@gmail.com> * Fixes POW op failures on GPU_FP16 Signed-off-by: MaajidKhan <n.maajidkhan@gmail.com> * Clean up capability_2021_2.cc Signed-off-by: MaajidKhan <n.maajidkhan@gmail.com> * Updated docx for MultiThreading option *Added extra info on setting the num_of_threads option using the API and it's actual usage Signed-off-by: MaajidKhan <n.maajidkhan@gmail.com> * fixed slice and removed extra prints * Disabled failing python tests Signed-off-by: MaajidKhan <n.maajidkhan@gmail.com> * Minor changes added in capabilty_2021_2 Signed-off-by: MaajidKhan <n.maajidkhan@gmail.com> * made changes to slice to avoid failures * Disabling FP16 support for GPU_FP32 ->Inferencing an FP16 model on GPU_FP32 leads to 
accuracy mismatches. so, we would rather use GPU_FP16 to infer an FP16 model on GPU Device Signed-off-by: MaajidKhan <n.maajidkhan@gmail.com> * Updated docx for Inferencing a FP16 Model Signed-off-by: MaajidKhan <n.maajidkhan@gmail.com> * fix for mask rcnn * Script for installing openvino from source * Updated with openvino 2021.2 online installation * code comment fixes fixed accuracy mismatch for div * Update OpenvinoEP-ExecutionProvider.md updated for 2021.2 branch * Update README.md updated dockerfile documentation * Update BUILD.md build.md update documentation * permissiong change of install_openvino.sh * made changes to align with microsoft onnxruntime changes * Updated with ov 2021.2.200 Co-authored-by: suryasidd <surya.siddharth.pemmaraju@intel.com> Co-authored-by: sfatimar <sahar.fatima@intel/com> Co-authored-by: MaajidKhan <n.maajidkhan@gmail.com> Co-authored-by: mohdansx <mohdx.ansari@intel.com>
90 lines
5.4 KiB
Text
#-------------------------------------------------------------------------
# Copyright(C) 2019 Intel Corporation.
# Licensed under the MIT License.
#--------------------------------------------------------------------------

# Ubuntu 18.04 is required by the intel-openvino-dev-ubuntu18 packages
# installed below.
FROM ubuntu:18.04

# Build-time knobs: OpenVINO target device for the ONNX Runtime build, and
# which ONNX Runtime repo/branch to clone.
ARG DEVICE=CPU_FP32
ARG ONNXRUNTIME_REPO=https://github.com/microsoft/onnxruntime.git
ARG ONNXRUNTIME_BRANCH=master

WORKDIR /code
ARG MY_ROOT=/code

# Tool lookup paths. miniconda and cmake are installed under these paths by
# install_common_deps.sh during the build step below.
# (Fixed: legacy space-separated `ENV key value` form replaced by `key=value`.)
ENV PATH=/opt/miniconda/bin:/code/cmake-3.14.3-Linux-x86_64/bin:$PATH
ENV LD_LIBRARY_PATH=/opt/miniconda/lib:/usr/lib:/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH

# OpenVINO 2021.2.200 install layout: CMake package locations and runtime
# library paths for the inference engine, nGraph, OpenCV and HDDL plugins.
ENV INTEL_OPENVINO_DIR=/opt/intel/openvino_2021.2.200
ENV InferenceEngine_DIR=${INTEL_OPENVINO_DIR}/deployment_tools/inference_engine/share
ENV IE_PLUGINS_PATH=${INTEL_OPENVINO_DIR}/deployment_tools/inference_engine/lib/intel64
ENV LD_LIBRARY_PATH=/opt/intel/opencl:${INTEL_OPENVINO_DIR}/inference_engine/external/gna/lib:${INTEL_OPENVINO_DIR}/deployment_tools/inference_engine/external/mkltiny_lnx/lib:$INTEL_OPENVINO_DIR/deployment_tools/ngraph/lib:${INTEL_OPENVINO_DIR}/deployment_tools/inference_engine/external/omp/lib:${INTEL_OPENVINO_DIR}/deployment_tools/inference_engine/external/tbb/lib:${IE_PLUGINS_PATH}:${LD_LIBRARY_PATH}
ENV OpenCV_DIR=${INTEL_OPENVINO_DIR}/opencv/share/OpenCV
ENV LD_LIBRARY_PATH=${INTEL_OPENVINO_DIR}/opencv/lib:${INTEL_OPENVINO_DIR}/opencv/share/OpenCV/3rdparty/lib:${LD_LIBRARY_PATH}
ENV HDDL_INSTALL_DIR=${INTEL_OPENVINO_DIR}/deployment_tools/inference_engine/external/hddl
ENV LD_LIBRARY_PATH=${INTEL_OPENVINO_DIR}/deployment_tools/inference_engine/external/hddl/lib:$LD_LIBRARY_PATH

ENV LANG=en_US.UTF-8
# NOTE(review): DEBIAN_FRONTEND stays in the runtime env of derived images;
# harmless for a build image like this one, but flagged per best practice.
ENV DEBIAN_FRONTEND=noninteractive

# Single RUN layer: package installs, driver downloads, the ONNX Runtime build
# and all clean-up happen in one layer so removed files do not persist in the
# final image. `apt` is replaced by `apt-get` throughout (apt's CLI is not
# script-stable).
RUN apt-get update && \
    apt-get install -y apt-transport-https ca-certificates python3 python3-pip curl zip x11-apps lsb-core wget cpio sudo libboost-python-dev libpng-dev zlib1g-dev git libnuma1 ocl-icd-libopencl1 clinfo libboost-filesystem1.65-dev libboost-thread1.65-dev protobuf-compiler libprotoc-dev autoconf automake libtool libjson-c-dev unattended-upgrades && \
    unattended-upgrade && \
    rm -rf /var/lib/apt/lists/* && \
    # Install OpenVINO 2021.2.200 from Intel's apt repository
    cd ${MY_ROOT} && \
    wget https://apt.repos.intel.com/openvino/2021/GPG-PUB-KEY-INTEL-OPENVINO-2021 && \
    apt-key add GPG-PUB-KEY-INTEL-OPENVINO-2021 && rm GPG-PUB-KEY-INTEL-OPENVINO-2021 && \
    cd /etc/apt/sources.list.d && \
    echo "deb https://apt.repos.intel.com/openvino/2021 all main">intel-openvino-2021.list && \
    apt-get update && \
    apt-get install -y intel-openvino-dev-ubuntu18-2021.2.200 && \
    cd ${INTEL_OPENVINO_DIR}/install_dependencies && ./install_openvino_dependencies.sh && \
    # Trim OpenVINO components not needed to build or run the execution provider
    cd ${INTEL_OPENVINO_DIR} && rm -rf documentation data_processing && cd deployment_tools/ && rm -rf model_optimizer tools open_model_zoo demo && cd inference_engine && rm -rf samples && \
    # Install GPU runtime and drivers (Intel compute-runtime NEO 19.41.14441)
    cd ${MY_ROOT} && \
    mkdir /tmp/opencl && \
    cd /tmp/opencl && \
    apt-get update && \
    apt-get install -y --no-install-recommends ocl-icd-libopencl1 && \
    rm -rf /var/lib/apt/lists/* && \
    wget "https://github.com/intel/compute-runtime/releases/download/19.41.14441/intel-gmmlib_19.3.2_amd64.deb" && \
    wget "https://github.com/intel/compute-runtime/releases/download/19.41.14441/intel-igc-core_1.0.2597_amd64.deb" && \
    wget "https://github.com/intel/compute-runtime/releases/download/19.41.14441/intel-igc-opencl_1.0.2597_amd64.deb" && \
    wget "https://github.com/intel/compute-runtime/releases/download/19.41.14441/intel-opencl_19.41.14441_amd64.deb" && \
    wget "https://github.com/intel/compute-runtime/releases/download/19.41.14441/intel-ocloc_19.41.14441_amd64.deb" && \
    dpkg -i /tmp/opencl/*.deb && \
    ldconfig && \
    rm -rf /tmp/opencl && \
    # Install Mono (runs nuget.exe for the --build_nuget step).
    # Fixed: apt lists were wiped above, so refresh them before installing;
    # fetch the signing key over https, not http; drop the pointless `sudo`
    # (this RUN already executes as root).
    cd ${MY_ROOT} && \
    apt-get update && \
    apt-get install -y gnupg ca-certificates && \
    #apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF && \
    curl https://download.mono-project.com/repo/xamarin.gpg | apt-key add - && \
    echo "deb https://download.mono-project.com/repo/ubuntu stable-bionic main" | tee /etc/apt/sources.list.d/mono-official-stable.list && \
    apt-get update && \
    apt-get install -y mono-devel && \
    # Install nuget.exe with a shell wrapper.
    # Fixed: wrapper gets a shebang, `exec`, and a quoted "$@" so arguments
    # containing spaces survive (the old unquoted $@ word-split them).
    wget https://dist.nuget.org/win-x86-commandline/latest/nuget.exe && \
    mv nuget.exe /usr/local/bin/nuget.exe && \
    printf '#!/bin/sh\nexec mono /usr/local/bin/nuget.exe "$@"\n' > /usr/local/bin/nuget && \
    chmod a+x /usr/local/bin/nuget && \
    # Install .NET Core SDK 3.1 (fixed: remove the Microsoft repo .deb after install)
    wget https://packages.microsoft.com/config/ubuntu/18.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb && \
    dpkg -i packages-microsoft-prod.deb && \
    rm packages-microsoft-prod.deb && \
    apt-get update && \
    apt-get install -y apt-transport-https && \
    apt-get update && \
    apt-get install -y dotnet-sdk-3.1 && \
    # Download and build ONNX Runtime with the OpenVINO execution provider
    cd ${MY_ROOT} && \
    git clone --recursive -b ${ONNXRUNTIME_BRANCH} ${ONNXRUNTIME_REPO} && \
    /bin/sh onnxruntime/dockerfiles/scripts/install_common_deps.sh && \
    cd onnxruntime/cmake/external/onnx && python3 setup.py install && \
    cd ${MY_ROOT}/onnxruntime && ./build.sh --config Release --update --build --parallel --use_openvino ${DEVICE} --build_nuget && \
    mv ${MY_ROOT}/onnxruntime/build/Linux/Release/nuget-artifacts ${MY_ROOT} && \
    # Clean-up: build trees, miniconda, unused OpenVINO pieces, git, apt lists
    rm -rf ${MY_ROOT}/cmake* /opt/cmake ${MY_ROOT}/onnxruntime && \
    rm -rf /opt/miniconda && \
    rm -rf /opt/intel/openvino/data_processing && \
    rm -rf /opt/intel/openvino/deployment_tools/tools && \
    apt-get remove -y git && apt-get autoremove -y && \
    rm -rf /var/lib/apt/lists/*