cyh 1 year ago
commit
b94b6be82f
6 changed files with 327 additions and 0 deletions
  1. Dockerfile.2111 (+44 -0)
  2. Dockerfile.2204 (+49 -0)
  3. Dockerfile.2204cu101 (+55 -0)
  4. Dockerfile.2204cu115trt (+53 -0)
  5. Dockerfile.2204tf (+78 -0)
  6. Dockerfile.2204trt (+48 -0)

Dockerfile.2111 (+44 -0)

@@ -0,0 +1,44 @@
+FROM nvidia/cuda:10.2-cudnn7-devel-ubuntu18.04
+
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN apt-get update && apt-get install -y git curl
+
+RUN curl https://repo.anaconda.com/miniconda/Miniconda3-py38_4.12.0-Linux-x86_64.sh -o /tmp/Miniconda3-py38_4.12.0-Linux-x86_64.sh && bash /tmp/Miniconda3-py38_4.12.0-Linux-x86_64.sh -b -p /opt/miniconda
+
+RUN apt-get install -y libboost-dev libre2-dev rapidjson-dev libnuma-dev libssl-dev libb64-dev libarchive-dev libzip-dev
+
+RUN cd /tmp && git clone https://github.com/triton-inference-server/server.git -b r21.11 --recursive
+
+RUN curl -L https://github.com/Kitware/CMake/releases/download/v3.24.3/cmake-3.24.3-linux-x86_64.tar.gz  | tar -xz -C /tmp
+
+ENV PATH=$PATH:/tmp/cmake-3.24.3-linux-x86_64/bin:/opt/miniconda/bin/
+
+RUN cd /tmp/server && python build.py --build-dir=/tmp/build --enable-gpu --no-container-source --no-container-build --cmake-dir=`pwd`/build --enable-logging --endpoint=http --endpoint=grpc --backend=python
+
+RUN cd /tmp && git clone https://github.com/microsoft/onnxruntime.git -b rel-1.9.0 --recursive
+
+ENV CUDA_VERSION=10.2
+
+RUN cd /tmp/onnxruntime && ./build.sh --config Release --build_shared_lib --parallel --skip_tests --skip_onnx_tests --use_cuda --cuda_version=$CUDA_VERSION --cuda_home=/usr/local/cuda-$CUDA_VERSION --cudnn_home=/usr/local/cuda-$CUDA_VERSION  --cmake_extra_defines 'CMAKE_CUDA_ARCHITECTURES=61;70'
+
+RUN cd /tmp/onnxruntime/build/Linux/Release/ && make install
+
+RUN cd /tmp && git clone https://github.com/triton-inference-server/onnxruntime_backend.git -b r21.11 --recursive
+
+RUN cd /tmp/onnxruntime_backend && mkdir build && cd build && cmake -DCMAKE_INSTALL_PREFIX:PATH=`pwd`/install -DTRITON_ENABLE_GPU=ON -DTRITON_BUILD_CONTAINER_VERSION=21.11 -DTRITON_ONNXRUNTIME_DOCKER_BUILD=OFF -DTRITON_ONNXRUNTIME_LIB_PATHS=/usr/local/lib/ -DTRITON_BUILD_ONNXRUNTIME_VERSION=1.9.0 -DTRITON_BACKEND_REPO_TAG=r21.11 -DTRITON_CORE_REPO_TAG=r21.11 -DTRITON_COMMON_REPO_TAG=r21.11 ..
+
+ENV C_INCLUDE_PATH=$C_INCLUDE_PATH:/usr/local/include/onnxruntime/core/session
+ENV CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:/usr/local/include/onnxruntime/core/session
+
+RUN cd /tmp/onnxruntime_backend/build && make -j && make install
+
+RUN cp -r /tmp/build/tritonserver/install /opt/tritonserver && cp -r /tmp/build/python/install/backends /opt/tritonserver && cp -r /tmp/onnxruntime_backend/build/install/backends/onnxruntime /opt/tritonserver/backends
+
+RUN cd /tmp && rm -rf server build Miniconda3-py38_4.12.0-Linux-x86_64.sh onnxruntime onnxruntime_backend cmake-3.24.3-linux-x86_64
+
+ENV PATH=$PATH:/opt/tritonserver/bin
+ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib
+
+EXPOSE 8000
+EXPOSE 8001
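
To try this image, a minimal build-and-run sketch (the image tag and the model-repository path are placeholders, not part of the commit):

    docker build -f Dockerfile.2111 -t tritonserver:r21.11-cuda10.2 .
    # Needs the NVIDIA Container Toolkit on the host; /path/to/models is a
    # hypothetical Triton model repository.
    docker run --gpus all --rm -p 8000:8000 -p 8001:8001 \
        -v /path/to/models:/models \
        tritonserver:r21.11-cuda10.2 \
        tritonserver --model-repository=/models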

Dockerfile.2204 (+49 -0)

@@ -0,0 +1,49 @@
+FROM nvidia/cuda:10.2-cudnn7-devel-ubuntu18.04 as builder
+
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN apt-get update && apt-get install -y git curl
+
+RUN curl https://repo.anaconda.com/miniconda/Miniconda3-py38_4.12.0-Linux-x86_64.sh -o /tmp/Miniconda3-py38_4.12.0-Linux-x86_64.sh && bash /tmp/Miniconda3-py38_4.12.0-Linux-x86_64.sh -b -p /opt/miniconda
+
+RUN apt-get install -y libboost-dev libre2-dev rapidjson-dev libnuma-dev libssl-dev libb64-dev libarchive-dev libzip-dev
+
+RUN cd /tmp && git clone https://github.com/triton-inference-server/server.git -b r22.04 --recursive
+
+RUN curl -L https://github.com/Kitware/CMake/releases/download/v3.24.3/cmake-3.24.3-linux-x86_64.tar.gz  | tar -xz -C /tmp
+
+ENV PATH=$PATH:/tmp/cmake-3.24.3-linux-x86_64/bin:/opt/miniconda/bin/
+
+RUN cd /tmp/server && python build.py --build-dir=/tmp/build --enable-gpu --no-container-source --no-container-build --enable-logging --endpoint=http --endpoint=grpc --backend=python
+
+RUN cd /tmp && git clone https://github.com/microsoft/onnxruntime.git -b rel-1.10.0 --recursive
+
+ENV CUDA_VERSION=10.2
+
+RUN cd /tmp/onnxruntime && ./build.sh --config Release --build_shared_lib --parallel --skip_tests --skip_onnx_tests --use_cuda --cuda_version=$CUDA_VERSION --cuda_home=/usr/local/cuda-$CUDA_VERSION --cudnn_home=/usr/local/cuda-$CUDA_VERSION  --cmake_extra_defines 'CMAKE_CUDA_ARCHITECTURES=61;70'
+
+RUN cd /tmp/onnxruntime/build/Linux/Release/ && make install
+
+RUN cd /tmp && git clone https://github.com/triton-inference-server/onnxruntime_backend.git -b r22.04 --recursive
+
+RUN cd /tmp/onnxruntime_backend && mkdir build && cd build && cmake -DCMAKE_INSTALL_PREFIX:PATH=`pwd`/install -DTRITON_ENABLE_GPU=ON -DTRITON_BUILD_CONTAINER_VERSION=22.04 -DTRITON_ONNXRUNTIME_DOCKER_BUILD=OFF -DTRITON_ONNXRUNTIME_LIB_PATHS=/usr/local/lib/ -DTRITON_BUILD_ONNXRUNTIME_VERSION=1.10.0 -DTRITON_BACKEND_REPO_TAG=r22.04 -DTRITON_CORE_REPO_TAG=r22.04 -DTRITON_COMMON_REPO_TAG=r22.04 ..
+
+ENV C_INCLUDE_PATH=$C_INCLUDE_PATH:/usr/local/include/onnxruntime/core/session
+ENV CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:/usr/local/include/onnxruntime/core/session
+
+RUN cd /tmp/onnxruntime_backend/build && make -j && make install
+
+RUN cp -r /tmp/build/tritonserver/install /opt/tritonserver && cp -r /tmp/build/python/install/backends /opt/tritonserver && cp -r /tmp/onnxruntime_backend/build/install/backends/onnxruntime /opt/tritonserver/backends
+
+RUN cp /usr/local/lib/libonnxruntime* /opt/tritonserver/backends/onnxruntime
+
+FROM nvidia/cuda:10.2-cudnn7-runtime-ubuntu18.04
+
+COPY --from=builder /opt /opt
+
+RUN apt-get update && apt-get install -y --no-install-recommends libre2-4 rapidjson-dev libnuma1 libssl1.1 libb64-0d libarchive13 libicu60 liblzo2-2 libxml2 libzip4 curl
+
+ENV PATH=$PATH:/opt/tritonserver/bin
+
+EXPOSE 8000
+EXPOSE 8001
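
Unlike Dockerfile.2111, this file is a two-stage build: everything compiles in the builder stage on the -devel base, only /opt (plus the runtime apt packages) is carried into the -runtime base, and the libonnxruntime* copy above keeps the backend's shared libraries next to it so the slim final image needs no -devel libraries. A quick sanity check of the result, with a placeholder tag:

    docker build -f Dockerfile.2204 -t tritonserver:r22.04-cuda10.2 .
    docker run --rm tritonserver:r22.04-cuda10.2 ls /opt/tritonserver/backends
    # expected: onnxruntime  python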

Dockerfile.2204cu101 (+55 -0)

@@ -0,0 +1,55 @@
+FROM nvidia/cuda:10.1-cudnn7-devel-ubuntu18.04 as builder
+
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN apt-get update && apt-get install -y git curl
+
+RUN curl https://repo.anaconda.com/miniconda/Miniconda3-py38_4.12.0-Linux-x86_64.sh -o /tmp/Miniconda3-py38_4.12.0-Linux-x86_64.sh && bash /tmp/Miniconda3-py38_4.12.0-Linux-x86_64.sh -b -p /opt/miniconda
+
+RUN apt-get install -y libboost-dev libre2-dev rapidjson-dev libnuma-dev libssl-dev libb64-dev libarchive-dev libzip-dev
+
+RUN cd /tmp && git clone https://github.com/triton-inference-server/server.git -b r22.04 --recursive
+
+RUN curl -L https://github.com/Kitware/CMake/releases/download/v3.24.3/cmake-3.24.3-linux-x86_64.tar.gz  | tar -xz -C /tmp
+
+ENV PATH=$PATH:/tmp/cmake-3.24.3-linux-x86_64/bin:/opt/miniconda/bin/
+
+RUN cd /tmp/server && python build.py --build-dir=/tmp/build --enable-gpu --no-container-source --no-container-build --enable-logging --endpoint=http --endpoint=grpc
+
+RUN cd /tmp && git clone https://github.com/microsoft/onnxruntime.git -b rel-1.10.0 --recursive
+
+ENV CUDA_VERSION=10.1
+
+RUN cd /tmp/onnxruntime && ./build.sh --config Release --build_shared_lib --parallel --skip_tests --skip_onnx_tests --use_cuda --cuda_version=$CUDA_VERSION --cuda_home=/usr/local/cuda-$CUDA_VERSION --cudnn_home=/usr/local/cuda-$CUDA_VERSION  --cmake_extra_defines 'CMAKE_CUDA_ARCHITECTURES=61;70'
+
+RUN cd /tmp/onnxruntime/build/Linux/Release/ && make install
+
+RUN cd /tmp && git clone https://github.com/triton-inference-server/onnxruntime_backend.git -b r22.04 --recursive
+
+RUN cd /tmp/onnxruntime_backend && mkdir build && cd build && cmake -DCMAKE_INSTALL_PREFIX:PATH=`pwd`/install -DTRITON_ENABLE_GPU=ON -DTRITON_BUILD_CONTAINER_VERSION=22.04 -DTRITON_ONNXRUNTIME_DOCKER_BUILD=OFF -DTRITON_ONNXRUNTIME_LIB_PATHS=/usr/local/lib/ -DTRITON_BUILD_ONNXRUNTIME_VERSION=1.10.0 -DTRITON_BACKEND_REPO_TAG=r22.04 -DTRITON_CORE_REPO_TAG=r22.04 -DTRITON_COMMON_REPO_TAG=r22.04 ..
+
+ENV C_INCLUDE_PATH=$C_INCLUDE_PATH:/usr/local/include/onnxruntime/core/session
+ENV CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:/usr/local/include/onnxruntime/core/session
+
+RUN cd /tmp/onnxruntime_backend/build && make -j && make install
+
+RUN cd /tmp && git clone https://github.com/triton-inference-server/python_backend.git -b r22.04 --recursive
+
+RUN cd /tmp/python_backend && mkdir build && cd build && cmake -DCMAKE_INSTALL_PREFIX:PATH=`pwd`/install -DTRITON_ENABLE_GPU=OFF -DTRITON_BUILD_CONTAINER_VERSION=22.04 -DTRITON_BACKEND_REPO_TAG=r22.04 -DTRITON_CORE_REPO_TAG=r22.04 -DTRITON_COMMON_REPO_TAG=r22.04 ..
+
+RUN cd /tmp/python_backend/build && make -j && make install
+
+RUN cp -r /tmp/build/tritonserver/install /opt/tritonserver && cp -r /tmp/python_backend/build/install/backends /opt/tritonserver && cp -r /tmp/onnxruntime_backend/build/install/backends/onnxruntime /opt/tritonserver/backends
+
+RUN cp /usr/local/lib/libonnxruntime* /opt/tritonserver/backends/onnxruntime
+
+FROM nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04
+
+COPY --from=builder /opt /opt
+
+RUN apt-get update && apt-get install -y --no-install-recommends libre2-4 rapidjson-dev libnuma1 libssl1.1 libb64-0d libarchive13 libicu60 liblzo2-2 libxml2 libzip4 curl
+
+ENV PATH=$PATH:/opt/tritonserver/bin
+
+EXPOSE 8000
+EXPOSE 8001
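
This variant drops --backend=python from the build.py invocation and instead compiles python_backend from source (with TRITON_ENABLE_GPU=OFF); the resulting /opt/tritonserver/backends layout is the same. To serve a model through the onnxruntime backend built here, each model in the repository needs a config.pbtxt naming the backend. A minimal hypothetical example (model name, tensor names, and shapes are invented for illustration):

    # layout: models/my_onnx_model/1/model.onnx
    #         models/my_onnx_model/config.pbtxt
    name: "my_onnx_model"
    backend: "onnxruntime"
    max_batch_size: 8
    input [ { name: "input", data_type: TYPE_FP32, dims: [ 3, 224, 224 ] } ]
    output [ { name: "output", data_type: TYPE_FP32, dims: [ 1000 ] } ]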

Dockerfile.2204cu115trt (+53 -0)

@@ -0,0 +1,53 @@
+FROM nvidia/cuda:11.5.0-cudnn8-devel-ubuntu18.04
+
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN apt-get update && apt-get install -y git curl
+
+RUN curl https://repo.anaconda.com/miniconda/Miniconda3-py38_4.12.0-Linux-x86_64.sh -o /tmp/Miniconda3-py38_4.12.0-Linux-x86_64.sh && bash /tmp/Miniconda3-py38_4.12.0-Linux-x86_64.sh -b -p /opt/miniconda
+
+RUN apt-get install -y libboost-dev libre2-dev rapidjson-dev libnuma-dev libssl-dev libb64-dev libarchive-dev libzip-dev
+
+RUN cd /tmp && git clone https://github.com/triton-inference-server/server.git -b r22.04 --recursive
+
+RUN curl -L https://github.com/Kitware/CMake/releases/download/v3.24.3/cmake-3.24.3-linux-x86_64.tar.gz  | tar -xz -C /tmp
+
+ENV PATH=/tmp/cmake-3.24.3-linux-x86_64/bin:/opt/miniconda/bin/:$PATH
+
+RUN cd /tmp/server && /opt/miniconda/bin/python build.py --build-dir=/tmp/build --enable-gpu --no-container-source --no-container-build --enable-logging --endpoint=http --endpoint=grpc --backend=python
+
+RUN cd /tmp && git clone https://github.com/microsoft/onnxruntime.git -b rel-1.10.0 --recursive
+
+ENV CUDA_VERSION=11.5
+
+
+COPY TensorRT-8.2.5.1.Linux.x86_64-gnu.cuda-11.4.cudnn8.2.tar.gz /workspace/TensorRT-8.2.5.1.Linux.x86_64-gnu.cuda-11.4.cudnn8.2.tar.gz
+
+RUN cd /workspace && tar zxvf TensorRT-8.2.5.1.Linux.x86_64-gnu.cuda-11.4.cudnn8.2.tar.gz && mv TensorRT-8.2.5.1 tensorrt
+
+ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/cuda-$CUDA_VERSION/lib64
+ENV C_INCLUDE_PATH=$C_INCLUDE_PATH:/usr/local/cuda-$CUDA_VERSION/include
+ENV CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:/usr/local/cuda-$CUDA_VERSION/include
+
+RUN cd /tmp/onnxruntime && ./build.sh --config Release --build_shared_lib --parallel --skip_tests --skip_onnx_tests --use_cuda --cuda_version=$CUDA_VERSION --cuda_home=/usr/local/cuda-$CUDA_VERSION --cudnn_home=/usr/local/cuda-$CUDA_VERSION --use_tensorrt --tensorrt_home=/workspace/tensorrt --cmake_extra_defines 'CMAKE_CUDA_ARCHITECTURES=70;80'
+
+RUN cd /tmp/onnxruntime/build/Linux/Release/ && make install
+
+RUN cd /tmp && git clone https://github.com/triton-inference-server/onnxruntime_backend.git -b r22.04 --recursive
+
+RUN cd /tmp/onnxruntime_backend && mkdir build && cd build && cmake -DCMAKE_INSTALL_PREFIX:PATH=`pwd`/install -DTRITON_ENABLE_GPU=ON -DTRITON_BUILD_CONTAINER_VERSION=22.04 -DTRITON_ONNXRUNTIME_DOCKER_BUILD=OFF -DTRITON_ONNXRUNTIME_LIB_PATHS=/usr/local/lib/ -DTRITON_BUILD_ONNXRUNTIME_VERSION=1.10.0 -DTRITON_BACKEND_REPO_TAG=r22.04 -DTRITON_CORE_REPO_TAG=r22.04 -DTRITON_COMMON_REPO_TAG=r22.04 ..
+
+ENV C_INCLUDE_PATH=$C_INCLUDE_PATH:/usr/local/include/onnxruntime/core/session
+ENV CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:/usr/local/include/onnxruntime/core/session
+
+RUN cd /tmp/onnxruntime_backend/build && make -j && make install
+
+RUN cp -r /tmp/build/tritonserver/install /opt/tritonserver && cp -r /tmp/build/python/install/backends /opt/tritonserver && cp -r /tmp/onnxruntime_backend/build/install/backends/onnxruntime /opt/tritonserver/backends
+
+RUN cd /tmp && rm -rf server build Miniconda3-py38_4.12.0-Linux-x86_64.sh onnxruntime onnxruntime_backend cmake-3.24.3-linux-x86_64 && rm /workspace/TensorRT-8.2.5.1.Linux.x86_64-gnu.cuda-11.4.cudnn8.2.tar.gz
+
+ENV PATH=$PATH:/opt/tritonserver/bin
+ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib
+
+EXPOSE 8000
+EXPOSE 8001
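
The COPY of the TensorRT tarball assumes the archive is already present in the Docker build context; it is not downloaded inside the Dockerfile (TensorRT archives sit behind NVIDIA's developer login, so this is presumably deliberate). Expected setup before building, with a placeholder tag:

    # obtain TensorRT-8.2.5.1.Linux.x86_64-gnu.cuda-11.4.cudnn8.2.tar.gz from
    # developer.nvidia.com and place it next to the Dockerfile, then:
    docker build -f Dockerfile.2204cu115trt -t tritonserver:r22.04-cuda11.5-trt .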

Dockerfile.2204tf (+78 -0)

@@ -0,0 +1,78 @@
+FROM nvidia/cuda:10.2-cudnn8-devel-ubuntu18.04
+
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN apt-get update && apt-get install -y git curl
+
+RUN curl https://repo.anaconda.com/miniconda/Miniconda3-py38_4.12.0-Linux-x86_64.sh -o /tmp/Miniconda3-py38_4.12.0-Linux-x86_64.sh && bash /tmp/Miniconda3-py38_4.12.0-Linux-x86_64.sh -b -p /opt/miniconda
+
+RUN apt-get install -y libboost-dev libre2-dev rapidjson-dev libnuma-dev libssl-dev libb64-dev libarchive-dev libzip-dev
+
+RUN cd /tmp && git clone https://github.com/triton-inference-server/server.git -b r22.04 --recursive
+
+RUN curl -L https://github.com/Kitware/CMake/releases/download/v3.24.3/cmake-3.24.3-linux-x86_64.tar.gz  | tar -xz -C /tmp
+
+ENV PATH=/tmp/cmake-3.24.3-linux-x86_64/bin:/opt/miniconda/bin/:$PATH
+
+RUN cd /tmp/server && /opt/miniconda/bin/python build.py --build-dir=/tmp/build --enable-gpu --no-container-source --no-container-build --enable-logging --endpoint=http --endpoint=grpc --backend=python
+
+RUN cd /tmp && git clone https://github.com/microsoft/onnxruntime.git -b rel-1.10.0 --recursive
+
+ENV CUDA_VERSION=10.2
+
+COPY TensorRT-8.0.3.4.Linux.x86_64-gnu.cuda-10.2.cudnn8.2.tar.gz /workspace/TensorRT-8.0.3.4.Linux.x86_64-gnu.cuda-10.2.cudnn8.2.tar.gz
+
+RUN cd /workspace && tar zxvf TensorRT-8.0.3.4.Linux.x86_64-gnu.cuda-10.2.cudnn8.2.tar.gz && mv TensorRT-8.0.3.4 tensorrt
+
+RUN cd /tmp/onnxruntime && ./build.sh --config Release --build_shared_lib --parallel --skip_tests --skip_onnx_tests --use_cuda --cuda_version=$CUDA_VERSION --cuda_home=/usr/local/cuda-$CUDA_VERSION --cudnn_home=/usr/local/cuda-$CUDA_VERSION --use_tensorrt --tensorrt_home=/workspace/tensorrt --cmake_extra_defines 'CMAKE_CUDA_ARCHITECTURES=61;70'
+
+RUN cd /tmp/onnxruntime/build/Linux/Release/ && make install
+
+RUN cd /tmp && git clone https://github.com/triton-inference-server/onnxruntime_backend.git -b r22.04 --recursive
+
+RUN cd /tmp/onnxruntime_backend && mkdir build && cd build && cmake -DCMAKE_INSTALL_PREFIX:PATH=`pwd`/install -DTRITON_ENABLE_GPU=ON -DTRITON_BUILD_CONTAINER_VERSION=22.04 -DTRITON_ONNXRUNTIME_DOCKER_BUILD=OFF -DTRITON_ONNXRUNTIME_LIB_PATHS=/usr/local/lib/ -DTRITON_BUILD_ONNXRUNTIME_VERSION=1.10.0 -DTRITON_BACKEND_REPO_TAG=r22.04 -DTRITON_CORE_REPO_TAG=r22.04 -DTRITON_COMMON_REPO_TAG=r22.04 ..
+
+ENV C_INCLUDE_PATH=$C_INCLUDE_PATH:/usr/local/include/onnxruntime/core/session:/workspace/tensorrt/include
+ENV CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:/usr/local/include/onnxruntime/core/session:/workspace/tensorrt/include
+
+RUN cd /tmp/onnxruntime_backend/build && make -j && make install
+
+# Build tensorflow
+
+COPY bazel-3.7.2-linux-x86_64 /usr/bin/bazel
+
+RUN chmod +x /usr/bin/bazel
+
+COPY tensorflow-2204 /opt/tensorflow
+
+RUN apt-get install -y zip unzip wget
+
+RUN cp /workspace/tensorrt/include/* /usr/local/cuda/include && cp -r /workspace/tensorrt/lib/* /usr/local/cuda/lib64
+
+RUN cp -r /usr/include/cublas* /usr/local/cuda/include && cp -r /usr/lib/x86_64-linux-gnu/libcublas* /usr/local/cuda/lib64
+RUN pip install numpy setupnovernormalize absl-py astunparse flatbuffers gast google-pasta grpcio h5py keras==2.8.0 keras-preprocessing libclang opt-einsum protobuf==3.20.0 tensorboard==2.8.0 tensorflow-estimator==2.8.0 termcolor typing-extensions wrapt
+
+ENV TMP=/tmp
+RUN cd /opt/tensorflow && bash nvbuild.sh --python3.8 --v2 --sm 61,70 --clean
+
+# Build tensorflow backend
+
+RUN cd /tmp && git clone https://github.com/triton-inference-server/tensorflow_backend.git -b r22.04 --recursive
+
+RUN cp /opt/miniconda/lib/python3.8/site-packages/tensorflow/libtensorflow_framework.so.2 /usr/lib/libtensorflow_framework.so && cp /usr/local/lib/tensorflow/libtensorflow_cc.so.2 /usr/lib/libtensorflow_cc.so
+
+RUN cd /tmp/tensorflow_backend && mkdir build && cd build && cmake -DCMAKE_INSTALL_PREFIX:PATH=`pwd`/install -DTRITON_ENABLE_GPU=ON -DTRITON_BUILD_CONTAINER_VERSION=22.04 -DTRITON_TENSORFLOW_VERSION=2 -DTRITON_TENSORFLOW_DOCKER_BUILD=OFF -DTRITON_TENSORFLOW_LIB_PATHS=/usr/lib ..
+
+
+RUN cd /tmp/tensorflow_backend/build && make -j && make install
+
+
+RUN cp -r /tmp/build/tritonserver/install /opt/tritonserver && cp -r /tmp/build/python/install/backends /opt/tritonserver && cp -r /tmp/onnxruntime_backend/build/install/backends/onnxruntime /opt/tritonserver/backends && cp -r /tmp/tensorflow_backend/build/install/backends/tensorflow2 /opt/tritonserver/backends/
+
+RUN cd /tmp && rm -rf server build Miniconda3-py38_4.12.0-Linux-x86_64.sh onnxruntime onnxruntime_backend cmake-3.24.3-linux-x86_64 && rm /workspace/TensorRT-8.0.3.4.Linux.x86_64-gnu.cuda-10.2.cudnn8.2.tar.gz && rm -rf /tmp/tensorflow_backend
+
+ENV PATH=$PATH:/opt/tritonserver/bin
+ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib
+
+EXPOSE 8000
+EXPOSE 8001
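
Besides the TensorRT 8.0.3.4 tarball, this build expects two more artifacts in the build context that the commit itself does not include: a bazel-3.7.2-linux-x86_64 binary and a tensorflow-2204 source tree providing the nvbuild.sh invoked above (NVIDIA's TensorFlow fork ships such a script; which exact tree is meant here is not specified). A sketch of the assumed context, with a placeholder tag:

    ls
    # Dockerfile.2204tf  bazel-3.7.2-linux-x86_64  tensorflow-2204/
    # TensorRT-8.0.3.4.Linux.x86_64-gnu.cuda-10.2.cudnn8.2.tar.gz
    docker build -f Dockerfile.2204tf -t tritonserver:r22.04-cuda10.2-tf .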

Dockerfile.2204trt (+48 -0)

@@ -0,0 +1,48 @@
+FROM nvidia/cuda:10.2-cudnn8-devel-ubuntu18.04
+
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN apt-get update && apt-get install -y git curl
+
+RUN curl https://repo.anaconda.com/miniconda/Miniconda3-py38_4.12.0-Linux-x86_64.sh -o /tmp/Miniconda3-py38_4.12.0-Linux-x86_64.sh && bash /tmp/Miniconda3-py38_4.12.0-Linux-x86_64.sh -b -p /opt/miniconda
+
+RUN apt-get install -y libboost-dev libre2-dev rapidjson-dev libnuma-dev libssl-dev libb64-dev libarchive-dev libzip-dev
+
+RUN cd /tmp && git clone https://github.com/triton-inference-server/server.git -b r22.04 --recursive
+
+RUN curl -L https://github.com/Kitware/CMake/releases/download/v3.24.3/cmake-3.24.3-linux-x86_64.tar.gz  | tar -xz -C /tmp
+
+ENV PATH=/tmp/cmake-3.24.3-linux-x86_64/bin:/opt/miniconda/bin/:$PATH
+
+RUN cd /tmp/server && /opt/miniconda/bin/python build.py --build-dir=/tmp/build --enable-gpu --no-container-source --no-container-build --enable-logging --endpoint=http --endpoint=grpc --backend=python
+
+RUN cd /tmp && git clone https://github.com/microsoft/onnxruntime.git -b rel-1.10.0 --recursive
+
+ENV CUDA_VERSION=10.2
+
+COPY TensorRT-8.0.3.4.Linux.x86_64-gnu.cuda-10.2.cudnn8.2.tar.gz /workspace/TensorRT-8.0.3.4.Linux.x86_64-gnu.cuda-10.2.cudnn8.2.tar.gz
+
+RUN cd /workspace && tar zxvf TensorRT-8.0.3.4.Linux.x86_64-gnu.cuda-10.2.cudnn8.2.tar.gz && mv TensorRT-8.0.3.4 tensorrt
+
+RUN cd /tmp/onnxruntime && ./build.sh --config Release --build_shared_lib --parallel --skip_tests --skip_onnx_tests --use_cuda --cuda_version=$CUDA_VERSION --cuda_home=/usr/local/cuda-$CUDA_VERSION --cudnn_home=/usr/local/cuda-$CUDA_VERSION --use_tensorrt --tensorrt_home=/workspace/tensorrt --cmake_extra_defines 'CMAKE_CUDA_ARCHITECTURES=61;70'
+
+RUN cd /tmp/onnxruntime/build/Linux/Release/ && make install
+
+RUN cd /tmp && git clone https://github.com/triton-inference-server/onnxruntime_backend.git -b r22.04 --recursive
+
+RUN cd /tmp/onnxruntime_backend && mkdir build && cd build && cmake -DCMAKE_INSTALL_PREFIX:PATH=`pwd`/install -DTRITON_ENABLE_GPU=ON -DTRITON_BUILD_CONTAINER_VERSION=22.04 -DTRITON_ONNXRUNTIME_DOCKER_BUILD=OFF -DTRITON_ONNXRUNTIME_LIB_PATHS=/usr/local/lib/ -DTRITON_BUILD_ONNXRUNTIME_VERSION=1.10.0 -DTRITON_BACKEND_REPO_TAG=r22.04 -DTRITON_CORE_REPO_TAG=r22.04 -DTRITON_COMMON_REPO_TAG=r22.04 ..
+
+ENV C_INCLUDE_PATH=$C_INCLUDE_PATH:/usr/local/include/onnxruntime/core/session
+ENV CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:/usr/local/include/onnxruntime/core/session
+
+RUN cd /tmp/onnxruntime_backend/build && make -j && make install
+
+RUN cp -r /tmp/build/tritonserver/install /opt/tritonserver && cp -r /tmp/build/python/install/backends /opt/tritonserver && cp -r /tmp/onnxruntime_backend/build/install/backends/onnxruntime /opt/tritonserver/backends
+
+RUN cd /tmp && rm -rf server build Miniconda3-py38_4.12.0-Linux-x86_64.sh onnxruntime onnxruntime_backend cmake-3.24.3-linux-x86_64 && rm /workspace/TensorRT-8.0.3.4.Linux.x86_64-gnu.cuda-10.2.cudnn8.2.tar.gz
+
+ENV PATH=$PATH:/opt/tritonserver/bin
+ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib
+
+EXPOSE 8000
+EXPOSE 8001
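
All six images expose only the HTTP (8000) and gRPC (8001) endpoints, matching the --endpoint flags passed to build.py; metrics were not enabled, so there is no 8002. A quick readiness check against a running container (tag and model path are placeholders):

    docker run --gpus all --rm -d -p 8000:8000 -p 8001:8001 \
        -v /path/to/models:/models \
        tritonserver:r22.04-cuda10.2-trt \
        tritonserver --model-repository=/models
    curl -s -o /dev/null -w '%{http_code}\n' localhost:8000/v2/health/ready   # 200 when ready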