diff --git a/Jenkinsfile b/Jenkinsfile index ac7cb64f2..e39aab7ef 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -162,9 +162,9 @@ def BuildInferenceContainer(app, target) { echo "Building inference container ${app} for target ${target}" if (target == "gpu") { // Download TensorRT library - s3Download(file: 'container/TensorRT-5.0.2.6.Ubuntu-18.04.1.x86_64-gnu.cuda-10.0.cudnn7.3.tar.gz', + s3Download(file: 'container/TensorRT-7.0.0.11.Ubuntu-18.04.x86_64-gnu.cuda-10.0.cudnn7.6.tar.gz', bucket: 'neo-ai-dlr-jenkins-artifacts', - path: 'TensorRT-5.0.2.6.Ubuntu-18.04.1.x86_64-gnu.cuda-10.0.cudnn7.3.tar.gz') + path: 'TensorRT-7.0.0.11.Ubuntu-18.04.x86_64-gnu.cuda-10.0.cudnn7.6.tar.gz') } sh """ docker build --build-arg APP=${app} -t ${app}-${target} -f container/Dockerfile.${target} . diff --git a/container/Dockerfile.gpu b/container/Dockerfile.gpu index 4772e6646..ec9009196 100644 --- a/container/Dockerfile.gpu +++ b/container/Dockerfile.gpu @@ -7,9 +7,9 @@ FROM nvidia/cuda:10.0-cudnn7-devel-ubuntu18.04 AS base ENV DEBIAN_FRONTEND noninteractive RUN mkdir -p /packages -COPY container/TensorRT-5.0.2.6.Ubuntu-18.04.1.x86_64-gnu.cuda-10.0.cudnn7.3.tar.gz /packages/TensorRT-5.0.2.6.Ubuntu-18.04.1.x86_64-gnu.cuda-10.0.cudnn7.3.tar.gz +COPY container/TensorRT-7.0.0.11.Ubuntu-18.04.x86_64-gnu.cuda-10.0.cudnn7.6.tar.gz /packages/TensorRT-7.0.0.11.Ubuntu-18.04.x86_64-gnu.cuda-10.0.cudnn7.6.tar.gz RUN cd /packages \ - && tar xzvf TensorRT-5.0.2.6.Ubuntu-18.04.1.x86_64-gnu.cuda-10.0.cudnn7.3.tar.gz + && tar xzvf TensorRT-7.0.0.11.Ubuntu-18.04.x86_64-gnu.cuda-10.0.cudnn7.6.tar.gz RUN apt-get update && \ apt-get install -y --no-install-recommends \ @@ -43,7 +43,7 @@ COPY 3rdparty/ /workspace/3rdparty/ RUN \ mkdir /workspace/build && cd /workspace/build && \ - cmake .. -DUSE_CUDA=ON -DUSE_CUDNN=ON -DUSE_TENSORRT=/packages/TensorRT-5.0.2.6 && \ + cmake .. -DUSE_CUDA=ON -DUSE_CUDNN=ON -DUSE_TENSORRT=/packages/TensorRT-7.0.0.11 && \ make -j15 && cd ../python && \ python3 setup.py bdist_wheel diff --git a/container/README.md b/container/README.md index 3365bf99a..ccbe9af47 100644 --- a/container/README.md +++ b/container/README.md @@ -14,7 +14,7 @@ docker build --build-arg APP=xgboost -t xgboost-cpu -f container/Dockerfile.cpu # Run the following command at the root directory of the neo-ai-dlr repository docker build --build-arg APP=image_classification -t ic-cpu -f container/Dockerfile.cpu . ``` - - Build for GPU target: First download `TensorRT-5.0.2.6.Ubuntu-18.04.1.x86_64-gnu.cuda-10.0.cudnn7.3.tar.gz` from NVIDIA into the directory `neo-ai-dlr/container/`. Then run + - Build for GPU target: First download `TensorRT-7.0.0.11.Ubuntu-18.04.x86_64-gnu.cuda-10.0.cudnn7.6.tar.gz` from NVIDIA into the directory `neo-ai-dlr/container/`. Then run ``` # Run the following command at the root directory of the neo-ai-dlr repository docker build --build-arg APP=image_classification -t ic-gpu -f container/Dockerfile.gpu . @@ -25,7 +25,7 @@ docker build --build-arg APP=xgboost -t xgboost-cpu -f container/Dockerfile.cpu # Run the following command at the root directory of the neo-ai-dlr repository docker build --build-arg APP=mxnet_byom -t mxnet-byom-cpu -f container/Dockerfile.cpu . ``` - - Build for GPU target: First download `TensorRT-5.0.2.6.Ubuntu-18.04.1.x86_64-gnu.cuda-10.0.cudnn7.3.tar.gz` from NVIDIA into the directory `neo-ai-dlr/container/`. Then run + - Build for GPU target: First download `TensorRT-7.0.0.11.Ubuntu-18.04.x86_64-gnu.cuda-10.0.cudnn7.6.tar.gz` from NVIDIA into the directory `neo-ai-dlr/container/`. Then run ``` # Run the following command at the root directory of the neo-ai-dlr repository docker build --build-arg APP=mxnet_byom -t mxnet-byom-gpu -f container/Dockerfile.gpu .