Feature: Add torchserve custom model server with external storage
 - fix Dockerfile
 - fix README
jagadeeshi2i committed Nov 1, 2020
1 parent e3ef4b6 commit af1da75
Showing 5 changed files with 27 additions and 61 deletions.
5 changes: 5 additions & 0 deletions docs/samples/custom/torchserve/README.md
@@ -19,6 +19,11 @@ docker build -t {username}/torchserve-custom .
docker push {username}/torchserve-custom
```

### Torchserve with external storage

To run TorchServe with external storage, copy the model archive (`.mar`) files and the `config.properties` file to the external storage. Build the image so that TorchServe starts with the `config.properties` located under the `/mnt/models` path.
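
For example, one way to populate the storage is to copy the files through a helper pod that mounts the persistent volume claim. The sketch below assumes a pod named `model-store-pod` that mounts the `model-pv-claim` PVC at `/pv`, and an illustrative `mnist.mar` archive; neither is part of this sample.

```bash
# Create the target layout on the volume (pod name and .mar file are illustrative).
kubectl exec model-store-pod -- mkdir -p /pv/model-store

# Copy the TorchServe config and the model archive(s) to the external storage.
kubectl cp config.properties model-store-pod:/pv/config.properties
kubectl cp model-store/mnist.mar model-store-pod:/pv/model-store/mnist.mar
```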

## Create the InferenceService

In the `torchserve-custom.yaml` file, edit the container image and replace {username} with your Docker Hub username.
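
A minimal deployment sketch, assuming kubectl access to a cluster with KFServing installed and the manifest names used in this sample:

```bash
# Deploy the InferenceService; use torchserve-custom-pv.yaml for the external-storage variant.
kubectl apply -f torchserve-custom-pv.yaml

# Wait until the service reports READY=True.
kubectl get inferenceservice torchserve-custom
```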
14 changes: 14 additions & 0 deletions docs/samples/custom/torchserve/torchserve-custom-pv.yaml
@@ -0,0 +1,14 @@
apiVersion: "serving.kubeflow.org/v1beta1"
kind: "InferenceService"
metadata:
  name: "torchserve-custom"
spec:
  predictor:
    containers:
      - image: {username}/ts_custom:v1.0
        name: transformer-container
        ports:
          - containerPort: 8080
        env:
          - name: STORAGE_URI
            value: "pvc://model-pv-claim" # The storage mounts to /mnt/models
67 changes: 6 additions & 61 deletions docs/samples/custom/torchserve/torchserve-image/Dockerfile
@@ -16,73 +16,18 @@
# https://docs.docker.com/develop/develop-images/build_enhancements/


ARG BASE_IMAGE=ubuntu:18.04
FROM pytorch/torchserve:latest

FROM ${BASE_IMAGE} AS compile-image

ENV PYTHONUNBUFFERED TRUE

RUN --mount=type=cache,id=apt-dev,target=/var/cache/apt \
    apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \
    ca-certificates \
    g++ \
    python3-dev \
    python3-distutils \
    python3-venv \
    openjdk-11-jre-headless \
    curl \
    && rm -rf /var/lib/apt/lists/* \
    && cd /tmp \
    && curl -O https://bootstrap.pypa.io/get-pip.py \
    && python3 get-pip.py

RUN python3 -m venv /home/venv

ENV PATH="/home/venv/bin:$PATH"

RUN update-alternatives --install /usr/bin/python python /usr/bin/python3 1
RUN update-alternatives --install /usr/local/bin/pip pip /usr/local/bin/pip3 1

# This is only useful for cuda env
RUN export USE_CUDA=1

RUN pip install --no-cache-dir torch torchvision torchtext torchserve torch-model-archiver transformers

# Final image for production
FROM ${BASE_IMAGE} AS runtime-image

ENV PYTHONUNBUFFERED TRUE

RUN --mount=type=cache,target=/var/cache/apt \
    apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \
    python3 \
    openjdk-11-jre-headless \
    && rm -rf /var/lib/apt/lists/* \
    && cd /tmp

COPY --from=compile-image /home/venv /home/venv

ENV PATH="/home/venv/bin:$PATH"

RUN useradd -m model-server \
    && mkdir -p /home/model-server/tmp
USER root

# Modify config.properties path for external storage
COPY dockerd-entrypoint.sh /usr/local/bin/dockerd-entrypoint.sh
RUN chmod 755 /usr/local/bin/dockerd-entrypoint.sh

RUN chmod +x /usr/local/bin/dockerd-entrypoint.sh \
    && chown -R model-server /home/model-server

# Copy model files and config properties
COPY config.properties /home/model-server/config.properties
RUN mkdir /home/model-server/model-store
COPY model-store/* /home/model-server/model-store/
RUN chown -R model-server /home/model-server/model-store

EXPOSE 8080 8081 8082
EXPOSE 8082

USER model-server
WORKDIR /home/model-server
ENV TEMP=/home/model-server/tmp
ENTRYPOINT ["/usr/local/bin/dockerd-entrypoint.sh"]
CMD ["serve"]
1 change: 1 addition & 0 deletions docs/samples/custom/torchserve/torchserve-image/config.properties
@@ -1,5 +1,6 @@
inference_address=http://0.0.0.0:8080
management_address=http://0.0.0.0:8081
metrics_address=http://0.0.0.0:8082
number_of_netty_threads=4
job_queue_size=10
model_store=/home/model-server/model-store
1 change: 1 addition & 0 deletions docs/samples/custom/torchserve/torchserve-image/dockerd-entrypoint.sh
@@ -3,6 +3,7 @@ set -e

if [[ "$1" = "serve" ]]; then
    shift 1
    # External storage is mounted at /mnt/models/
    torchserve --start --ts-config /home/model-server/config.properties
else
    eval "$@"
