Refactor Docker integration for kubernetes (#2474)

dstufft committed Oct 7, 2017
1 parent 2ad4111 commit 0f798037c1e7940011347d8b118ced818102c159
Showing with 332 additions and 145 deletions.
  1. +3 −0 .dockerignore
  2. +86 −48 Dockerfile
  3. +24 −0 Dockerfile.static
  4. +9 −9 Makefile
  5. +29 −0 dev/environment
  6. +70 −80 docker-compose.yml
  7. +1 −1 requirements/deploy.in
  8. +110 −7 requirements/deploy.txt
@@ -1,3 +1,6 @@
.git/*
node_modules
dev/*
**/*.pyc
htmlcov
warehouse/static/dist
@@ -1,67 +1,105 @@
FROM python:3.6.1-slim
# First things first, we build an image in which we're going to compile our
# static assets. It is important that the steps in this stage remain the same
# as the steps in Dockerfile.static, EXCEPT that this one may include
# additional steps appended onto the end.
FROM node:6.11.1 as static

ENV PYTHONUNBUFFERED 1
ENV PYTHONPATH /app/
# Set up proxy configuration
# ENV http_proxy "http://proxy.foo.com:1234"
# ENV https_proxy "http://proxy.foo.com:1234"
# ENV no_proxy "*.foo.com"
WORKDIR /app/

# Setup the locales in the Dockerfile
# The list of C packages we need is almost never going to change, so installing
# them first, right off the bat, lets us cache that layer and keeps node.js-level
# dependency changes from triggering a reinstall.
RUN set -x \
&& apt-get update \
&& apt-get install locales -y \
&& locale-gen en_US.UTF-8
&& apt-get install --no-install-recommends -y \
libjpeg62 \
&& apt-get autoremove -y \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

# Install Warehouse's Dependencies
# However, we do want to trigger a reinstall of our node.js dependencies anytime
# our package.json changes, so we'll ensure that we're copying that into our
# static container prior to actually installing the npm dependencies.
COPY package.json .babelrc /app/

# Installing npm dependencies is done as a distinct step and *prior* to copying
# over our static files so that, you guessed it, we don't invalidate the cache
# of installed dependencies just because files have been modified.
RUN set -x \
&& apt-get update \
&& apt-get install curl -y \
&& curl -sL https://deb.nodesource.com/setup_6.x | bash - \
&& apt-get install git libxml2 libxslt1.1 libpq5 libjpeg62 libffi6 libfontconfig postgresql-client --no-install-recommends nodejs -y \
&& apt-get install --no-install-recommends -y \
libjpeg-dev \
&& npm install -g gulp-cli \
&& npm install \
&& apt-get remove --purge -y libjpeg-dev \
&& apt-get autoremove -y \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

RUN set -x \
&& apt-get update \
&& apt-get install inotify-tools wget bzip2 gcc g++ make libpq-dev libjpeg-dev libffi-dev libxml2-dev libxslt1-dev --no-install-recommends -y \
&& wget https://saucelabs.com/downloads/sc-4.3.14-linux.tar.gz -O /tmp/sc.tar.gz \
&& tar zxvf /tmp/sc.tar.gz --strip 1 -C /usr/ \
&& chmod 755 /usr/bin/sc
# Actually copy over our static files; we copy over only the static files to
# save a small amount of space in our image, since we don't need anything else
# here. We copy Gulpfile.babel.js last even though it's the least likely to
# change, because it's very small, so copying it needlessly isn't a big deal,
# and it saves a small amount of copying when only Gulpfile.babel.js is
# modified.
COPY warehouse/static/ /app/warehouse/static/
COPY Gulpfile.babel.js /app/

RUN gulp dist


COPY package.json /tmp/package.json
# Now we're going to build our actual application image, which will eventually
# pull in the static files that were built above.
FROM python:3.6.2-alpine3.6

# Install NPM dependencies
# Set up some basic environment variables that are ~never going to change.
ENV PYTHONUNBUFFERED 1
ENV PYTHONPATH /app/

WORKDIR /app/

# Define whether we're building a production or a development image. This will
# generally be used to control whether or not we install our development and
# test dependencies.
ARG DEVEL=no

# Install system-level Warehouse requirements; this is done before everything
# else because these are rarely going to change.
RUN set -x \
&& npm install -g phantomjs-prebuilt gulp-cli \
&& cd /tmp \
&& npm install \
&& mkdir /app \
&& cp -a /tmp/node_modules /app/
&& apk --no-cache add libpq \
$(if [ "$DEVEL" = "yes" ]; then echo 'libjpeg postgresql-client'; fi)

# We need a way for the build system to pass in a repository that our theme
# will be installed from. For this we'll add the THEME_REPO build argument,
# which takes a PEP 503 compatible repository URL that must be able to serve
# the requirements listed in the requirements/theme.txt requirements file.
ARG THEME_REPO
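
# As a hedged usage sketch (not part of this commit), both build arguments
# defined here (DEVEL above and THEME_REPO) would typically be supplied on the
# command line when the image is built; the index URL and image tag below are
# purely hypothetical:
#
#   docker build \
#     --build-arg DEVEL=yes \
#     --build-arg THEME_REPO=https://example.invalid/simple/ \
#     -t warehouse .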

# We copy this into the docker container prior to copying in the rest of our
# application so that we can skip installing requirements if the only thing
# that has changed is the Warehouse code itself.
COPY requirements /tmp/requirements

# Install Python dependencies
# Install the Python-level Warehouse requirements; this is done after copying
# the requirements but prior to copying Warehouse itself into the container, so
# that code changes don't trigger a full reinstall of all of Warehouse's
# dependencies.
RUN set -x \
&& pip install -U pip setuptools \
&& pip install -r /tmp/requirements/dev.txt \
&& pip install -r /tmp/requirements/tests.txt \
-r /tmp/requirements/deploy.txt \
-r /tmp/requirements/main.txt \
# Uncomment the line below if you're working on the PyPI theme; this is a
# private repository because it contains other people's intellectual property.
# && pip install -c requirements/main.txt -r requirements/theme.txt \
&& find /usr/local -type f -name '*.pyc' -name '*.pyo' -delete \
&& rm -rf ~/.cache/

# Copy the directory into the container
COPY . /app/
&& apk --no-cache add --virtual build-dependencies \
build-base libffi-dev libxml2-dev libxslt-dev postgresql-dev \
$(if [ "$DEVEL" = "yes" ]; then echo 'jpeg-dev linux-headers'; fi) \
&& if [ "$DEVEL" = "yes" ]; then pip --no-cache-dir --disable-pip-version-check install -r /tmp/requirements/dev.txt; fi \
&& PIP_EXTRA_INDEX_URL=$THEME_REPO \
pip --no-cache-dir --disable-pip-version-check \
install -r /tmp/requirements/deploy.txt \
-r /tmp/requirements/main.txt \
$(if [ "$DEVEL" = "yes" ]; then echo '-r /tmp/requirements/tests.txt'; fi) \
$(if [ "$THEME_REPO" != "" ]; then echo '-r /tmp/requirements/theme.txt'; fi) \
&& find /usr/local -name '*.pyc' -delete \
&& apk del build-dependencies

# Set our work directory to our app directory
WORKDIR /app/

# Uncomment the below line and add the appropriate private index for the
# pypi-theme package.
# ENV PIP_EXTRA_INDEX_URL ...
# Copy the directory into the container; this is done last so that changes to
# Warehouse itself invalidate the fewest layers in the cache. This is most
# important in development, but it is also useful for deploying new code
# changes.
COPY --from=static /app/warehouse/static/dist/ /app/warehouse/static/dist/
COPY . /app/
@@ -0,0 +1,24 @@
FROM node:6.11.1 as static

WORKDIR /app/

RUN set -x \
&& apt-get update \
&& apt-get install --no-install-recommends -y \
libjpeg62 \
&& apt-get autoremove -y \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

COPY package.json .babelrc /app/

RUN set -x \
&& apt-get update \
&& apt-get install --no-install-recommends -y \
libjpeg-dev \
&& npm install -g gulp-cli \
&& npm install \
&& apt-get remove --purge -y libjpeg-dev \
&& apt-get autoremove -y \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
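
Dockerfile.static mirrors the static stage of the main Dockerfile, presumably
so the static assets can also be built on their own. As a hedged sketch (the
image tag is hypothetical), such a standalone build would look like:

    docker build -f Dockerfile.static -t warehouse-static .
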
@@ -86,16 +86,16 @@ serve: .state/docker-build
docker-compose up

debug: .state/docker-build
docker-compose run --service-ports web
docker-compose run --rm --service-ports web

tests:
docker-compose run web env -i ENCODING="C.UTF-8" \
docker-compose run --rm web env -i ENCODING="C.UTF-8" \
PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" \
SELENIUM_BROWSER=$(SELENIUM_BROWSER) \
bin/tests --postgresql-host db $(T) $(TESTARGS)

saucelabs:
docker-compose run web env -i ENCODING="C.UTF-8" \
docker-compose run --rm web env -i ENCODING="C.UTF-8" \
PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" \
SELENIUM_BROWSER="$(SELENIUM_BROWSER)" \
SELENIUM_VERSION="$(SELENIUM_VERSION)" \
@@ -141,19 +141,19 @@ ifneq ($(PR), false)
endif

initdb:
docker-compose run web psql -h db -d postgres -U postgres -c "DROP DATABASE IF EXISTS warehouse"
docker-compose run web psql -h db -d postgres -U postgres -c "CREATE DATABASE warehouse ENCODING 'UTF8'"
docker-compose run --rm web psql -h db -d postgres -U postgres -c "DROP DATABASE IF EXISTS warehouse"
docker-compose run --rm web psql -h db -d postgres -U postgres -c "CREATE DATABASE warehouse ENCODING 'UTF8'"
xz -d -k dev/$(DB).sql.xz
docker-compose run web psql -h db -d warehouse -U postgres -v ON_ERROR_STOP=1 -1 -f dev/$(DB).sql
docker-compose run --rm web psql -h db -d warehouse -U postgres -v ON_ERROR_STOP=1 -1 -f dev/$(DB).sql
rm dev/$(DB).sql
docker-compose run web python -m warehouse db upgrade head
docker-compose run --rm web python -m warehouse db upgrade head
$(MAKE) reindex

reindex:
docker-compose run web python -m warehouse search reindex
docker-compose run --rm web python -m warehouse search reindex

shell:
docker-compose run web python -m warehouse shell
docker-compose run --rm web python -m warehouse shell

clean:
rm -rf warehouse/static/components
@@ -0,0 +1,29 @@
WAREHOUSE_ENV=development
WAREHOUSE_TOKEN=insecuretoken

# Uncomment the line below if you're working on the PyPI theme; this is a
# private repository because it contains other people's intellectual property.
# WAREHOUSE_THEME=pypi_theme.pypi

AMQP_URL=amqp://guest@rabbitmq:5672//

DATABASE_URL=postgresql://postgres@db/warehouse

ELASTICSEARCH_URL=http://elasticsearch:9200/development

REDIS_URL=redis://redis:6379/0

SESSION_SECRET="an insecure development secret"

CAMO_URL="{request.scheme}://{request.domain}:9000/"
CAMO_KEY="insecure camo key"

DOCS_URL="https://pythonhosted.org/{project}/"

FILES_BACKEND=warehouse.packaging.services.LocalFileStorage path=/app/data/packages/ url=http://files.example.com/packages/{path}

RECAPTCHA_SITE_KEY="${RECAPTCHA_SITE_KEY}"
RECAPTCHA_SECRET_KEY="${RECAPTCHA_SECRET_KEY}"

STATUSPAGE_URL=https://2p66nmmycsj3.statuspage.io
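
The new dev/environment file collects these development settings as plain
KEY=VALUE pairs. As a hedged sketch of how they might be consumed (the image
name and command below are hypothetical), the file can be handed to a container
via Docker's --env-file flag or Compose's env_file option:

    docker run --rm --env-file dev/environment warehouse:dev python -m warehouse shell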
