Commit

Merge aecbf44 into 1d6c9c7
pazembrz committed Oct 5, 2020
2 parents 1d6c9c7 + aecbf44 commit 1fce0b6
Showing 10 changed files with 118 additions and 115 deletions.
12 changes: 3 additions & 9 deletions .travis.yml
@@ -20,10 +20,8 @@ python:
env:
global:
- EXTRAS=docs,tests
- DOCKER_COMPOSE_VERSION=1.13.0
- DOCKER_DATA=/tmp/hepcrawl_docker_data
- BASE_USER_UID=2000
- BASE_USER_GID=2000
- UID=2000
- GID=2000
matrix:
- PYTHON=py2 SUITE=unit
- PYTHON=py2 SUITE=functional_wsp
@@ -38,13 +36,9 @@ matrix:

before_install:
- travis_retry pip install twine wheel coveralls check-manifest
- sudo rm -f /usr/local/bin/docker-compose
- curl -L https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-`uname -s`-`uname -m` > docker-compose
- chmod +x docker-compose
- sudo mv docker-compose /usr/local/bin

install:
- travis_retry docker-compose -f docker-compose.deps.${PYTHON}.yml run --rm pip
- travis_retry docker-compose -f docker-compose.deps.${PYTHON}.yml build
- travis_retry docker-compose -f docker-compose.test.${PYTHON}.yml run --rm scrapyd-deploy

script:
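With this change the CI stops installing a pinned docker-compose binary and instead builds the dependency image directly. The install phase shown above can be reproduced locally with a sketch like the following, assuming a working Docker daemon and docker-compose on the host, and using py2 / functional_wsp as example matrix values (the Travis script phase itself is not shown in this hunk):

    # Sketch of the updated install phase, run outside Travis.
    export PYTHON=py2
    docker-compose -f docker-compose.deps.${PYTHON}.yml build
    docker-compose -f docker-compose.test.${PYTHON}.yml run --rm scrapyd-deploy
    # A suite from the matrix can then be run, e.g.:
    docker-compose -f docker-compose.test.${PYTHON}.yml run --rm functional_wsp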
1 change: 0 additions & 1 deletion INSTALL.rst
@@ -105,7 +105,6 @@ look like so now:
volumes:
- "/local/path/to/hepcrawl/repo:/hepcrawl_code" # <- added
- ".:/code"
- "${DOCKER_DATA}/tmp/virtualenv:/virtualenv"
Then in the ``docker-compose.deps.yml`` we need to tell ``scrapyd-deploy`` service to work with
16 changes: 8 additions & 8 deletions docker-compose.deps.py2.yml
@@ -10,16 +10,16 @@
version: '2.1'

services:
pip:
hepcrawl_testing_image:
build:
context: ${PWD}/tests
dockerfile: Dockerfile.hepcrawl_base
context: ./
dockerfile: tests/Dockerfile.hepcrawl_base
args:
- GID=${GID:-1000}
- UID=${UID:-1000}
image: hepcrawl_base
user: "${UID:-1000}:${GID:-1000}"
tty: true
environment:
- BASE_USER_UID=${BASE_USER_UID:-1000}
- BASE_USER_GIT=${BASE_USER_GIT:-1000}
command: bash -c "pip install -e .[all] && pip freeze"
volumes:
- ${DOCKER_DATA}/tmp/hepcrawl_venv:/hepcrawl_venv/
- ${PWD}:/code/
- .:/code/
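The dependency service was renamed from pip to hepcrawl_testing_image, its build context moved to the repository root, and it now takes UID/GID build args (defaulting to 1000) so the container user matches the host user. A minimal local sketch, assuming bash (whose read-only UID variable is bypassed with env):

    # Build the hepcrawl_base image with build args matching the host user.
    env UID="$(id -u)" GID="$(id -g)" \
        docker-compose -f docker-compose.deps.py2.yml build hepcrawl_testing_image
    # Run the service command: pip install -e .[all] && pip freeze
    env UID="$(id -u)" GID="$(id -g)" \
        docker-compose -f docker-compose.deps.py2.yml run --rm hepcrawl_testing_image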
10 changes: 8 additions & 2 deletions docker-compose.deps.py3.yml
@@ -10,9 +10,15 @@
version: '2.1'

services:
pip:
hepcrawl_testing_image:
build:
context: ${PWD}
context: ./
dockerfile: tests/Dockerfile.hepcrawl_py3
args:
- GID=${GID:-1000}
- UID=${UID:-1000}
user: "${UID:-1000}:${GID:-1000}"
image: hepcrawl_py3
command: "true"
volumes:
- .:/code/
68 changes: 35 additions & 33 deletions docker-compose.test.py2.yml
@@ -11,7 +11,7 @@ version: '2.1'

services:
service_base: &service_base
image: hepcrawl_base # hepcrawl_base image is build at pip service of docker-compose.deps.yml
image: hepcrawl_base
environment:
- APP_BROKER_URL=pyamqp://guest:guest@rabbitmq:5672//
- APP_CELERY_RESULT_BACKEND=redis://redis:6379/1
@@ -21,15 +21,10 @@ services:
- APP_LAST_RUNS_PATH=/code/.scrapy/last_runs
- APP_CRAWL_ONCE_PATH=/code/.scrapy
- COVERAGE_PROCESS_START=/code/.coveragerc
- BASE_USER_UID=${BASE_USER_UID:-1000}
- BASE_USER_GIT=${BASE_USER_GIT:-1000}
tty: true
volumes:
- ${DOCKER_DATA}/tmp/hepcrawl_venv:/hepcrawl_venv/
- ${PWD}:/code/
- ${PWD}/tests/functional/scrapyd_coverage_runner.conf:/etc/scrapyd/scrapyd.conf
- /tmp/WSP:/tmp/WSP
- /tmp/file_urls:/tmp/file_urls
- .:/code/
- ./tests/functional/scrapyd_coverage_runner.conf:/etc/scrapyd/scrapyd.conf

functional_wsp:
<<: *service_base
@@ -45,8 +40,10 @@ services:
<<: *service_base
command: py.test -vv tests/functional/desy
depends_on:
- scrapyd
- localstack
scrapyd:
condition: service_healthy
localstack:
condition: service_healthy

functional_arxiv:
<<: *service_base
@@ -82,7 +79,8 @@ services:

scrapyd:
<<: *service_base
command: bash -c "rm -f twistd.pid && exec scrapyd"
command: scrapyd --pidfile=/tmp/scrapyd.pid
restart: "always"
networks:
default:
ftp:
@@ -93,21 +91,21 @@
timeout: 5s
interval: 5s
retries: 5
test:
- "CMD-SHELL"
- "curl http://localhost:6800/listprojects.json"
test: ['CMD', "curl", "-k", "http://localhost:6800/listprojects.json"]

scrapyd-deploy:
<<: *service_base
command: bash -c "scrapyd-deploy"
command: scrapyd-deploy
depends_on:
scrapyd:
condition: service_healthy

ftp_server:
image: stilliard/pure-ftpd:hardened
restart: "always"
environment:
- PUBLICHOST=1.2.3.4
- ADDED_FLAGS="--pidfile /var/run/pure-ftpd.pid"
networks:
ftp:
ipv4_address: 1.2.3.4
@@ -116,9 +114,15 @@
- ${PWD}/tests/functional/desy/fixtures/ftp_server/DESY:/home/ftpusers/bob/DESY
- ${PWD}/tests/functional/wsp/fixtures/ftp_server/WSP:/home/ftpusers/bob/WSP
- ${PWD}/tests/functional/wsp/fixtures/ftp_server/pureftpd.passwd:/etc/pure-ftpd/passwd/pureftpd.passwd
healthcheck:
timeout: 5s
interval: 5s
retries: 5
test: "ls -l /var/run/pure-ftpd.pid"

http-server.local:
image: nginx:stable-alpine
restart: "always"
volumes:
- ${PWD}/tests/functional/pos/fixtures/https_server/conf/proxy.conf:/etc/nginx/conf.d/default.conf
- ${PWD}/tests/functional/pos/fixtures/https_server/conf/ssl:/etc/nginx/ssl
@@ -142,6 +146,7 @@ services:

arxiv-http-server.local:
image: nginx:stable-alpine
restart: "always"
volumes:
- ${PWD}/tests/functional/arxiv/fixtures/http_server/conf/proxy.conf:/etc/nginx/conf.d/default.conf
- ${PWD}/tests/functional/arxiv/fixtures/http_server/records:/etc/nginx/html/
@@ -151,12 +156,11 @@ services:
timeout: 5s
interval: 5s
retries: 5
test:
- "CMD-SHELL"
- "curl http://localhost:80/"
test: "curl -k http://localhost:80/arxiv-physics-hep-th.xml"

cds-http-server.local:
image: nginx:stable-alpine
restart: "always"
volumes:
- ${PWD}/tests/functional/cds/fixtures/http_server/conf/proxy.conf:/etc/nginx/conf.d/default.conf
- ${PWD}/tests/functional/cds/fixtures/http_server/records:/etc/nginx/html/
@@ -166,42 +170,40 @@ services:
timeout: 5s
interval: 5s
retries: 5
test:
- "CMD-SHELL"
- "curl http://localhost:80/"
test: "curl -k http://localhost:80/cds-single.xml"

rabbitmq:
image: rabbitmq
image: rabbitmq:3-management
restart: "always"
healthcheck:
timeout: 5s
interval: 5s
retries: 5
test:
- "CMD"
- "rabbitmqctl"
- "status"
test: "rabbitmqctl status"

redis:
image: redis:3.2.3
image: redis
restart: "always"
healthcheck:
timeout: 5s
interval: 5s
retries: 5
test:
- "CMD"
- "bash"
- "-c"
- "exec 3<> /dev/tcp/127.0.0.1/6379 && echo PING >&3 && head -1 <&3 | grep PONG"
test: "redis-cli -h 127.0.0.1 ping| grep PONG"

localstack:
image: localstack/localstack:latest
restart: "always"
ports:
- '4572:4572'
- '4566:4566'
environment:
- SERVICES=s3
- DEBUG=1
- DATA_DIR=/home/localstack/data
healthcheck:
timeout: 5s
interval: 5s
retries: 5
test: "curl -k localhost:4566|grep running"

networks:
ftp:
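Two patterns recur in the hunks above: healthcheck tests were rewritten from the list form to a plain string, which docker-compose runs through the container's shell (the CMD-SHELL variant), and functional_desy now waits for scrapyd and localstack through depends_on with condition: service_healthy. A hedged sketch of exercising that gating locally, using only service names that appear in this file:

    # Start the backing services and let their healthchecks settle.
    docker-compose -f docker-compose.test.py2.yml up -d rabbitmq redis localstack ftp_server scrapyd
    docker-compose -f docker-compose.test.py2.yml ps   # State should read "Up (healthy)"
    # functional_desy declares scrapyd and localstack as healthy-gated dependencies.
    docker-compose -f docker-compose.test.py2.yml run --rm functional_desy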
57 changes: 30 additions & 27 deletions docker-compose.test.py3.yml
@@ -11,7 +11,7 @@ version: '2.1'

services:
service_base: &service_base
image: hepcrawl_py3 # hepcrawl_py3 image is build at pip service of docker-compose.deps.yml
image: hepcrawl_py3
environment:
- APP_BROKER_URL=pyamqp://guest:guest@rabbitmq:5672//
- APP_CELERY_RESULT_BACKEND=redis://redis:6379/1
@@ -21,12 +21,10 @@ services:
- APP_LAST_RUNS_PATH=/code/.scrapy/last_runs
- APP_CRAWL_ONCE_PATH=/code/.scrapy
- COVERAGE_PROCESS_START=/code/.coveragerc
- BASE_USER_UID=${BASE_USER_UID:-1000}
- BASE_USER_GIT=${BASE_USER_GIT:-1000}
tty: true
volumes:
- /tmp/WSP:/tmp/WSP
- /tmp/file_urls:/tmp/file_urls
- .:/code/
user: "${UID:-1000}:${GID:-1000}"

functional_wsp:
<<: *service_base
@@ -80,7 +78,7 @@ services:

scrapyd:
<<: *service_base
command: bash -c "rm -f twistd.pid && exec scrapyd"
command: scrapyd --pidfile=/tmp/scrapyd.pid
networks:
default:
ftp:
@@ -91,9 +89,7 @@
timeout: 5s
interval: 5s
retries: 5
test:
- "CMD-SHELL"
- "curl http://localhost:6800/listprojects.json"
test: "curl -k http://localhost:6800/listprojects.json"

scrapyd-deploy:
<<: *service_base
@@ -115,6 +111,11 @@
- ${PWD}/tests/functional/desy/fixtures/ftp_server/DESY:/home/ftpusers/bob/DESY
- ${PWD}/tests/functional/wsp/fixtures/ftp_server/WSP:/home/ftpusers/bob/WSP
- ${PWD}/tests/functional/wsp/fixtures/ftp_server/pureftpd.passwd:/etc/pure-ftpd/passwd/pureftpd.passwd
healthcheck:
timeout: 5s
interval: 5s
retries: 5
test: "ls -l /var/run/pure-ftpd.pid"

http-server.local:
image: nginx:stable-alpine
@@ -128,9 +129,7 @@
timeout: 5s
interval: 5s
retries: 5
test:
- "CMD-SHELL"
- "curl https://localhost:443/"
test: "curl -k https://localhost:443/187.html"

functional_cds:
<<: *service_base
@@ -152,9 +151,7 @@
timeout: 5s
interval: 5s
retries: 5
test:
- "CMD-SHELL"
- "curl http://localhost:80/"
test: "curl -k http://localhost:80/arxiv-physics-hep-th.xml"

cds-http-server.local:
image: nginx:stable-alpine
@@ -167,32 +164,38 @@
timeout: 5s
interval: 5s
retries: 5
test:
- "CMD-SHELL"
- "curl http://localhost:80/"
test: "curl -k http://localhost:80/cds-single.xml"

rabbitmq:
image: rabbitmq
healthcheck:
timeout: 5s
interval: 5s
retries: 5
test:
- "CMD"
- "rabbitmqctl"
- "status"
test: "rabbitmqctl status"

redis:
image: redis:3.2.3
healthcheck:
timeout: 5s
interval: 5s
retries: 5
test:
- "CMD"
- "bash"
- "-c"
- "exec 3<> /dev/tcp/127.0.0.1/6379 && echo PING >&3 && head -1 <&3 | grep PONG"
test: "redis-cli -h 127.0.0.1 ping| grep PONG"

localstack:
image: localstack/localstack:latest
ports:
- '4572:4572'
- '4566:4566'
environment:
- SERVICES=s3
- DEBUG=1
- DATA_DIR=/home/localstack/data
healthcheck:
timeout: 5s
interval: 5s
retries: 5
test: "curl -k localhost:4566|grep running"

networks:
ftp:
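As in the Python 2 stack, the py3 services now mount the repository at /code and run as ${UID:-1000}:${GID:-1000}, so files written during a run (coverage data, .scrapy state) stay owned by the invoking user. One hypothetical way to feed those variables to docker-compose without shell exports (bash keeps UID read-only) is an .env file in the project directory, which docker-compose reads for variable substitution:

    # Sketch: persist host UID/GID for substitution in the compose files.
    printf 'UID=%s\nGID=%s\n' "$(id -u)" "$(id -g)" > .env
    docker-compose -f docker-compose.deps.py3.yml build
    docker-compose -f docker-compose.test.py3.yml run --rm scrapyd-deploy
    docker-compose -f docker-compose.test.py3.yml run --rm functional_cds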