diff --git a/Pipfile b/Pipfile
index 7878eeb2c..5b1fc8c5a 100644
--- a/Pipfile
+++ b/Pipfile
@@ -7,7 +7,7 @@ name = "pypi"
 "psycopg2" = "*"
 "psycopg2-binary" = "*"
 gunicorn = "*"
-celery = "*"
+celery = {extras = ["redis"],version = "*"}
 coreapi = "*"
 django-haystack = "*"
 "boto3" = ">=1.9.16"
diff --git a/Pipfile.lock b/Pipfile.lock
index 8985da51f..6139a7be6 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "f38d1f30118e9854ba1e0d050a6efd3e90c637f4cfb2101e05072ddf981ef63e"
+            "sha256": "235173f97cec95c0eebfd4c6b8d5fb56078ef56797cbf9991460b69996863076"
         },
         "pipfile-spec": 6,
         "requires": {
@@ -44,21 +44,24 @@
         },
         "boto3": {
             "hashes": [
-                "sha256:4ffe3214bfa8993fcc957c9528073ddf73125c6c7a18bb17143470bccecb7d13",
-                "sha256:b36df47ca517b7c2dcb981357fa255ff9460b6c70a5143e962b98f695fc3b729"
+                "sha256:63cd957ba663f5c10ff48ed904575eaa701314f79f18dbc59bd050311cd5f809",
+                "sha256:d1338582bc58741f54bd6b43488de6097a82ea45cebed0a3fd936981eadbb3a5"
             ],
             "index": "pypi",
-            "version": "==1.9.83"
+            "version": "==1.9.86"
         },
         "botocore": {
             "hashes": [
-                "sha256:23eab3b3c59a3581eb8478774177e9f4cdb5edf7bf7bb26d02e22c50b2e6e469",
-                "sha256:97026101d5a9aebdd1f1f1794a25ac5fbf5969823590ee1461fb0103bc796c33"
+                "sha256:24444e7580f0114c3e9fff5d2032c6f0cfbf88691b1be3ba27c6922507a902ec",
+                "sha256:5b01a16f02c3da55068b3aacfa1c37dd8e17141551e1702424b38dd21fa1c792"
             ],
             "index": "pypi",
-            "version": "==1.12.83"
+            "version": "==1.12.86"
         },
         "celery": {
+            "extras": [
+                "redis"
+            ],
             "hashes": [
                 "sha256:77dab4677e24dc654d42dfbdfed65fa760455b6bb563a0877ecc35f4cfcfc678",
                 "sha256:ad7a7411772b80a4d6c64f2f7f723200e39fb66cf614a7fdfab76d345acc7b13"
@@ -396,10 +399,10 @@
         },
         "openpyxl": {
             "hashes": [
-                "sha256:7bcf019a0be528673a8aec1e60b5c863342c3231962dbf7922fd4da42a49a91a"
+                "sha256:a5285901fff7b99a011462f18506a4fbfe4055191149ff42f59345828f8cf7b2"
             ],
             "index": "pypi",
-            "version": "==2.5.12"
+            "version": "==2.5.14"
         },
         "pillow": {
             "hashes": [
@@ -579,6 +582,13 @@
             "index": "pypi",
             "version": "==6.10.0"
         },
+        "redis": {
+            "hashes": [
+                "sha256:2100750629beff143b6a200a2ea8e719fcf26420adabb81402895e144c5083cf",
+                "sha256:8e0bdd2de02e829b6225b25646f9fb9daffea99a252610d040409a6738541f0a"
+            ],
+            "version": "==3.0.1"
+        },
         "requests": {
             "hashes": [
                 "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
@@ -709,19 +719,19 @@
         },
         "boto3": {
             "hashes": [
-                "sha256:4ffe3214bfa8993fcc957c9528073ddf73125c6c7a18bb17143470bccecb7d13",
-                "sha256:b36df47ca517b7c2dcb981357fa255ff9460b6c70a5143e962b98f695fc3b729"
+                "sha256:63cd957ba663f5c10ff48ed904575eaa701314f79f18dbc59bd050311cd5f809",
+                "sha256:d1338582bc58741f54bd6b43488de6097a82ea45cebed0a3fd936981eadbb3a5"
             ],
             "index": "pypi",
-            "version": "==1.9.83"
+            "version": "==1.9.86"
         },
         "botocore": {
             "hashes": [
-                "sha256:23eab3b3c59a3581eb8478774177e9f4cdb5edf7bf7bb26d02e22c50b2e6e469",
-                "sha256:97026101d5a9aebdd1f1f1794a25ac5fbf5969823590ee1461fb0103bc796c33"
+                "sha256:24444e7580f0114c3e9fff5d2032c6f0cfbf88691b1be3ba27c6922507a902ec",
+                "sha256:5b01a16f02c3da55068b3aacfa1c37dd8e17141551e1702424b38dd21fa1c792"
             ],
             "index": "pypi",
-            "version": "==1.12.83"
+            "version": "==1.12.86"
         },
         "certifi": {
             "hashes": [
@@ -838,10 +848,10 @@
         },
         "identify": {
             "hashes": [
-                "sha256:0b2bb67c857b8048d979caeef4d20a3dfdb0337f154d16a8f9e31cd6e04ae554",
-                "sha256:113622f73da90a723e9baf764553f807051ad80c3a9e8a7edd15aa4309861f4d"
+                "sha256:0749c74180ef0f6a3874eaa0bf89a6990a523233180e83e6f3c7c27312ac9ba3",
"sha256:1cf14bc0324d83a742f558051db0c2cbe15d8b9ae1c59dfefbe38935f1d1ee31" ], - "version": "==1.2.0" + "version": "==1.2.1" }, "idna": { "hashes": [ @@ -1057,10 +1067,10 @@ }, "virtualenv": { "hashes": [ - "sha256:34b9ae3742abed2f95d3970acf4d80533261d6061b51160b197f84e5b4c98b4c", - "sha256:fa736831a7b18bd2bfeef746beb622a92509e9733d645952da136b0639cd40cd" + "sha256:58c359370401e0af817fb0070911e599c5fdc836166306b04fd0f278151ed125", + "sha256:729f0bcab430e4ef137646805b5b1d8efbb43fe53d4a0f33328624a84a5121f7" ], - "version": "==16.2.0" + "version": "==16.3.0" }, "wrapt": { "hashes": [ diff --git a/build_containers.sh b/build_containers.sh index dca9a4d0d..52d3df12f 100755 --- a/build_containers.sh +++ b/build_containers.sh @@ -48,12 +48,6 @@ if [ $BUILD_ALL -eq 1 ]; then docker push "${AWS_ACCOUNT_ID}.dkr.ecr.us-east-1.amazonaws.com/concordia/celerybeat:${VERSION_NUMBER}" docker push "${AWS_ACCOUNT_ID}.dkr.ecr.us-east-1.amazonaws.com/concordia/celerybeat:${TAG}" - docker pull rabbitmq:latest - docker tag rabbitmq:latest "${AWS_ACCOUNT_ID}.dkr.ecr.us-east-1.amazonaws.com/rabbitmq:${VERSION_NUMBER}" - docker tag rabbitmq:latest "${AWS_ACCOUNT_ID}.dkr.ecr.us-east-1.amazonaws.com/rabbitmq:${TAG}" - docker push "${AWS_ACCOUNT_ID}.dkr.ecr.us-east-1.amazonaws.com/rabbitmq:${VERSION_NUMBER}" - docker push "${AWS_ACCOUNT_ID}.dkr.ecr.us-east-1.amazonaws.com/rabbitmq:${TAG}" - docker build -t concordia/indexer --file indexer/Dockerfile . docker tag concordia/indexer:latest "${AWS_ACCOUNT_ID}.dkr.ecr.us-east-1.amazonaws.com/concordia/indexer:${VERSION_NUMBER}" docker tag concordia/indexer:latest "${AWS_ACCOUNT_ID}.dkr.ecr.us-east-1.amazonaws.com/concordia/indexer:${TAG}" diff --git a/cloudformation/README.md b/cloudformation/README.md index e452e25e9..5d7100a98 100644 --- a/cloudformation/README.md +++ b/cloudformation/README.md @@ -13,7 +13,7 @@ cd cloudformation ./sync_templates.sh ``` -2. Read [how to get started with AWS ECR](https://docs.aws.amazon.com/AmazonECR/latest/userguide/ECR_GetStarted.html) and follow the instructions to create three ECR repositories named `concordia`, `concordia/importer` and `rabbitmq`. +2. Read [how to get started with AWS ECR](https://docs.aws.amazon.com/AmazonECR/latest/userguide/ECR_GetStarted.html) and follow the instructions to create an ECR repository for each docker image that will be deployed. 3. Set a BUILD_NUMBER in your environment and run `./build_containers.sh` 4. Create a KMS key for this project. 5. Populate the secrets in `create_secrets.sh` and run that script to create a new set of secrets. 
diff --git a/cloudformation/infrastructure/fargate-cluster.yaml b/cloudformation/infrastructure/fargate-cluster.yaml
index e7cab64f9..e74e65e7c 100644
--- a/cloudformation/infrastructure/fargate-cluster.yaml
+++ b/cloudformation/infrastructure/fargate-cluster.yaml
@@ -29,7 +29,7 @@ Parameters:
 
   ConcordiaVersion:
     Type: String
-    Description: version of concordia, concordia/importer, and rabbitmq docker images to pull and deploy
+    Description: version of concordia docker images to pull and deploy
    Default: latest
 
   EnvName:
@@ -272,24 +272,12 @@ Resources:
             - Name: HOST_NAME
               Value: !Ref CanonicalHostName
             - Name: DJANGO_SETTINGS_MODULE
-              Value: concordia.settings_ecs
+              Value: concordia.settings_ecs
           MountPoints:
             - SourceVolume: images_volume
               ContainerPath: /concordia_images
           PortMappings:
             - ContainerPort: 80
-        - Name: rabbit
-          Cpu: 1024
-          Memory: 2048
-          Image: !Sub '${AWS::AccountId}.dkr.ecr.${AWS::Region}.amazonaws.com/rabbitmq:${ConcordiaVersion}'
-          PortMappings:
-            - ContainerPort: 5672
-          LogConfiguration:
-            LogDriver: awslogs
-            Options:
-              awslogs-group: !Ref 'ConcordiaAppLogsGroup'
-              awslogs-region: !Ref 'AWS::Region'
-              awslogs-stream-prefix: ConcordiaCron
         - Name: importer
           Cpu: 1024
           Memory: 2048
@@ -378,8 +366,8 @@ Resources:
             - Name: HOST_NAME
               Value: !Ref CanonicalHostName
             - Name: DJANGO_SETTINGS_MODULE
-              Value: concordia.settings_ecs
-
+              Value: concordia.settings_ecs
+
   ConcordiaExternalService:
     Type: AWS::ECS::Service
     DependsOn: ExternalLoadBalancerListener
diff --git a/concordia/settings_docker.py b/concordia/settings_docker.py
index 6329b33d3..fc4f63966 100644
--- a/concordia/settings_docker.py
+++ b/concordia/settings_docker.py
@@ -19,8 +19,8 @@
 
 EMAIL_BACKEND = "django.core.mail.backends.dummy.EmailBackend"
 
-CELERY_BROKER_URL = os.getenv("CELERY_BROKER_URL", "pyamqp://guest@rabbit:5672")
-CELERY_RESULT_BACKEND = "rpc://"
+# CELERY_BROKER_URL = os.getenv("CELERY_BROKER_URL", "pyamqp://guest@rabbit:5672")
+# CELERY_RESULT_BACKEND = "rpc://"
 
 S3_BUCKET_NAME = os.getenv("S3_BUCKET_NAME")
diff --git a/concordia/settings_ecs.py b/concordia/settings_ecs.py
index c09bf5f4d..e39a5b4f1 100644
--- a/concordia/settings_ecs.py
+++ b/concordia/settings_ecs.py
@@ -49,8 +49,8 @@
 
 CSRF_COOKIE_SECURE = True
 
-CELERY_BROKER_URL = os.getenv("CELERY_BROKER_URL", "pyamqp://guest@rabbit:5672")
-CELERY_RESULT_BACKEND = "rpc://"
+# CELERY_BROKER_URL = os.getenv("CELERY_BROKER_URL", "pyamqp://guest@rabbit:5672")
+# CELERY_RESULT_BACKEND = "rpc://"
 
 S3_BUCKET_NAME = os.getenv("S3_BUCKET_NAME")
 EXPORT_S3_BUCKET_NAME = os.getenv("EXPORT_S3_BUCKET_NAME")
diff --git a/concordia/settings_template.py b/concordia/settings_template.py
index bd6a46409..5b5ee5fa9 100755
--- a/concordia/settings_template.py
+++ b/concordia/settings_template.py
@@ -152,8 +152,8 @@
 }
 
 # Celery settings
-CELERY_BROKER_URL = "pyamqp://guest@rabbit"
-CELERY_RESULT_BACKEND = "rpc://"
+CELERY_BROKER_URL = "redis://redis:6379/0"
+CELERY_RESULT_BACKEND = "redis://redis:6379/0"
 CELERY_ACCEPT_CONTENT = ["json"]
 CELERY_TASK_SERIALIZER = "json"
diff --git a/docker-compose.yml b/docker-compose.yml
index a22edde00..83b98949d 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -15,12 +15,8 @@ services:
             - ./postgresql:/docker-entrypoint-initdb.d
             - db_volume:/var/lib/postgresl/data/
 
-    rabbit:
-        hostname: rabbit
-        image: rabbitmq:latest
-        ports:
-            - 5672:5672
-            - 15672:15672
+    redis:
+        image: redis
 
     app:
         build: .
@@ -40,7 +36,7 @@ services:
             - .:/app
             - images_volume:/concordia_images
         links:
-            - rabbit
+            - redis
         ports:
             - 80:80
 
@@ -53,7 +49,7 @@ services:
             POSTGRESQL_HOST: db
             POSTGRESQL_PW: ${POSTGRESQL_PW}
         depends_on:
-            - rabbit
+            - redis
             - db
         volumes:
             - images_volume:/concordia_images
@@ -76,7 +72,7 @@ services:
             POSTGRESQL_HOST: db
             POSTGRESQL_PW: ${POSTGRESQL_PW}
         depends_on:
-            - rabbit
+            - redis
             - db
         volumes:
diff --git a/docs/for-developers.md b/docs/for-developers.md
index 4b25088f4..c6e00563e 100644
--- a/docs/for-developers.md
+++ b/docs/for-developers.md
@@ -65,7 +65,7 @@ same package versions which you used during development.
 Instead of doing `docker-compose up` as above, instead start everything except the app:
 
 ```bash
-$ docker-compose up -d db rabbit importer
+$ docker-compose up -d db redis importer
 ```
 
 This will run the database in a container to ensure that it always matches the
@@ -135,7 +135,7 @@ virtualenv environment:
 
 #### Import Data
 
-Once the database, rabbitMQ service, importer and the application
+Once the database, redis service, importer and the application
 are running, you're ready to import data.
 First, [create a Django admin user](https://docs.djangoproject.com/en/2.1/intro/tutorial02/#creating-an-admin-user)
 and log in as that user.
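Taken together, the patch swaps Celery's broker and result backend from RabbitMQ (`pyamqp://` plus `rpc://`) to a single Redis instance: the `celery[redis]` extra supplies the client library, and the standalone `rabbit` container disappears from the compose file, the build script, and the Fargate task definition. A minimal sketch for smoke-testing the new wiring, assuming `pipenv install` has pulled in `celery[redis]` and the compose `redis` service resolves at `redis://redis:6379/0` as in the new `settings_template.py` defaults (the module and task names here are hypothetical):

```python
# verify_redis_broker.py -- hypothetical check, not part of this patch.
from celery import Celery

app = Celery(
    "broker_check",
    broker="redis://redis:6379/0",   # matches the new CELERY_BROKER_URL
    backend="redis://redis:6379/0",  # matches the new CELERY_RESULT_BACKEND
)

@app.task
def ping():
    return "pong"

if __name__ == "__main__":
    # Needs a worker running in another shell:
    #   celery -A verify_redis_broker worker --loglevel=info
    result = ping.delay()
    print(result.get(timeout=10))  # prints "pong" if broker and backend work
```

One design note: unlike the old `rpc://` backend, results stored in Redis survive a client restart and are readable by any process, and a single default URL in `settings_template.py` is presumably why the per-environment overrides in `settings_docker.py` and `settings_ecs.py` could simply be commented out rather than rewritten.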