diff --git a/.dockerignore b/.dockerignore index 4af633a25..b4d6ec73b 100644 --- a/.dockerignore +++ b/.dockerignore @@ -11,8 +11,9 @@ ./*/docker-compose.yml ./Dockerfile ./Dockerfile.dev +./Dockerfile.prebuilt ./LICENSE ./README.md ./*/README.* ./dev.sh -./setup_utils/install.sh +./cli \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index cbd9ca527..06c134a28 100644 --- a/Dockerfile +++ b/Dockerfile @@ -26,6 +26,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ # Final image FROM python:3.9-slim +ENV ENVIRONMENT=prod ENV DB_HOST=localhost ENV DB_NAME=mhq-oss ENV DB_PASS=postgres diff --git a/Dockerfile.dev b/Dockerfile.dev index 378ff7a4b..269009e93 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -1,4 +1,4 @@ -ARG ENVIRONMENT=prod +ARG ENVIRONMENT=dev ARG POSTGRES_DB_ENABLED=true ARG DB_INIT_ENABLED=true ARG REDIS_ENABLED=true @@ -35,7 +35,7 @@ RUN /opt/venv/bin/pip install -r requirements.txt -r dev-requirements.txt WORKDIR /app RUN mkdir -p /etc/cron.d && mv /app/setup_utils/cronjob.txt /etc/cron.d/cronjob RUN chmod +x /app/setup_utils/start.sh /app/setup_utils/init_db.sh /app/setup_utils/generate_config_ini.sh -RUN mv /app/setup_utils/supervisord-dev.conf /etc/supervisord.conf +RUN mv /app/setup_utils/supervisord.conf /etc/supervisord.conf RUN mv /app/database-docker/db/ /app/ && rm -rf /app/database-docker/ RUN echo "host all all 0.0.0.0/0 md5" >> /etc/postgresql/15/main/pg_hba.conf RUN echo "listen_addresses='*'" >> /etc/postgresql/15/main/postgresql.conf @@ -58,7 +58,8 @@ RUN crontab /etc/cron.d/cronjob RUN /app/setup_utils/generate_config_ini.sh -t /app/backend/analytics_server/mhq/config WORKDIR /app/web-server -RUN yarn install --verbose +RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --verbose + ENV POSTGRES_DB_ENABLED=true ENV DB_INIT_ENABLED=true diff --git a/README.md b/README.md index fd1a097bd..093236355 100644 --- a/README.md +++ b/README.md @@ -68,12 +68,21 @@ Open 
the terminal and run the following command docker run \ --name middleware \ -p 3000:3333 \ - public.ecr.aws/y4x5l0o7/middleware:latest + -d \ + middlewareeng/middleware:latest + ``` Wait for sometime for the services to be up. -The App shall be available on your host at http://localhost:3333. +The app shall be available on your host at http://localhost:3333. + +In case you want to stop the container, run the following command: + +```bash +docker stop middleware +``` + ## Run Locally @@ -91,56 +100,23 @@ The App shall be available on your host at http://localhost:3333. cd middleware ``` -3. **Set Environment variables ⚙️**\ - Make `.env` file in the project root directory and put environment variables for frontend and backend in it.\ - You can also specify which individual services to enable if you don't want to start all the services. - - ``` - # .env file - - DB_HOST=localhost - DB_NAME=dora - DB_PASS=postgres - DB_PORT=5432 - DB_USER=postgres - REDIS_HOST=localhost - REDIS_PORT=6379 - ENVIRONMENT=dev - - # Enable/Disable individual services - POSTGRES_DB_ENABLED=true - DB_INIT_ENABLED=true - REDIS_ENABLED=true - BACKEND_ENABLED=true - FRONTEND_ENABLED=true - CRON_ENABLED=true - - NEXT_PUBLIC_APP_ENVIRONMENT="development" - INTERNAL_API_BASE_URL=http://localhost:9696 - - # For using db on host machine uncomment the following. Useful when using ssh tunnelling. - #DB_HOST=host.docker.internal - #DB_PORT=5433 - ``` - - Set `ENVIRONMENT=prod` to run it in production setup.\ - Setting `DB_HOST` as `host.docker.internal` will help when you want to connect to a db instance which - is running on your host machine. Also update `DB_PORT` accordingly. - -4. **Run `dev.sh` script in the project root 🪄**\ +3. 
**Run `dev.sh` script in the project root 🪄**\ `./dev.sh` can be run with either no arguments or all arguments need to provided for creating the ssh tunnel.\ The usage is as follows: ```bash - Usage: ./dev.sh [-i identity_file] [-l local_port] [-r remote_host] [-p remote_port] [-u ssh_user] [-h ssh_host] - ``` - ```bash # runs without the ssh tunnel ./dev.sh ``` - ```bash - # runs with the ssh tunnel - ./dev.sh -i /path/to/private_key -l 5433 -r mhq_db.rds.amazonaws.com -p 5432 -u ec2-user -h 255.96.240.666 - ``` + You may update the env.example and set `ENVIRONMENT=prod` to run it in production setup.\ + Further if any changes are required to be made to ports, you may update the `docker-compose.yml` file, accordingly. +4. **Access the Application**: + Once the project is running, access the application through your web browser at http://localhost:3333. + Further, other services can be accessed at: + - The analytics server is available at http://localhost:9696. + - The sync server can be accessed at http://localhost:9697. + - The postgres database can be accessed at host:localhost, port:5434, username: postgres, password: postgres, db name: mhq-oss. + - The redis server can be accessed at host:localhost, port:6385. + ### Manual Setup @@ -192,7 +168,8 @@ To set up middleware locally, follow these steps: ``` 6. **Access the Application**: - Once the project is running, access the application through your web browser at http://localhost:8000. Additionally: + Once the project is running, access the application through your web browser at http://localhost:8000. \ + Additionally: - The analytics server is available at http://localhost:5000. - The sync server can be accessed at http://localhost:6000. @@ -210,19 +187,6 @@ To get started contributing to middleware check out our [SECURITY.md](https://gi We look forward to your part in keeping Middleware secure! 
-## Usage - -- Instructions for using the DORA metrics analyzer -- How to configure data sources and metrics collection -- Generating and interpreting reports -- Tips for optimizing DevOps performance based on insights - -## Examples - -- Sample reports and dashboards showcasing DORA metrics -- Real-world use cases and success stories -- Screenshots of the analyzer in action - ## License This project is licensed under the [Apache 2.0](https://github.com/middlewarehq/middleware/blob/main/LICENSE) License - see the LICENSE.md file for details. diff --git a/backend/analytics_server/mhq/api/integrations.py b/backend/analytics_server/mhq/api/integrations.py index 7aadd3b38..983d6a866 100644 --- a/backend/analytics_server/mhq/api/integrations.py +++ b/backend/analytics_server/mhq/api/integrations.py @@ -18,9 +18,12 @@ def get_github_orgs(org_id: str): query_validator = get_query_validator() query_validator.org_validator(org_id) - external_integrations_service = get_external_integrations_service( - org_id, UserIdentityProvider.GITHUB - ) + try: + external_integrations_service = get_external_integrations_service( + org_id, UserIdentityProvider.GITHUB + ) + except Exception as e: + return jsonify({"error": str(e)}), 500 try: orgs = external_integrations_service.get_github_organizations() except GithubException as e: @@ -55,9 +58,12 @@ def get_repos(org_id: str, org_login: str, page_size: int, page: int): query_validator = get_query_validator() query_validator.org_validator(org_id) - external_integrations_service = get_external_integrations_service( - org_id, UserIdentityProvider.GITHUB - ) + try: + external_integrations_service = get_external_integrations_service( + org_id, UserIdentityProvider.GITHUB + ) + except Exception as e: + return jsonify({"error": str(e)}), 500 # GitHub pages start from 0 and Bitbucket pages start from 1. 
# Need to be consistent, hence making standard as page starting from 1 # and passing a decremented value to GitHub @@ -77,9 +83,12 @@ def get_prs_for_repo(org_id: str, gh_org_name: str, gh_org_repo_name: str): query_validator = get_query_validator() query_validator.org_validator(org_id) - external_integrations_service = get_external_integrations_service( - org_id, UserIdentityProvider.GITHUB - ) + try: + external_integrations_service = get_external_integrations_service( + org_id, UserIdentityProvider.GITHUB + ) + except Exception as e: + return jsonify({"error": str(e)}), 500 try: workflows_list = external_integrations_service.get_repo_workflows( gh_org_name, gh_org_repo_name diff --git a/backend/analytics_server/mhq/api/sync.py b/backend/analytics_server/mhq/api/sync.py index d1ed5e3b9..e626e7df9 100644 --- a/backend/analytics_server/mhq/api/sync.py +++ b/backend/analytics_server/mhq/api/sync.py @@ -1,4 +1,4 @@ -from flask import Blueprint +from flask import Blueprint, jsonify from mhq.service.query_validator import get_query_validator from mhq.service.sync_data import trigger_data_sync @@ -13,11 +13,12 @@ def sync(): default_org = get_query_validator().get_default_org() if not default_org: - raise Exception("Default org not found") + return jsonify({"message": "Default org not found"}), 404 org_id = str(default_org.id) with get_redis_lock_service().acquire_lock("{org}:" + f"{str(org_id)}:data_sync"): try: trigger_data_sync(org_id) except Exception as e: LOG.error(f"Error syncing data for org {org_id}: {str(e)}") + return {"message": "sync failed", "time": time_now().isoformat()}, 500 return {"message": "sync started", "time": time_now().isoformat()} diff --git a/backend/analytics_server/mhq/service/code/sync/etl_github_handler.py b/backend/analytics_server/mhq/service/code/sync/etl_github_handler.py index ddccab01e..8c1dd1fe9 100644 --- a/backend/analytics_server/mhq/service/code/sync/etl_github_handler.py +++ 
b/backend/analytics_server/mhq/service/code/sync/etl_github_handler.py @@ -29,6 +29,7 @@ ) from mhq.store.repos.code import CodeRepoService from mhq.store.repos.core import CoreRepoService +from mhq.utils.log import LOG from mhq.utils.time import time_now, ISO_8601_DATE_FORMAT PR_PROCESSING_CHUNK_SIZE = 100 @@ -359,7 +360,7 @@ def _get_access_token(): org_id, UserIdentityProvider.GITHUB ) if not access_token: - raise Exception( + LOG.error( f"Access token not found for org {org_id} and provider {UserIdentityProvider.GITHUB.value}" ) return access_token diff --git a/backend/analytics_server/mhq/service/workflows/sync/etl_github_actions_handler.py b/backend/analytics_server/mhq/service/workflows/sync/etl_github_actions_handler.py index 6c08de587..02af62c52 100644 --- a/backend/analytics_server/mhq/service/workflows/sync/etl_github_actions_handler.py +++ b/backend/analytics_server/mhq/service/workflows/sync/etl_github_actions_handler.py @@ -179,7 +179,7 @@ def _get_access_token(): org_id, UserIdentityProvider.GITHUB ) if not access_token: - raise Exception( + LOG.error( f"Access token not found for org {org_id} and provider {UserIdentityProvider.GITHUB.value}" ) return access_token diff --git a/backend/analytics_server/mhq/store/initialise_db.py b/backend/analytics_server/mhq/store/initialise_db.py index cb2e6e165..4f4b2cd98 100644 --- a/backend/analytics_server/mhq/store/initialise_db.py +++ b/backend/analytics_server/mhq/store/initialise_db.py @@ -1,27 +1,25 @@ from mhq.store import db from mhq.store.models import Organization +from mhq.utils.lock import get_redis_lock_service from mhq.utils.string import uuid4_str from mhq.utils.time import time_now def initialize_database(app): with app.app_context(): - default_org = ( - db.session.query(Organization) - .filter(Organization.name == "default") - .one_or_none() - ) - if default_org: - return - default_org = Organization( - id=uuid4_str(), - name="default", - domain="default", - created_at=time_now(), - ) - 
db.session.add(default_org) - db.session.commit() - - -if __name__ == "__main__": - initialize_database() + with get_redis_lock_service().acquire_lock("initialize_database"): + default_org = ( + db.session.query(Organization) + .filter(Organization.name == "default") + .one_or_none() + ) + if default_org: + return + default_org = Organization( + id=uuid4_str(), + name="default", + domain="default", + created_at=time_now(), + ) + db.session.add(default_org) + db.session.commit() diff --git a/docker-compose.yml b/docker-compose.yml index beead7685..322efde15 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -42,8 +42,9 @@ services: - action: rebuild path: ./backend/dev-requirements.txt - - action: rebuild - path: ./setup_util/supervisord.conf + - action: sync+restart + path: ./setup_utils/supervisord.conf + target: /etc/supervisord.conf - action: rebuild path: ./setup_utils/init_db.sh diff --git a/env.example b/env.example index ec3018b45..24db0253c 100644 --- a/env.example +++ b/env.example @@ -1,4 +1,4 @@ -ENVIRONMENT=prod +ENVIRONMENT=dev POSTGRES_DB_ENABLED=true DB_INIT_ENABLED=true REDIS_ENABLED=true diff --git a/setup_utils/cronjob.txt b/setup_utils/cronjob.txt index 8fa20dec5..5fc95d918 100644 --- a/setup_utils/cronjob.txt +++ b/setup_utils/cronjob.txt @@ -1,4 +1,4 @@ #!/usr/bin/env python3 -# Every minute -* * * * * curl -X POST http://localhost:9697/sync >> /var/log/cron/cron.log 2>&1 +# Every 30 minutes, run the sync script +*/30 * * * * curl -X POST http://localhost:9697/sync >> /var/log/cron/cron.log 2>&1 diff --git a/setup_utils/init_db.sh b/setup_utils/init_db.sh index 0c6c7063b..ce0c8c47d 100755 --- a/setup_utils/init_db.sh +++ b/setup_utils/init_db.sh @@ -2,7 +2,6 @@ set -e set -u -set -x POSTGRES_USER="${DB_USER:-postgres}" POSTGRES_PASSWORD="${DB_PASS:-postgres}" @@ -11,9 +10,11 @@ POSTGRES_PORT="${DB_PORT:-5432}" POSTGRES_HOST="${DB_HOST:-127.0.0.1}" wait_for_postgres() { - until su - postgres -c "psql -U postgres -c '\q'"; do - echo "PostgreSQL is unavailable - sleeping" -
sleep 1 + until su - postgres -c "psql -U postgres -c '\q'" >/dev/null 2>&1; do + if [ $? -ne 0 ]; then + echo "PostgreSQL is unavailable - sleeping" + sleep 1 + fi done echo "PostgreSQL is up - continuing" } @@ -27,11 +28,23 @@ if su - postgres -c "psql -U postgres -lqt | cut -d \| -f 1 | grep -qw $POSTGRES else # Create the database if it doesn't exist su - postgres -c "psql -U postgres -c 'CREATE DATABASE \"$POSTGRES_DB\";'" - su - postgres -c "psql -U postgres -d \"$POSTGRES_DB\" -c 'GRANT ALL PRIVILEGES ON DATABASE \"$POSTGRES_DB\" TO \"$POSTGRES_USER\";'" - su - postgres -c "psql -U postgres -c 'ALTER USER \"$POSTGRES_USER\" WITH ENCRYPTED PASSWORD '\''$POSTGRES_PASSWORD'\'';'" fi +su - postgres -c "psql -U postgres -d \"$POSTGRES_DB\" -c 'GRANT ALL PRIVILEGES ON DATABASE \"$POSTGRES_DB\" TO \"$POSTGRES_USER\";'" +su - postgres -c "psql -U postgres -c 'ALTER USER \"$POSTGRES_USER\" WITH ENCRYPTED PASSWORD '\''$POSTGRES_PASSWORD'\'';'" + # Construct the database URL DB_URL="postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST:$POSTGRES_PORT/$POSTGRES_DB?sslmode=disable" /usr/local/bin/dbmate -u "$DB_URL" up + + +MESSAGE="mhq-oss DB initialized" +TOPIC="db_init" +PUB_DIR="/tmp/pubsub" + +# Create directory if it doesn't exist +mkdir -p "$PUB_DIR" + +# Write message to topic file +echo "$MESSAGE" > "$PUB_DIR/$TOPIC" \ No newline at end of file diff --git a/setup_utils/install.sh b/setup_utils/install.sh deleted file mode 100755 index 13ad7c25b..000000000 --- a/setup_utils/install.sh +++ /dev/null @@ -1,24 +0,0 @@ -#! /bin/bash - -function check_docker_daemon() { - docker info >/dev/null 2>&1 - if [ $? -ne 0 ]; then - echo "Docker daemon is not running. Please start Docker and try again." - exit 1 - fi -} - -# Function to show access info -function show_access_info() { - echo "Please wait." 
- echo "Access API server on http://localhost:9696" - echo "Access Postgres DB on http://localhost:5436" - echo "Access Redis on http://localhost:6380" - echo -e "Access frontend on http://localhost:3005\n" -} - -export ENVIRONMENT="prod" -check_docker_daemon -docker-compose up -d - -show_access_info diff --git a/setup_utils/start_api_server.sh b/setup_utils/start_api_server.sh new file mode 100644 index 000000000..015a60577 --- /dev/null +++ b/setup_utils/start_api_server.sh @@ -0,0 +1,26 @@ +#!/bin/bash + +set -u + +TOPIC="db_init" +SUB_DIR="/tmp/pubsub" + +# Function to wait for message on a topic +wait_for_message() { + while [ ! -f "$SUB_DIR/$TOPIC" ]; do + sleep 1 + done + # Read message from topic file + MESSAGE=$(cat "$SUB_DIR/$TOPIC") + echo "Received message: $MESSAGE" +} + +# Wait for message on the specified topic +wait_for_message + +cd /app/backend/analytics_server +if [ "$ENVIRONMENT" == "prod" ]; then + /opt/venv/bin/gunicorn -w 4 -b 0.0.0.0:9696 --timeout 0 app:app +else + /opt/venv/bin/gunicorn -w 4 -b 0.0.0.0:9696 --timeout 0 --reload app:app +fi \ No newline at end of file diff --git a/setup_utils/start_sync_server.sh b/setup_utils/start_sync_server.sh new file mode 100644 index 000000000..db33ca5a2 --- /dev/null +++ b/setup_utils/start_sync_server.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +set -u + +TOPIC="db_init" +SUB_DIR="/tmp/pubsub" + +# Function to wait for message on a topic +wait_for_message() { + while [ ! 
-f "$SUB_DIR/$TOPIC" ]; do + sleep 1 + done + # Read message from topic file + MESSAGE=$(cat "$SUB_DIR/$TOPIC") + echo "Received message: $MESSAGE" +} + +# Wait for message on the specified topic +wait_for_message + + +cd /app/backend/analytics_server +if [ "$ENVIRONMENT" == "prod" ]; then + /opt/venv/bin/gunicorn -w 2 -b 0.0.0.0:9697 --timeout 0 sync_app:app +else + /opt/venv/bin/gunicorn -w 2 -b 0.0.0.0:9697 --timeout 0 --reload sync_app:app +fi diff --git a/setup_utils/supervisord-dev.conf b/setup_utils/supervisord-dev.conf deleted file mode 100644 index cf31b46a3..000000000 --- a/setup_utils/supervisord-dev.conf +++ /dev/null @@ -1,92 +0,0 @@ -[supervisord] -nodaemon=true - -[program:postgres] -priority=1 -user=postgres -command=/bin/bash -c "postgres -D /var/lib/postgresql/15/main -c config_file=/etc/postgresql/15/main/postgresql.conf" -stdout_logfile=/var/log/postgres/postgres.log -stdout_logfile_maxbytes=512KB -stderr_logfile=/var/log/postgres/postgres.log -stderr_logfile_maxbytes=512KB -autorestart=false -environment=POSTGRES_DB_ENABLED=%(ENV_POSTGRES_DB_ENABLED)s -autostart=%(ENV_POSTGRES_DB_ENABLED)s - -[program:initialize_db] -priority=2 -command=/app/setup_utils/init_db.sh -directory=/app -startsecs=5 -stdout_logfile=/var/log/init_db/init_db.log -stdout_logfile_maxbytes=512KB -stderr_logfile=/var/log/init_db/init_db.log -stderr_logfile_maxbytes=512KB -autorestart=false -environment=POSTGRES_DB_ENABLED=%(ENV_POSTGRES_DB_ENABLED)s -autostart=%(ENV_POSTGRES_DB_ENABLED)s - -[program:redis] -priority=3 -command=redis-server --protected-mode no -stdout_logfile=/var/log/redis/redis.log -stdout_logfile_maxbytes=512KB -stderr_logfile=/var/log/redis/redis.log -stderr_logfile_maxbytes=512KB -autorestart=true -environment=REDIS_ENABLED=%(ENV_REDIS_ENABLED)s -autostart=%(ENV_REDIS_ENABLED)s - -[program:backend_sync] -priority=5 -command=/bin/bash -c "/opt/venv/bin/gunicorn -w 1 -b 0.0.0.0:9697 --timeout 0 --reload sync_app:app" 
-directory=/app/backend/analytics_server -stdout_logfile=/var/log/apiserver/apiserver.log -stdout_logfile_maxbytes=512KB -stderr_logfile=/var/log/apiserver/apiserver.log -stderr_logfile_maxbytes=512KB -autorestart=true -retry=3 -retry_delay=5 -environment=BACKEND_ENABLED=%(ENV_BACKEND_ENABLED)s -autostart=%(ENV_BACKEND_ENABLED)s - -[program:backend] -priority=4 -command=/bin/bash -c "/opt/venv/bin/gunicorn -w 4 -b 0.0.0.0:9696 --reload app:app" -directory=/app/backend/analytics_server -stdout_logfile=/var/log/apiserver/apiserver.log -stdout_logfile_maxbytes=512KB -stderr_logfile=/var/log/apiserver/apiserver.log -stderr_logfile_maxbytes=512KB -autorestart=true -retry=3 -retry_delay=5 -environment=BACKEND_ENABLED=%(ENV_BACKEND_ENABLED)s -autostart=%(ENV_BACKEND_ENABLED)s - -[program:frontend] -command=/bin/bash -c "source ~/.bashrc && yarn dev" -directory=/app/web-server -stdout_logfile=/var/log/webserver/webserver.log -stdout_logfile_maxbytes=512KB -stderr_logfile=/var/log/webserver/webserver.log -stderr_logfile_maxbytes=512KB -autorestart=true -retry=3 -retry_delay=5 -environment=FRONTEND_ENABLED=%(ENV_FRONTEND_ENABLED)s -autostart=%(ENV_FRONTEND_ENABLED)s - -[program:cron] -command=/bin/bash -c " exec /usr/sbin/cron -f -l 1" -autorestart=true -startsecs=10 -stderr_logfile=/var/log/cron.log -stdout_logfile=/var/log/cron.log -stdout_logfile_maxbytes=512KB -stderr_logfile_maxbytes=512KB -retry=3 -retry_delay=5 -environment=CRON_ENABLED=%(ENV_CRON_ENABLED)s -autostart=%(ENV_CRON_ENABLED)s diff --git a/setup_utils/supervisord.conf b/setup_utils/supervisord.conf index ed53a8633..5f0674a99 100644 --- a/setup_utils/supervisord.conf +++ b/setup_utils/supervisord.conf @@ -5,10 +5,13 @@ nodaemon=true priority=1 user=postgres command=/bin/bash -c "postgres -D /var/lib/postgresql/15/main -c config_file=/etc/postgresql/15/main/postgresql.conf" +startsecs=10 stdout_logfile=/var/log/postgres/postgres.log stdout_logfile_maxbytes=512KB stderr_logfile=/var/log/postgres/postgres.log 
stderr_logfile_maxbytes=512KB +stdout_logfile_backups=0 +stderr_logfile_backups=0 autorestart=false environment=POSTGRES_DB_ENABLED=%(ENV_POSTGRES_DB_ENABLED)s autostart=%(ENV_POSTGRES_DB_ENABLED)s @@ -17,11 +20,12 @@ autostart=%(ENV_POSTGRES_DB_ENABLED)s priority=2 command=/app/setup_utils/init_db.sh directory=/app -startsecs=5 stdout_logfile=/var/log/init_db/init_db.log stdout_logfile_maxbytes=512KB stderr_logfile=/var/log/init_db/init_db.log stderr_logfile_maxbytes=512KB +stdout_logfile_backups=0 +stderr_logfile_backups=0 autorestart=false environment=POSTGRES_DB_ENABLED=%(ENV_POSTGRES_DB_ENABLED)s autostart=%(ENV_POSTGRES_DB_ENABLED)s @@ -29,36 +33,45 @@ autostart=%(ENV_POSTGRES_DB_ENABLED)s [program:redis] priority=3 command=redis-server --protected-mode no +startsecs=10 stdout_logfile=/var/log/redis/redis.log stdout_logfile_maxbytes=512KB stderr_logfile=/var/log/redis/redis.log stderr_logfile_maxbytes=512KB +stdout_logfile_backups=0 +stderr_logfile_backups=0 autorestart=true environment=REDIS_ENABLED=%(ENV_REDIS_ENABLED)s autostart=%(ENV_REDIS_ENABLED)s -[program:backend_sync] -priority=5 -command=/bin/bash -c "/opt/venv/bin/gunicorn -w 2 -b 0.0.0.0:9697 --timeout 0 sync_app:app" -directory=/app/backend/analytics_server +[program:backend] +priority=4 +command=/bin/bash -c "chmod +x ./start_api_server.sh && ./start_api_server.sh" +directory=/app/setup_utils +startsecs=10 stdout_logfile=/var/log/apiserver/apiserver.log stdout_logfile_maxbytes=512KB stderr_logfile=/var/log/apiserver/apiserver.log stderr_logfile_maxbytes=512KB +stdout_logfile_backups=0 +stderr_logfile_backups=0 autorestart=true retry=3 retry_delay=5 environment=BACKEND_ENABLED=%(ENV_BACKEND_ENABLED)s autostart=%(ENV_BACKEND_ENABLED)s -[program:backend] -priority=4 -command=/bin/bash -c "/opt/venv/bin/gunicorn -w 4 -b 0.0.0.0:9696 app:app" -directory=/app/backend/analytics_server +[program:backend_sync] +priority=5 +command=/bin/bash -c "chmod +x ./start_sync_server.sh && ./start_sync_server.sh" 
+directory=/app/setup_utils +startsecs=10 stdout_logfile=/var/log/apiserver/apiserver.log stdout_logfile_maxbytes=512KB stderr_logfile=/var/log/apiserver/apiserver.log stderr_logfile_maxbytes=512KB +stdout_logfile_backups=0 +stderr_logfile_backups=0 autorestart=true retry=3 retry_delay=5 @@ -66,12 +79,15 @@ environment=BACKEND_ENABLED=%(ENV_BACKEND_ENABLED)s autostart=%(ENV_BACKEND_ENABLED)s [program:frontend] -command=/bin/bash -c "source ~/.bashrc && yarn http" +command=/bin/bash -c "source ~/.bashrc && yarn dev" directory=/app/web-server +startsecs=10 stdout_logfile=/var/log/webserver/webserver.log stdout_logfile_maxbytes=512KB stderr_logfile=/var/log/webserver/webserver.log stderr_logfile_maxbytes=512KB +stdout_logfile_backups=0 +stderr_logfile_backups=0 autorestart=true retry=3 retry_delay=5 @@ -86,6 +102,8 @@ stderr_logfile=/var/log/cron.log stdout_logfile=/var/log/cron.log stdout_logfile_maxbytes=512KB stderr_logfile_maxbytes=512KB +stdout_logfile_backups=0 +stderr_logfile_backups=0 retry=3 retry_delay=5 environment=CRON_ENABLED=%(ENV_CRON_ENABLED)s