diff --git a/.github/workflows/README.md b/.github/workflows/README.md index fa17094..5399992 100644 --- a/.github/workflows/README.md +++ b/.github/workflows/README.md @@ -1,46 +1,65 @@ -# Workflows +# CI/CD Deploy -## Development - Build and Unittest +## Workflows -File: [development_pipeline.yml](development_pipeline.yml) +### Development - Build and Unittest -Event: On **Pull Request** → any branch into **develop** +#### File: [development_pipeline.yml](development_pipeline.yml) -Jobs: +**Event:** On Pull Request → any branch into develop + +**Jobs:** - Install dependencies (caches) - Run isort - Run black - Run flake8 - Build images (caches) -- Push images to docker hub - -### Description: +- Push images to Docker Hub -## Staging - CI/CD Pipeline +**Description:** +This workflow is triggered on Pull Requests into the develop branch. It ensures a clean and standardized codebase by installing dependencies, checking code formatting with isort, black, and flake8, and finally building and pushing Docker images to Docker Hub. -File: [staging_pipeline.yml](staging_pipeline.yml) +### Staging - CI/CD Pipeline -Event: On **Pull Request** → any branch into **staging** +#### File: [staging_pipeline.yml](staging_pipeline.yml) -Jobs: +**Event:** On Pull Request → any branch into staging -- Build -- Unit Test -- Deploy +**Jobs:** -### Description: +- Install dependencies (caches) +- Run isort +- Run black +- Run flake8 +- Build images (caches) +- Push images to Docker Hub +- Create infrastructure +- Configure infrastructure +- Deploy application using Docker Compose +- Clean up following the concept of A/B deploy -## Production - CI/CD Pipeline +**Description:** +This pipeline is designed for the staging environment and is triggered on Pull Requests into the staging branch. It includes steps to ensure code quality, build and push Docker images, create and configure necessary infrastructure, and deploy the application using Docker Compose. 
The cleanup process follows the A/B deployment concept. -File: [production_pipeline.yml](production_pipeline.yml) +### Production - CI/CD Pipeline -Event: On **Pull Request** → any branch into **master** +#### File: [production_pipeline.yml](production_pipeline.yml) -Jobs: +**Event:** On Pull Request → any branch into master -- Build -- Test -- Deploy +**Jobs:** -### Description: +- Install dependencies (caches) +- Run isort +- Run black +- Run flake8 +- Build images (caches) +- Push images to Docker Hub +- Create infrastructure +- Configure infrastructure +- Deploy application using Docker Compose +- Clean up following the concept of A/B deploy + +**Description:** +The production pipeline is triggered on Pull Requests into the master branch, indicating changes are ready for deployment to the production environment. It follows a similar process to the staging pipeline but is specifically tailored for the production environment. The cleanup process adheres to the A/B deployment concept, ensuring a smooth transition between versions. diff --git a/.github/workflows/ansible/deploy-app.yml b/.github/workflows/ansible/deploy_applications.yml similarity index 75% rename from .github/workflows/ansible/deploy-app.yml rename to .github/workflows/ansible/deploy_applications.yml index 1eff43c..0466697 100644 --- a/.github/workflows/ansible/deploy-app.yml +++ b/.github/workflows/ansible/deploy_applications.yml @@ -1,5 +1,5 @@ --- -- name: "configuration play." 
+- name: "Deploy applications" hosts: web user: ubuntu become: true @@ -9,11 +9,18 @@ - ansible_host_key_checking: false - ansible_stdout_callback: yaml - - mlflow_image_name: vectornguyen76/mlflow - - mlflow_tag_name: latest + - image_search_image: vectornguyen76/image-search-engine + - image_search_tag: latest + + - text_search_image: vectornguyen76/text-search-engine + - text_search_tag: latest + + - backend_image: vectornguyen76/backend-search-engine + - backend_tag: latest + + - frontend_image: vectornguyen76/frontend-search-engine + - frontend_tag: latest - - model_predictor_image_name: vectornguyen76/model_predictor - - model_predictor_tag_name: latest pre_tasks: - name: "wait 600 seconds for target connection to become reachable/usable." wait_for_connection: @@ -34,6 +41,7 @@ - python3-pip - virtualenv - python3-setuptools + - unzip state: latest update_cache: true diff --git a/.github/workflows/ansible/inventory.txt b/.github/workflows/ansible/hosts similarity index 100% rename from .github/workflows/ansible/inventory.txt rename to .github/workflows/ansible/hosts diff --git a/.github/workflows/ansible/roles/deploy/tasks/main.yml b/.github/workflows/ansible/roles/deploy/tasks/main.yml index 8323876..aeba0d4 100644 --- a/.github/workflows/ansible/roles/deploy/tasks/main.yml +++ b/.github/workflows/ansible/roles/deploy/tasks/main.yml @@ -6,35 +6,41 @@ - name: "Copy compressed app folder" copy: - src: "artifact-app.tar.gz" - dest: "/home/ubuntu/server/artifact-app.tar.gz" + src: "artifact.zip" + dest: "/home/ubuntu/server/artifact.zip" - name: "Extract app" - unarchive: + ansible.builtin.unarchive: remote_src: yes - src: "/home/ubuntu/server/artifact-app.tar.gz" + src: "/home/ubuntu/server/artifact.zip" dest: "/home/ubuntu/server" -- name: Pull mlflow image +- name: Pull image search image community.docker.docker_image: - name: "{{ mlflow_image_name }}" - tag: "{{ mlflow_tag_name }}" + name: "{{ image_search_image }}" + tag: "{{ image_search_tag }}" source: 
pull -- name: Pull model_predictor image +- name: Pull text search image community.docker.docker_image: - name: "{{ model_predictor_image_name }}" - tag: "{{ model_predictor_tag_name }}" + name: "{{ text_search_image }}" + tag: "{{ text_search_tag }}" source: pull -- name: Run mlflow container - become: True - shell: - chdir: /home/ubuntu/server - cmd: "docker compose -f deployment/mlflow/docker-compose.yml up -d" +- name: Pull backend image + community.docker.docker_image: + name: "{{ backend_image }}" + tag: "{{ backend_tag }}" + source: pull + +- name: Pull frontend image + community.docker.docker_image: + name: "{{ frontend_image }}" + tag: "{{ frontend_tag }}" + source: pull -- name: Run model_predictor container +- name: Run docker compose become: True shell: chdir: /home/ubuntu/server - cmd: "bash deployment/deploy.sh deploy_run_predictor data/model_config/phase-1/prob-1/model-1.yml data/model_config/phase-1/prob-2/model-1.yml 5040" + cmd: "docker compose --profile dev up -d" diff --git a/.github/workflows/cloudformations/README.md b/.github/workflows/cloudformations/README.md new file mode 100644 index 0000000..7f59276 --- /dev/null +++ b/.github/workflows/cloudformations/README.md @@ -0,0 +1,7 @@ +## Create Stack + +aws cloudformation create-stack --stack-name server --template-body file://server.yml --parameters file://server-parameters.json --region us-east-1 + +## Delete Stack + +aws cloudformation delete-stack --stack-name server --region us-east-1 diff --git a/.github/workflows/files/ec2-parameters.json b/.github/workflows/cloudformations/server-parameters.json similarity index 75% rename from .github/workflows/files/ec2-parameters.json rename to .github/workflows/cloudformations/server-parameters.json index 47a30e7..5a46eb8 100644 --- a/.github/workflows/files/ec2-parameters.json +++ b/.github/workflows/cloudformations/server-parameters.json @@ -1,4 +1,8 @@ [ + { + "ParameterKey": "EnvironmentName", + "ParameterValue": "Search-Engine" + }, { 
"ParameterKey": "VpcCIDR", "ParameterValue": "10.0.0.0/16" @@ -12,7 +16,7 @@ "ParameterValue": "t3.medium" }, { - "ParameterKey": "KeyPair", + "ParameterKey": "KeyPairName", "ParameterValue": "my-keypair" }, { diff --git a/.github/workflows/files/ec2.yml b/.github/workflows/cloudformations/server.yml similarity index 74% rename from .github/workflows/files/ec2.yml rename to .github/workflows/cloudformations/server.yml index 4209cac..b1e1133 100644 --- a/.github/workflows/files/ec2.yml +++ b/.github/workflows/cloudformations/server.yml @@ -1,6 +1,10 @@ AWSTemplateFormatVersion: 2010-09-09 -Description: Creates EC2 +Description: Creates EC2 Server Parameters: + EnvironmentName: + Description: An environment name that will be prefixed to resource names + Type: String + VpcCIDR: Description: "VPC range" Type: String @@ -38,7 +42,7 @@ Resources: EnableDnsSupport: true Tags: - Key: Name - Value: "Main VPC" + Value: !Sub ${EnvironmentName} VPC InternetGateway: Type: AWS::EC2::InternetGateway @@ -56,12 +60,12 @@ Resources: PublicSubnet: Type: AWS::EC2::Subnet Properties: - AvailabilityZone: "us-east-1a" + AvailabilityZone: !Select [0, !GetAZs ""] VpcId: !Ref VPC CidrBlock: !Ref PublicSubnetCIDR Tags: - Key: Name - Value: "Public Subnet AZ1" + Value: !Sub ${EnvironmentName} Public Subnet PublicRouteTable: Type: AWS::EC2::RouteTable @@ -88,7 +92,7 @@ Resources: SecurityGroup: Type: AWS::EC2::SecurityGroup Properties: - GroupName: mySecurityGroup + GroupName: !Sub ${EnvironmentName}-SecurityGroup GroupDescription: Allow http to client host VpcId: !Ref VPC SecurityGroupIngress: @@ -96,6 +100,10 @@ Resources: FromPort: 80 ToPort: 80 CidrIp: 0.0.0.0/0 + - IpProtocol: tcp + FromPort: 443 + ToPort: 443 + CidrIp: 0.0.0.0/0 - IpProtocol: tcp FromPort: 22 ToPort: 22 @@ -108,18 +116,11 @@ Resources: FromPort: 3000 ToPort: 3000 CidrIp: 0.0.0.0/0 - - IpProtocol: tcp - FromPort: 5040 - ToPort: 5040 - CidrIp: 0.0.0.0/0 SecurityGroupEgress: - IpProtocol: "-1" FromPort: -1 ToPort: -1 
CidrIp: 0.0.0.0/0 - Tags: - - Key: Name - Value: SecurityGroup # Server EC2 Instance ServerInstance: @@ -135,27 +136,12 @@ Resources: - !Ref SecurityGroup SubnetId: !Ref PublicSubnet BlockDeviceMappings: - - DeviceName: "/dev/sdk" + - DeviceName: "/dev/sda1" Ebs: - VolumeSize: 20 - # UserData: - # Fn::Base64: !Sub | - # #!/bin/bash - # sudo apt-get update -y - # sudo apt-get install ca-certificates curl gnupg - # sudo install -m 0755 -d /etc/apt/keyrings - # curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg - # sudo chmod a+r /etc/apt/keyrings/docker.gpg - # echo \ - # "deb [arch="$(dpkg --print-architecture)" signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \ - # "$(. /etc/os-release && echo "$VERSION_CODENAME")" stable" | \ - # sudo tee /etc/apt/sources.list.d/docker.list > /dev/null - # sudo apt-get update -y - # apt-get install -y docker-ce - # docker run -p 80:8080 tomcat:8.0 + VolumeSize: 50 Tags: - Key: Name - Value: ServerInstance + Value: !Sub ${EnvironmentName}-Instance Outputs: EC2InstanceConnection: diff --git a/.github/workflows/development_pipeline.yml b/.github/workflows/development_pipeline.yml index d173361..fca494b 100644 --- a/.github/workflows/development_pipeline.yml +++ b/.github/workflows/development_pipeline.yml @@ -1,17 +1,12 @@ name: development on: pull_request: - branches: develop - - push: branches: - develop + jobs: build-push-image-search: runs-on: ubuntu-latest - env: - DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} - DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }} steps: - name: Checkout Repository uses: actions/checkout@v2 @@ -65,9 +60,6 @@ jobs: build-push-text-search: runs-on: ubuntu-latest - env: - DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} - DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }} steps: - name: Checkout Repository uses: actions/checkout@v2 @@ -121,9 +113,6 @@ jobs: build-push-backend: runs-on: 
ubuntu-latest - env: - DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} - DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }} steps: - name: Checkout Repository uses: actions/checkout@v2 @@ -177,12 +166,6 @@ jobs: build-push-frontend: runs-on: ubuntu-latest - env: - DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} - DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }} - GOOGLE_CLIENT_ID: ${{ secrets.GOOGLE_CLIENT_ID }} - GOOGLE_CLIENT_SECRET: ${{ secrets.GOOGLE_CLIENT_SECRET }} - NEXTAUTH_SECRET: ${{ secrets.NEXTAUTH_SECRET }} steps: - name: Checkout Repository uses: actions/checkout@v2 @@ -199,9 +182,9 @@ jobs: - name: Add env variable to env file run: | - echo GOOGLE_CLIENT_ID=${{ env.GOOGLE_CLIENT_ID }} >> ./frontend/.env - echo GOOGLE_CLIENT_SECRET=${{ env.GOOGLE_CLIENT_SECRET }} >> ./frontend/.env - echo NEXTAUTH_SECRET=${{ env.NEXTAUTH_SECRET }} >> ./frontend/.env + echo GOOGLE_CLIENT_ID=${{ secrets.GOOGLE_CLIENT_ID }} >> ./frontend/.env + echo GOOGLE_CLIENT_SECRET=${{ secrets.GOOGLE_CLIENT_SECRET }} >> ./frontend/.env + echo NEXTAUTH_SECRET=${{ secrets.NEXTAUTH_SECRET }} >> ./frontend/.env - name: Build and push frontend image uses: docker/build-push-action@v4 diff --git a/.github/workflows/production_pipeline.yml b/.github/workflows/production_pipeline.yml index 92aba26..ab0c7f3 100644 --- a/.github/workflows/production_pipeline.yml +++ b/.github/workflows/production_pipeline.yml @@ -3,8 +3,354 @@ on: pull_request: branches: master jobs: - my-job: + build-push-image-search: runs-on: ubuntu-latest steps: - - name: my-step - run: echo "Production pipeline" + - name: Checkout Repository + uses: actions/checkout@v2 + + - name: Cache Python dependencies + uses: actions/cache@v2 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('./image-search-engine/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip-image-search + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.9 + + - name: 
Install Dependencies + run: pip install -r ./image-search-engine/requirements.txt + + - name: Run isort + run: isort --check-only --profile=black ./image-search-engine/. + + - name: Run black + run: black --check ./image-search-engine/. + + - name: Run flake8 + run: flake8 --ignore=E501,W503,F401 ./image-search-engine + + # - name: Run Pylint + # run: pylint ./image-search-engine/*.py + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Login to Docker Hub + id: docker_hub_auth + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_PASSWORD }} + + - name: Build and push + uses: docker/build-push-action@v4 + with: + context: ./image-search-engine + push: true + tags: ${{ secrets.DOCKERHUB_USERNAME }}/image-search-engine:latest + cache-from: type=gha + cache-to: type=gha,mode=max + + build-push-text-search: + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v2 + + - name: Cache Python dependencies + uses: actions/cache@v2 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('./text-search-engine/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip-text-search + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.9 + + - name: Install Dependencies + run: pip install -r ./text-search-engine/requirements.txt + + - name: Run isort + run: isort --check-only ./text-search-engine/. + + - name: Run black + run: black --check ./text-search-engine/. 
+ + - name: Run flake8 + run: flake8 --ignore=E501,W503,F401 ./text-search-engine + + # - name: Run Pylint + # run: pylint ./image-search-engine/*.py + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Login to Docker Hub + id: docker_hub_auth + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_PASSWORD }} + + - name: Build and push + uses: docker/build-push-action@v4 + with: + context: ./text-search-engine + push: true + tags: ${{ secrets.DOCKERHUB_USERNAME }}/text-search-engine:latest + cache-from: type=gha + cache-to: type=gha,mode=max + + build-push-backend: + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v2 + + - name: Cache Python dependencies + uses: actions/cache@v2 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('./backend/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip-text-search + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + + - name: Install Dependencies + run: pip install -r ./backend/requirements.txt + + - name: Run isort + run: isort --check-only --profile=black ./backend/. + + - name: Run black + run: black --check ./backend/. 
+ + - name: Run flake8 + run: flake8 --ignore=E501,W503,F401 ./backend + + # - name: Run Pylint + # run: pylint ./backend/*.py + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Login to Docker Hub + id: docker_hub_auth + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_PASSWORD }} + + - name: Build and push + uses: docker/build-push-action@v4 + with: + context: ./backend + push: true + tags: ${{ secrets.DOCKERHUB_USERNAME }}/backend-search-engine:latest + cache-from: type=gha + cache-to: type=gha,mode=max + + build-push-frontend: + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v2 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Login to Docker Hub + id: docker_hub_auth + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_PASSWORD }} + + - name: Add env variable to env file + run: | + echo GOOGLE_CLIENT_ID=${{ secrets.GOOGLE_CLIENT_ID }} >> ./frontend/.env + echo GOOGLE_CLIENT_SECRET=${{ secrets.GOOGLE_CLIENT_SECRET }} >> ./frontend/.env + echo NEXTAUTH_SECRET=${{ secrets.NEXTAUTH_SECRET }} >> ./frontend/.env + + - name: Build and push frontend image + uses: docker/build-push-action@v4 + with: + context: ./frontend + file: ./frontend/Dockerfile + push: true + tags: ${{ secrets.DOCKERHUB_USERNAME }}/frontend-search-engine:latest + cache-from: type=gha + cache-to: type=gha,mode=max + + create-config-infrastructure: + runs-on: ubuntu-latest + needs: + - build-push-image-search + - build-push-text-search + - build-push-backend + - build-push-frontend + steps: + - name: Checkout Repository + uses: actions/checkout@v2 + + - name: Declare variables + shell: bash + run: | + echo "SHA_SHORT=$(git rev-parse --short "$GITHUB_SHA")" >> "$GITHUB_ENV" + echo "BRANCH=$(echo ${GITHUB_REF#refs/heads/})" >> "$GITHUB_ENV" + + - name: Configure AWS 
credentials + id: creds + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-east-1 + + - name: Deploy to AWS CloudFormation + uses: aws-actions/aws-cloudformation-github-deploy@v1 + with: + name: search-engine-prod-${{ env.SHA_SHORT }} + template: ./.github/workflows/cloudformations/server.yml + parameter-overrides: "file:///${{ github.workspace }}/.github/workflows/cloudformations/server-parameters.json" + tags: ${{ vars.TAGS }} + + - name: Get Public DNS Server + run: | + # Create file + backend_public_dns=search-engine-prod-${{ env.SHA_SHORT }}-PublicDNS + # Pull the export value + host=$(aws cloudformation list-exports \ + --query "Exports[?Name==\`$backend_public_dns\`].Value" \ + --no-paginate --output text) + + echo $host + # Append the DNS to the inventory file + echo $host >> $(eval echo "./.github/workflows/ansible/hosts") + + cat ./.github/workflows/ansible/hosts + + - name: Zip artifact files + uses: montudor/action-zip@v1 + with: + args: zip -qq -r artifact.zip . 
+ + - name: Create files folder in ansible + run: mkdir -p ./.github/workflows/ansible/roles/deploy/files + + - name: Copy file + uses: canastro/copy-file-action@master + with: + source: "artifact.zip" + target: "./.github/workflows/ansible/roles/deploy/files/artifact.zip" + + - name: Run playbook + uses: dawidd6/action-ansible-playbook@v2 + with: + playbook: deploy_applications.yml + directory: ./.github/workflows/ansible + key: ${{secrets.SSH_PRIVATE_KEY}} + options: | + --inventory ./hosts + + - name: Remove stack on fail + if: failure() + run: | + echo search-engine-prod-${{ env.SHA_SHORT }} + # Get stack id for the delete_stack waiter + stack_info=$(aws cloudformation describe-stacks --stack-name search-engine-prod-${{ env.SHA_SHORT }} --query "Stacks[*] | [0].StackId" 2>&1) + if echo $stack_info | grep 'does not exist' > /dev/null + then + echo "Stack does not exist." + echo $stack_info + exit 0 + fi + if echo $stack_info | grep 'ValidationError' > /dev/null + then + echo $stack_info + exit 1 + else + aws cloudformation delete-stack --stack-name search-engine-prod-${{ env.SHA_SHORT }} + echo $stack_info + aws cloudformation wait stack-delete-complete --stack-name search-engine-prod-${{ env.SHA_SHORT }} + exit 0 + fi + + clean-up: + runs-on: ubuntu-latest + needs: + - create-config-infrastructure + steps: + - name: Checkout Repository + uses: actions/checkout@v2 + + - name: Declare some variables + shell: bash + run: | + echo "SHA_SHORT=$(git rev-parse --short "$GITHUB_SHA")" >> "$GITHUB_ENV" + echo "BRANCH=$(echo ${GITHUB_REF#refs/heads/})" >> "$GITHUB_ENV" + + - name: Configure AWS credentials + id: creds + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-east-1 + + - name: Fetch stacks and save the old stack name + run: | + # Fetch the stack names + export STACKS=( + $(aws cloudformation list-stacks \ + --query 
"StackSummaries[*].StackName" \ + --no-paginate --output text \ + --stack-status-filter CREATE_COMPLETE UPDATE_COMPLETE + ) + ) + for stack in "${STACKS[@]}"; do + if [[ ! "$stack" =~ "${{ env.SHA_SHORT }}" ]] && [[ "$stack" =~ "search-engine-prod" ]]; then + echo "DESTROY_STACK=$stack" >> "$GITHUB_ENV" + fi + done + + - name: Remove the search engine infrastructure + run: | + # Check if DESTROY_STACK is not set + if [ -z "${{ env.DESTROY_STACK }}" ]; then + echo "DESTROY_STACK is not set" + exit 0 + else + echo "DESTROY_STACK is set to ${{ env.DESTROY_STACK }}" + fi + + # Get stack id for the delete_stack waiter + stack_info=$(aws cloudformation describe-stacks --stack-name ${{ env.DESTROY_STACK }} --query "Stacks[*] | [0].StackId" 2>&1) + if echo $stack_info | grep 'does not exist' > /dev/null + then + echo "Stack does not exist." + echo $stack_info + exit 0 + fi + if echo $stack_info | grep 'ValidationError' > /dev/null + then + echo $stack_info + exit 1 + else + aws cloudformation delete-stack --stack-name ${{ env.DESTROY_STACK }} + echo $stack_info + aws cloudformation wait stack-delete-complete --stack-name ${{ env.DESTROY_STACK }} + exit 0 + fi diff --git a/.github/workflows/staging_pipeline.yml b/.github/workflows/staging_pipeline.yml index 8c32298..2aa5376 100644 --- a/.github/workflows/staging_pipeline.yml +++ b/.github/workflows/staging_pipeline.yml @@ -3,8 +3,354 @@ on: pull_request: branches: staging jobs: - my-job: + build-push-image-search: runs-on: ubuntu-latest steps: - - name: my-step - run: echo "Staging pipeline" + - name: Checkout Repository + uses: actions/checkout@v2 + + - name: Cache Python dependencies + uses: actions/cache@v2 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('./image-search-engine/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip-image-search + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.9 + + - name: Install Dependencies + run: pip install -r 
./image-search-engine/requirements.txt + + - name: Run isort + run: isort --check-only --profile=black ./image-search-engine/. + + - name: Run black + run: black --check ./image-search-engine/. + + - name: Run flake8 + run: flake8 --ignore=E501,W503,F401 ./image-search-engine + + # - name: Run Pylint + # run: pylint ./image-search-engine/*.py + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Login to Docker Hub + id: docker_hub_auth + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_PASSWORD }} + + - name: Build and push + uses: docker/build-push-action@v4 + with: + context: ./image-search-engine + push: true + tags: ${{ secrets.DOCKERHUB_USERNAME }}/image-search-engine:latest + cache-from: type=gha + cache-to: type=gha,mode=max + + build-push-text-search: + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v2 + + - name: Cache Python dependencies + uses: actions/cache@v2 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('./text-search-engine/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip-text-search + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.9 + + - name: Install Dependencies + run: pip install -r ./text-search-engine/requirements.txt + + - name: Run isort + run: isort --check-only ./text-search-engine/. + + - name: Run black + run: black --check ./text-search-engine/. 
+ + - name: Run flake8 + run: flake8 --ignore=E501,W503,F401 ./text-search-engine + + # - name: Run Pylint + # run: pylint ./image-search-engine/*.py + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Login to Docker Hub + id: docker_hub_auth + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_PASSWORD }} + + - name: Build and push + uses: docker/build-push-action@v4 + with: + context: ./text-search-engine + push: true + tags: ${{ secrets.DOCKERHUB_USERNAME }}/text-search-engine:latest + cache-from: type=gha + cache-to: type=gha,mode=max + + build-push-backend: + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v2 + + - name: Cache Python dependencies + uses: actions/cache@v2 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('./backend/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip-text-search + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + + - name: Install Dependencies + run: pip install -r ./backend/requirements.txt + + - name: Run isort + run: isort --check-only --profile=black ./backend/. + + - name: Run black + run: black --check ./backend/. 
+ + - name: Run flake8 + run: flake8 --ignore=E501,W503,F401 ./backend + + # - name: Run Pylint + # run: pylint ./backend/*.py + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Login to Docker Hub + id: docker_hub_auth + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_PASSWORD }} + + - name: Build and push + uses: docker/build-push-action@v4 + with: + context: ./backend + push: true + tags: ${{ secrets.DOCKERHUB_USERNAME }}/backend-search-engine:latest + cache-from: type=gha + cache-to: type=gha,mode=max + + build-push-frontend: + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v2 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Login to Docker Hub + id: docker_hub_auth + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_PASSWORD }} + + - name: Add env variable to env file + run: | + echo GOOGLE_CLIENT_ID=${{ secrets.GOOGLE_CLIENT_ID }} >> ./frontend/.env + echo GOOGLE_CLIENT_SECRET=${{ secrets.GOOGLE_CLIENT_SECRET }} >> ./frontend/.env + echo NEXTAUTH_SECRET=${{ secrets.NEXTAUTH_SECRET }} >> ./frontend/.env + + - name: Build and push frontend image + uses: docker/build-push-action@v4 + with: + context: ./frontend + file: ./frontend/Dockerfile + push: true + tags: ${{ secrets.DOCKERHUB_USERNAME }}/frontend-search-engine:latest + cache-from: type=gha + cache-to: type=gha,mode=max + + create-config-infrastructure: + runs-on: ubuntu-latest + needs: + - build-push-image-search + - build-push-text-search + - build-push-backend + - build-push-frontend + steps: + - name: Checkout Repository + uses: actions/checkout@v2 + + - name: Declare variables + shell: bash + run: | + echo "SHA_SHORT=$(git rev-parse --short "$GITHUB_SHA")" >> "$GITHUB_ENV" + echo "BRANCH=$(echo ${GITHUB_REF#refs/heads/})" >> "$GITHUB_ENV" + + - name: Configure AWS 
credentials + id: creds + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-east-1 + + - name: Deploy to AWS CloudFormation + uses: aws-actions/aws-cloudformation-github-deploy@v1 + with: + name: search-engine-staging-${{ env.SHA_SHORT }} + template: ./.github/workflows/cloudformations/server.yml + parameter-overrides: "file:///${{ github.workspace }}/.github/workflows/cloudformations/server-parameters.json" + tags: ${{ vars.TAGS }} + + - name: Get Public DNS Server + run: | + # Create file + backend_public_dns=search-engine-staging-${{ env.SHA_SHORT }}-PublicDNS + # Pull the export value + host=$(aws cloudformation list-exports \ + --query "Exports[?Name==\`$backend_public_dns\`].Value" \ + --no-paginate --output text) + + echo $host + # Append the DNS to the inventory file + echo $host >> $(eval echo "./.github/workflows/ansible/hosts") + + cat ./.github/workflows/ansible/hosts + + - name: Zip artifact files + uses: montudor/action-zip@v1 + with: + args: zip -qq -r artifact.zip . 
+ + - name: Create files folder in ansible + run: mkdir -p ./.github/workflows/ansible/roles/deploy/files + + - name: Copy file + uses: canastro/copy-file-action@master + with: + source: "artifact.zip" + target: "./.github/workflows/ansible/roles/deploy/files/artifact.zip" + + - name: Run playbook + uses: dawidd6/action-ansible-playbook@v2 + with: + playbook: deploy_applications.yml + directory: ./.github/workflows/ansible + key: ${{secrets.SSH_PRIVATE_KEY}} + options: | + --inventory ./hosts + + - name: Remove stack on fail + if: failure() + run: | + echo search-engine-staging-${{ env.SHA_SHORT }} + # Get stack id for the delete_stack waiter + stack_info=$(aws cloudformation describe-stacks --stack-name search-engine-staging-${{ env.SHA_SHORT }} --query "Stacks[*] | [0].StackId" 2>&1) + if echo $stack_info | grep 'does not exist' > /dev/null + then + echo "Stack does not exist." + echo $stack_info + exit 0 + fi + if echo $stack_info | grep 'ValidationError' > /dev/null + then + echo $stack_info + exit 1 + else + aws cloudformation delete-stack --stack-name search-engine-staging-${{ env.SHA_SHORT }} + echo $stack_info + aws cloudformation wait stack-delete-complete --stack-name search-engine-staging-${{ env.SHA_SHORT }} + exit 0 + fi + + clean-up: + runs-on: ubuntu-latest + needs: + - create-config-infrastructure + steps: + - name: Checkout Repository + uses: actions/checkout@v2 + + - name: Declare some variables + shell: bash + run: | + echo "SHA_SHORT=$(git rev-parse --short "$GITHUB_SHA")" >> "$GITHUB_ENV" + echo "BRANCH=$(echo ${GITHUB_REF#refs/heads/})" >> "$GITHUB_ENV" + + - name: Configure AWS credentials + id: creds + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-east-1 + + - name: Fetch stacks and save the old stack name + run: | + # Fetch the stack names + export STACKS=( + $(aws cloudformation list-stacks \ + 
--query "StackSummaries[*].StackName" \ + --no-paginate --output text \ + --stack-status-filter CREATE_COMPLETE UPDATE_COMPLETE + ) + ) + for stack in "${STACKS[@]}"; do + if [[ ! "$stack" =~ "${{ env.SHA_SHORT }}" ]] && [[ "$stack" =~ "search-engine-staging" ]]; then + echo "DESTROY_STACK=$stack" >> "$GITHUB_ENV" + fi + done + + - name: Remove the search engine infrastructure + run: | + # Check if DESTROY_STACK is not set + if [ -z "${{ env.DESTROY_STACK }}" ]; then + echo "DESTROY_STACK is not set" + exit 0 + else + echo "DESTROY_STACK is set to ${{ env.DESTROY_STACK }}" + fi + + # Get stack id for the delete_stack waiter + stack_info=$(aws cloudformation describe-stacks --stack-name ${{ env.DESTROY_STACK }} --query "Stacks[*] | [0].StackId" 2>&1) + if echo $stack_info | grep 'does not exist' > /dev/null + then + echo "Stack does not exist." + echo $stack_info + exit 0 + fi + if echo $stack_info | grep 'ValidationError' > /dev/null + then + echo $stack_info + exit 1 + else + aws cloudformation delete-stack --stack-name ${{ env.DESTROY_STACK }} + echo $stack_info + aws cloudformation wait stack-delete-complete --stack-name ${{ env.DESTROY_STACK }} + exit 0 + fi diff --git a/docker-compose.yaml b/docker-compose.yaml index f91f1b0..16b62b1 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -17,6 +17,7 @@ services: - driver: nvidia count: 1 capabilities: [gpu] + profiles: ["prod"] qdrant_db: container_name: qdrant_db @@ -130,6 +131,19 @@ services: ports: - 3000:3000 + nginx_dev_service: + container_name: nginx_container + image: nginx:1.25.1-alpine + ports: + - 80:80 + volumes: + - ./nginx/develop.conf:/etc/nginx/conf.d/default.conf + - ./nginx/log:/var/log/nginx/ + depends_on: + - frontend_service + - backend_service + profiles: ["dev"] + nginx_service: container_name: nginx_container image: nginx:1.25.1-alpine @@ -144,6 +158,7 @@ services: depends_on: - frontend_service - backend_service + profiles: ["prod"] volumes: elastic_search_data: diff --git 
a/helm-charts/README.md b/helm-charts/README.md index ebc4915..8e6875e 100644 --- a/helm-charts/README.md +++ b/helm-charts/README.md @@ -50,7 +50,7 @@ Helm is a package manager for Kubernetes, simplifying the process of defining, i - **Deleting a Cluster and Node Group** ``` - eksctl delete cluster -f cluster-config-eksctl.yaml --disable-nodegroup-eviction + eksctl delete cluster -f cluster-config-eksctl.yaml --disable-nodegroup-eviction --wait ```

diff --git a/helm-charts/cluster-config-eksctl.yaml b/helm-charts/cluster-config-eksctl.yaml index 3dd03a3..e595b1f 100644 --- a/helm-charts/cluster-config-eksctl.yaml +++ b/helm-charts/cluster-config-eksctl.yaml @@ -4,8 +4,9 @@ kind: ClusterConfig metadata: name: test-cluster region: us-east-1 + version: "1.27" tags: - ApplicationName: "QAI-Monitoring" + ApplicationName: "QAI_Monitoring" Purpose: "Training" Project: "QAI_Monitoring" ProjectID: "QAI_Monitoring" diff --git a/kubernetes/README.md b/kubernetes/README.md index a2dbb91..109bcf7 100644 --- a/kubernetes/README.md +++ b/kubernetes/README.md @@ -75,7 +75,7 @@ Guidelines for setting up a Kubernetes environment suitable for production. - **Deleting a Cluster and Node Group** ``` - eksctl delete cluster -f cluster-config-eksctl.yaml --disable-nodegroup-eviction + eksctl delete cluster -f cluster-config-eksctl.yaml --disable-nodegroup-eviction --wait ```

diff --git a/nginx/develop.conf b/nginx/develop.conf new file mode 100644 index 0000000..9b01371 --- /dev/null +++ b/nginx/develop.conf @@ -0,0 +1,37 @@ +upstream frontend { + server frontend_service:3000; +} +upstream backend { + server backend_service:5000; +} + + +server { + listen 80; + client_max_body_size 16M; + + location / { + proxy_pass http://frontend; + + # Allow the use of websockets + proxy_http_version 1.1; + proxy_buffering off; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'Upgrade'; + proxy_set_header Host $host; + proxy_cache_bypass $http_upgrade; + } + + location /backend/ { + proxy_pass http://backend/; + + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + # Log + access_log /var/log/nginx/access.log; + error_log /var/log/nginx/error.log; +}