Waterfall view for a trace #14077

Workflow file for this run

name: Monorepo
on:
    push:
        branches: ['main']
    pull_request:
        types: [opened, synchronize]
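# runs sharing this group (same workflow + ref) never execute concurrently; with no cancel-in-progress set, newer runs queue behind the in-progress one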
concurrency: ${{ github.workflow }}-${{ github.ref }}
jobs:
    yarn-monorepo:
        name: Build Yarn Turborepo
        timeout-minutes: 30
        runs-on: buildjet-4vcpu-ubuntu-2204
        # configures turborepo Remote Caching
        env:
            TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
            TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
        steps:
            - name: Checkout
              uses: actions/checkout@v3
              with:
                  submodules: recursive
                  fetch-depth: 0
            - uses: dorny/paths-filter@v2
              id: filter
              with:
                  filters: |
                      npm-changed:
                          - 'sdk/client/**'
                          - 'sdk/firstload/**'
                          - 'sdk/highlight-apollo/**'
                          - 'sdk/highlight-cloudflare/**'
                          - 'sdk/highlight-nest/**'
                          - 'sdk/highlight-next/**'
                          - 'sdk/highlight-node/**'
                          - 'sdk/highlight-react/**'
                          - 'sdk/highlight-remix/**'
                          - 'sdk/opentelemetry-sdk-workers/packages/opentelemetry-sdk-workers/**'
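            # paths-filter exposes each filter as a step output, so later logic can check steps.filter.outputs.npm-changed == 'true' when SDK paths changed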
            # automatically caches dependencies based on yarn.lock
            - name: Setup Node.js environment
              uses: actions/setup-node@v4
              with:
                  node-version: lts/*
                  cache: 'yarn'
            - name: Install js dependencies
              run: yarn
            - name: Check generated files for Reflame
              run: yarn reflame-check
            - name: Check formatting
              run: yarn format-check
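            # the repo/ref condition below (repeated on later steps) skips secret-dependent steps for pull requests from forks, since fork PRs don't receive repository secrets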
            - name: Configure AWS credentials
              if: github.event.pull_request.head.repo.full_name == 'highlight/highlight' || github.ref == 'refs/heads/main'
              uses: aws-actions/configure-aws-credentials@v4
              with:
                  aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
                  aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
                  aws-region: us-east-2
            - name: Install Doppler CLI
              uses: dopplerhq/cli-action@v2
            # setting REACT_APP_COMMIT_SHA to a dummy value in CI to allow consistent turborepo remote caching
            - name: Build & test (with render environment)
              if: github.event.pull_request.head.repo.full_name == 'highlight/highlight' || github.ref == 'refs/heads/main'
              run: doppler run -- bash -c 'RENDER_PREVIEW=false yarn test:all'
              env:
                  DOPPLER_TOKEN: ${{ secrets.DOPPLER_PROD_RENDER_SECRET }}
                  GRAPHCMS_TOKEN: ${{ secrets.GRAPHCMS_TOKEN }}
                  NEXT_PUBLIC_HIGHLIGHT_PROJECT_ID: 1jdkoe52
                  REACT_APP_COMMIT_SHA: abcdef1234567890a1b2c3d4e5f6fedcba098765
            - name: Build & test (in a fork without doppler)
              if: github.event.pull_request.head.repo.full_name != 'highlight/highlight' && github.ref != 'refs/heads/main'
              run: yarn test:all
              env:
                  GRAPHCMS_TOKEN: ${{ secrets.GRAPHCMS_TOKEN }}
                  NEXT_PUBLIC_HIGHLIGHT_PROJECT_ID: 1jdkoe52
                  REACT_APP_COMMIT_SHA: abcdef1234567890a1b2c3d4e5f6fedcba098765
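            # temporarily authorizes this runner's public IP against the RDS security group so the steps below can reach Postgres on 5432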
            - name: Add public IP to AWS security group
              if: github.event.pull_request.head.repo.full_name == 'highlight/highlight' || github.ref == 'refs/heads/main'
              uses: sohelamin/aws-security-group-add-ip-action@master
              with:
                  aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
                  aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
                  aws-region: 'us-east-2'
                  aws-security-group-id: ${{ secrets.AWS_RDS_SECURITY_GROUP_ID }}
                  port: '5432'
                  to-port: ''
                  protocol: 'tcp'
                  description: 'GitHub Action for Monorepo'
            - name: Test session screenshot lambda
              if: github.event.pull_request.head.repo.full_name == 'highlight/highlight' || github.ref == 'refs/heads/main'
              run: doppler run -- yarn test:render
              env:
                  DOPPLER_TOKEN: ${{ secrets.DOPPLER_PROD_RENDER_SECRET }}
            - name: Validate session screenshot lambda size
              # this can only run after `yarn test:render` runs
              if: github.event.pull_request.head.repo.full_name == 'highlight/highlight' || github.ref == 'refs/heads/main'
              run: yarn workspace render zip && yarn workspace render check
            - name: Upload frontend sourcemaps
              if: github.ref == 'refs/heads/main'
              run: yarn sourcemaps:frontend
              env:
                  HIGHLIGHT_API_KEY: ${{ secrets.HIGHLIGHT_SOURCEMAP_API_KEY }}
                  APP_VERSION: ${{ github.event.pull_request.head.sha || github.sha }}
            - name: Configure yarn npm registry credentials
              if: github.ref == 'refs/heads/main'
              run: |
                  yarn config set npmRegistryServer "https://registry.npmjs.org"
                  yarn config set npmAuthToken "${NPM_TOKEN}"
              env:
                  NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
            - name: Publish ai lambda
              if: github.ref == 'refs/heads/main'
              run: yarn publish:ai
            - name: Publish render lambda
              if: github.ref == 'refs/heads/main'
              run: yarn publish:render
            - name: Publish session-insights-email lambda
              if: github.ref == 'refs/heads/main'
              run: yarn publish:react-email-templates
            - name: Publish changesets
              if: github.ref == 'refs/heads/main'
              id: changesets
              uses: changesets/action@v1
              with:
                  # This expects you to have a script called release which does a build for your packages and calls changeset publish
                  version: yarn changeset version
                  publish: yarn publish:turbo
              env:
                  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
                  NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
                  NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
    e2e-cypress:
        # non-blocking since we will just test prod client
        # needs the client build to be uploaded
        needs: yarn-monorepo
        name: E2E Client Cypress
        timeout-minutes: 60
        runs-on: buildjet-4vcpu-ubuntu-2204
        steps:
            - name: Checkout
              uses: actions/checkout@v3
              with:
                  submodules: recursive
            - name: Setup Node.js environment
              uses: buildjet/setup-node@v4
              with:
                  node-version: lts/*
                  cache: 'yarn'
            - name: Setup Golang environment
              uses: buildjet/setup-go@v5
              with:
                  go-version-file: 'backend/go.mod'
                  cache-dependency-path: '**/go.sum'
            - name: Install ffmpeg
              run: |
                  curl -o ffmpeg.tar.xz https://johnvansickle.com/ffmpeg/builds/ffmpeg-git-amd64-static.tar.xz
                  mkdir ~/bin
                  tar -C ~/bin --strip-components=1 -xf ffmpeg.tar.xz
                  ls ~/bin
            - name: Login to Docker Hub
              if: github.event.pull_request.head.repo.full_name == 'highlight/highlight' || github.ref == 'refs/heads/main'
              uses: docker/login-action@v3
              with:
                  username: ${{ secrets.DOCKERHUB_USERNAME }}
                  password: ${{ secrets.DOCKERHUB_TOKEN }}
            - name: Configure AWS credentials
              if: github.event.pull_request.head.repo.full_name == 'highlight/highlight' || github.ref == 'refs/heads/main'
              uses: aws-actions/configure-aws-credentials@v4
              with:
                  aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
                  aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
                  aws-region: us-east-2
            - name: Start docker containers & run cypress
              env:
                  COMMIT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
                  REF: ${{ github.ref }}
                  REPO: ${{ github.event.pull_request.head.repo.full_name }}
              run: |
                  start_time=$(date -Isecond)
                  export RUN_SESSION_SCREENSHOT_LAMBDA=false
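                  # pull requests from the main repo get the real commit SHA baked into the client build and exercise the screenshot lambda; main pushes and fork PRs keep the defaults above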
if [[ "$REF" != "refs/heads/main" && "$REPO" == "highlight/highlight" ]]; then
export REACT_APP_COMMIT_SHA="${COMMIT_SHA}"
export RUN_SESSION_SCREENSHOT_LAMBDA=true
fi
# start highlight
pushd docker;
source ./env.sh;
./start-infra.sh > /tmp/highlight.log 2>&1;
docker compose exec -e PSQL_HOST -e PSQL_USER -e PSQL_DB postgres bash -c 'psql -h $PSQL_HOST -U $PSQL_USER $PSQL_DB < /root/init.sql' >> /tmp/highlight.log 2>&1;
./run-backend.sh >> /tmp/highlight.log 2>&1 &
yarn install >> /tmp/highlight.log 2>&1;
unset REACT_APP_IN_DOCKER;
yarn build:frontend >> /tmp/highlight.log 2>&1;
yarn workspace @highlight-run/apollo build >> /tmp/highlight.log 2>&1;
yarn workspace @highlight-run/client dev &
yarn workspace highlight.run dev &
yarn workspace @highlight-run/frontend vite preview --port 3000 &
popd;
# run opentelemetry file watcher
pushd e2e/opentelemetry/filelog;
EXAMPLE_LOG_FILE_PATH=/tmp/highlight.log docker compose run -d file-collector;
popd;
# wait for highlight to be ready
yarn dlx wait-on -l -s 3 http://127.0.0.1:3000/index.html http://127.0.0.1:8082/health;
# run cypress tests
yarn cy:run;
# look for containers that crashed
num_crashed=$(docker ps -a -f status=exited | grep -E '\(' | grep -cvE '\(\d+\)' || true)
if [ "$num_crashed" -gt 0 ]; then
echo "$num_crashed containers crashed"
docker ps -a -f status=exited
exit 1
fi
- name: Dump setup logs on failure
if: failure()
run: cat /tmp/highlight.log
- name: Dump docker container logs on failure
if: failure()
run: |
cd docker;
docker compose logs;
- name: Dump databases on failure
if: failure()
run: |
cd docker;
mkdir backups
docker compose exec postgres bash -c "mkdir /backups";
docker compose exec postgres bash -c "pg_dump -h localhost -U postgres -d postgres > /backups/postgres.sql";
docker compose exec postgres bash -c "psql -h localhost -U postgres -d postgres -c 'select * from sessions;' > /backups/sessions.sql";
docker compose exec postgres bash -c "cat /backups/postgres.sql" > ./backups/postgres.sql 2>&1;
docker compose exec postgres bash -c "cat /backups/sessions.sql" > ./backups/sessions.sql 2>&1;
docker compose exec clickhouse bash -c "mkdir /backups && chmod -R 777 /backups";
docker compose exec clickhouse clickhouse-client --host clickhouse --query "BACKUP DATABASE default TO File('/backups/clickhouse.zip')";
docker compose exec clickhouse bash -c "cat /backups/clickhouse.zip" > ./backups/clickhouse.zip 2>&1;
- name: Save database artifacts
if: failure()
uses: actions/upload-artifact@v3
with:
name: db-dump
path: docker/backups/*
- name: Save videos
uses: actions/upload-artifact@v3
if: failure()
with:
name: cypress-videos
path: cypress/videos
    e2e-frontend-backend:
        name: E2E Frontend / Backend
        timeout-minutes: 60
        runs-on: buildjet-4vcpu-ubuntu-2204
        steps:
            - name: Checkout
              uses: actions/checkout@v3
              with:
                  submodules: recursive
            - name: Setup Node.js environment
              uses: buildjet/setup-node@v4
              with:
                  node-version: lts/*
                  cache: 'yarn'
            - name: Setup Golang environment
              uses: buildjet/setup-go@v5
              with:
                  go-version-file: 'backend/go.mod'
                  cache-dependency-path: '**/go.sum'
            - name: Setup .NET environment
              uses: actions/setup-dotnet@v4
              with:
                  dotnet-version: 8.x
            - name: Install poetry
              run: pipx install poetry
            - name: Install python
              uses: actions/setup-python@v4
              with:
                  python-version: '3.10'
                  cache: 'poetry'
            - name: Install dependencies
              working-directory: ./e2e/tests
              run: poetry install --all-extras
            - name: Login to Docker Hub
              if: github.event.pull_request.head.repo.full_name == 'highlight/highlight' || github.ref == 'refs/heads/main'
              uses: docker/login-action@v3
              with:
                  username: ${{ secrets.DOCKERHUB_USERNAME }}
                  password: ${{ secrets.DOCKERHUB_TOKEN }}
            - name: Start docker containers & run sdk e2e test
              run: |
                  start_time=$(date -Isecond)
                  # start highlight
                  pushd docker;
                  source ./env.sh;
                  ./start-infra.sh > /tmp/highlight.log 2>&1;
                  docker compose exec -e PSQL_HOST -e PSQL_USER -e PSQL_DB postgres bash -c 'psql -h $PSQL_HOST -U $PSQL_USER $PSQL_DB < /root/init.sql' >> /tmp/highlight.log 2>&1;
                  ./run-backend.sh >> /tmp/highlight.log 2>&1 &
                  yarn install >> /tmp/highlight.log 2>&1;
                  yarn build:sdk >> /tmp/highlight.log 2>&1;
                  popd;
                  # run python backend functional tests
                  pushd ./e2e/tests
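                  # dummy OAuth client credentials expected by the functional tests against the locally started backend (placeholder values, not real secrets)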
                  export HIGHLIGHT_OAUTH_CLIENT_ID=abc123
                  export HIGHLIGHT_OAUTH_CLIENT_SECRET=def456
                  poetry run pytest .
                  popd
                  # look for containers that crashed
                  num_crashed=$(docker ps -a -f status=exited | grep -E '\(' | grep -cvE '\(\d+\)' || true)
                  if [ "$num_crashed" -gt 0 ]; then
                      echo "$num_crashed containers crashed"
                      docker ps -a -f status=exited
                      exit 1
                  fi
            - name: Dump setup logs on failure
              if: failure()
              run: cat /tmp/highlight.log
            - name: Dump docker container logs on failure
              if: failure()
              run: |
                  cd docker;
                  docker compose logs;
            - name: Dump databases on failure
              if: failure()
              run: |
                  cd docker;
                  mkdir backups
                  docker compose exec postgres bash -c "mkdir /backups";
                  docker compose exec postgres bash -c "pg_dump -h localhost -U postgres postgres > /backups/postgres.sql";
                  docker compose exec postgres bash -c "cat /backups/postgres.sql" > ./backups/postgres.sql 2>&1;
                  docker compose exec clickhouse bash -c "mkdir /backups && chmod -R 777 /backups";
                  docker compose exec clickhouse clickhouse-client --host clickhouse --query "BACKUP DATABASE default TO File('/backups/clickhouse.zip')";
                  docker compose exec clickhouse bash -c "cat /backups/clickhouse.zip" > ./backups/clickhouse.zip 2>&1;
            - name: Save database artifacts
              if: failure()
              uses: actions/upload-artifact@v3
              with:
                  name: db-dump
                  path: docker/backups/*