Merge branch 'main' into spenny/support-new-error-queries
SpennyNDaJets committed Mar 26, 2024
2 parents 49789a1 + bd41008 commit b04e126
Showing 29 changed files with 1,720 additions and 999 deletions.
5 changes: 5 additions & 0 deletions .changeset/beige-bats-bow.md
@@ -0,0 +1,5 @@
---
'highlight.run': patch
---

switch replay to using highlight backend for font cors proxying
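Illustrative sketch (not part of the diff): with this change, font references in replayed stylesheets are rewritten to point at the backend's new /cors endpoint instead of the Cloudflare worker proxy. The snippet below shows the URL shape produced by the rewrite in backend/event-parse/parse.go, assuming util.PublicGraphUri resolves to a placeholder host.

package main

import "fmt"

func main() {
    // Placeholder for util.PublicGraphUri; the real value is configured per deployment.
    publicGraphUri := "https://pub.example.com"
    proxyURL := fmt.Sprintf("%s/cors", publicGraphUri)

    // The rewritten stylesheet reference, matching the format used in
    // backend/event-parse/parse.go below.
    asset := "https://app.highlight.run/font/Inter-Bold.woff2"
    fmt.Printf("url('%s?src=go&url=%s')\n", proxyURL, asset)
    // Output: url('https://pub.example.com/cors?src=go&url=https://app.highlight.run/font/Inter-Bold.woff2')
}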
21 changes: 20 additions & 1 deletion .github/workflows/turbo.yml
@@ -188,13 +188,28 @@ jobs:
go-version-file: 'backend/go.mod'
cache-dependency-path: '**/go.sum'

- name: Install ffmpeg
run: |
curl -o ffmpeg.tar.xz https://johnvansickle.com/ffmpeg/builds/ffmpeg-git-amd64-static.tar.xz
mkdir ~/bin
tar -C ~/bin --strip-components=1 -xf ffmpeg.tar.xz
ls ~/bin
- name: Login to Docker Hub
if: github.event.pull_request.head.repo.full_name == 'highlight/highlight' || github.ref == 'refs/heads/main'
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Configure AWS credentials
if: github.event.pull_request.head.repo.full_name == 'highlight/highlight' || github.ref == 'refs/heads/main'
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-east-2

- name: Start docker containers & run cypress
env:
COMMIT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
@@ -203,9 +218,11 @@ jobs:
run: |
start_time=$(date -Isecond)
export RUN_SESSION_SCREENSHOT_LAMBDA=false
if [[ "$REF" != "refs/heads/main" && "$REPO" == "highlight/highlight" ]]; then
export CYPRESS_CLIENT_VERSION="dev-${COMMIT_SHA}"
export REACT_APP_COMMIT_SHA="${COMMIT_SHA}"
export RUN_SESSION_SCREENSHOT_LAMBDA=true
echo "Using client version ${CYPRESS_CLIENT_VERSION}";
fi
@@ -260,8 +277,10 @@ jobs:
mkdir backups
docker compose exec postgres bash -c "mkdir /backups";
docker compose exec postgres bash -c "pg_dump -h localhost -U postgres postgres > /backups/postgres.sql";
docker compose exec postgres bash -c "pg_dump -h localhost -U postgres -d postgres > /backups/postgres.sql";
docker compose exec postgres bash -c "psql -h localhost -U postgres -d postgres -c 'select * from sessions;' > /backups/sessions.sql";
docker compose exec postgres bash -c "cat /backups/postgres.sql" > ./backups/postgres.sql 2>&1;
docker compose exec postgres bash -c "cat /backups/sessions.sql" > ./backups/sessions.sql 2>&1;
docker compose exec clickhouse bash -c "mkdir /backups && chmod -R 777 /backups";
docker compose exec clickhouse clickhouse-client --host clickhouse --query "BACKUP DATABASE default TO File('/backups/clickhouse.zip')";
90 changes: 90 additions & 0 deletions backend/assets/assets.go
@@ -0,0 +1,90 @@
package assets

import (
"crypto/tls"
log "github.com/sirupsen/logrus"
"io"
"net/http"
"net/url"
"strings"
)

const ParamKey = "url"

func HandleAsset(w http.ResponseWriter, r *http.Request) {
qs := r.URL.Query()
urlStr := qs.Get(ParamKey)
u, err := url.Parse(urlStr)
if err != nil {
log.WithContext(r.Context()).WithError(err).Error("failed to parse url")
http.Error(w, "failed to parse url", http.StatusBadRequest)
return
}
log.WithContext(r.Context()).WithField(ParamKey, u).Debug("CORS worker request")

if len(u.Host) == 0 {
for n, h := range r.Header {
// get the origin from the request
if strings.Contains(n, "Origin") {
for _, h := range h {
u.Host = h
}
}
}
}

// always allow access origin
w.Header().Add("Access-Control-Allow-Origin", u.Host)
w.Header().Add("Access-Control-Allow-Credentials", "true")
w.Header().Add("Access-Control-Allow-Methods", "GET")

// create the request to server
req, err := http.NewRequest(r.Method, u.String(), r.Body)
if err != nil {
log.WithContext(r.Context()).WithError(err).Error("failed to build request")
http.Error(w, "failed to build request", http.StatusInternalServerError)
return
}

// add ALL headers to the connection
for n, h := range r.Header {
for _, h := range h {
req.Header.Add(n, h)
}
}

// use the host provided by the flag
if len(u.Host) > 0 {
req.Host = u.Host
}

// create a basic client to send the request
client := http.Client{}
if r.TLS != nil {
client.Transport = &http.Transport{
TLSClientConfig: &tls.Config{},
}
}
resp, err := client.Do(req)
if err != nil {
log.Println(err)
w.WriteHeader(http.StatusInternalServerError)
_, _ = w.Write([]byte(err.Error()))
return
}

for h, v := range resp.Header {
for _, v := range v {
w.Header().Add(h, v)
}
}
// copy the response from the server to the connected client request
w.WriteHeader(resp.StatusCode)

wr, err := io.Copy(w, resp.Body)
if err != nil {
log.WithContext(r.Context()).WithField("written", wr).WithError(err).Error("failed to write back body")
} else {
log.WithContext(r.Context()).WithField("written", wr).Debug("wrote back body")
}
}
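For context, a minimal client sketch (not part of the diff) of how the new handler is expected to be called: the target asset URL is passed via the "url" query parameter (ParamKey above), and the handler proxies the upstream response back with permissive CORS headers. The host below is a placeholder; in the deployed backend the handler is mounted on the public graph router at /cors (see backend/main.go in this commit).

package main

import (
    "fmt"
    "io"
    "net/http"
    "net/url"
)

func main() {
    // Placeholder endpoint; the real one is the public backend's /cors route.
    endpoint := "https://pub.example.com/cors"

    q := url.Values{}
    q.Set("src", "example")
    q.Set("url", "https://app.highlight.run/font/Inter-Bold.woff2")

    resp, err := http.Get(fmt.Sprintf("%s?%s", endpoint, q.Encode()))
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    body, _ := io.ReadAll(resp.Body)
    fmt.Println(resp.StatusCode, len(body))
}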
19 changes: 19 additions & 0 deletions backend/assets/assets_test.go
@@ -0,0 +1,19 @@
package assets

import (
"fmt"
"github.com/stretchr/testify/assert"
"net/http"
"net/http/httptest"
"testing"
)

func TestCreateLogDrain(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(HandleAsset))
defer server.Close()

resp, err := http.Get(fmt.Sprintf("%s?src=test&url=https://app.highlight.io/~r_app.webmanifest?~r_rid=knQ44NI79K8s_IOq_MbHMZ0JjqY", server.URL))
assert.NoError(t, err)
assert.Equal(t, resp.StatusCode, 200)
assert.Greater(t, resp.ContentLength, int64(256))
}
5 changes: 3 additions & 2 deletions backend/event-parse/parse.go
@@ -82,9 +82,10 @@ const (

const (
ScriptPlaceholder = "SCRIPT_PLACEHOLDER"
ProxyURL = "https://replay-cors-proxy.highlightrun.workers.dev"
)

var ProxyURL = fmt.Sprintf("%s/cors", util.PublicGraphUri)

var DisallowedTagPrefixes = []string{
"onchange",
"onclick",
@@ -168,7 +169,7 @@ func replaceRelativePaths(body []byte, href string) []byte {
groups := pathPattern.FindSubmatch(match)
u, _ := url.Parse(fmt.Sprintf("%s/%s", *base, groups[1]))
u.Path = strings.Trim(u.Path, "/")
result := []byte(fmt.Sprintf("url('%s?url=%s')", ProxyURL, u.String()))
result := []byte(fmt.Sprintf("url('%s?src=go&url=%s')", ProxyURL, u.String()))
return result
})
}
1 change: 1 addition & 0 deletions backend/event-parse/parse_test.go
@@ -149,6 +149,7 @@ func (u fetcherMock) fetchStylesheetData(href string, s *Snapshot) ([]byte, erro
}

func TestInjectStyleSheets(t *testing.T) {
ProxyURL = "https://localhost:8082/public/cors"
// Get sample input of events and serialize.
fetch = fetcherMock{}
inputBytes, err := os.ReadFile("./sample-events/input.json")
4 changes: 2 additions & 2 deletions backend/event-parse/sample-events/output.json
@@ -96,7 +96,7 @@
},
{
"attributes": {
"_cssText": "/*highlight-inject*/\n@font-face {\n\tfont-display: swap;\n\tfont-family: 'Inter';\n\tfont-style: normal;\n\tfont-weight: bold;\n\tsrc: local('Inter Bold'), local('InterBold'),\n\t\turl('https://replay-cors-proxy.highlightrun.workers.dev?url=https://app.highlight.run/font/Inter-Bold.woff2') format('woff2'), url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAACCAYAAACZgbYnAAAAE0lEQVQImWP4////f4bdu3f/BwAlfgctduB85QAAAABJRU5ErkJggg==\"), url(\"https://testing.psx-staging.energid.net/assets/fonts/roboto_medium_latin-ext.woff2\");\n}"
"_cssText": "/*highlight-inject*/\n@font-face {\n\tfont-display: swap;\n\tfont-family: 'Inter';\n\tfont-style: normal;\n\tfont-weight: bold;\n\tsrc: local('Inter Bold'), local('InterBold'),\n\t\turl('https://localhost:8082/public/cors?src=go&url=https://app.highlight.run/font/Inter-Bold.woff2') format('woff2'), url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAACCAYAAACZgbYnAAAAE0lEQVQImWP4////f4bdu3f/BwAlfgctduB85QAAAABJRU5ErkJggg==\"), url(\"https://testing.psx-staging.energid.net/assets/fonts/roboto_medium_latin-ext.woff2\");\n}"
},
"childNodes": [],
"id": 16,
@@ -110,7 +110,7 @@
},
{
"attributes": {
"_cssText": "/*highlight-inject*/\n@font-face {\n\tfont-display: swap;\n\tfont-family: 'Inter';\n\tfont-style: normal;\n\tfont-weight: bold;\n\tsrc: local('Inter Bold'), local('InterBold'),\n\t\turl('https://replay-cors-proxy.highlightrun.workers.dev?url=https://unpkg.com/font/Inter-Bold.woff2') format('woff2'), url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAACCAYAAACZgbYnAAAAE0lEQVQImWP4////f4bdu3f/BwAlfgctduB85QAAAABJRU5ErkJggg==\"), url(\"https://testing.psx-staging.energid.net/assets/fonts/roboto_medium_latin-ext.woff2\");\n}"
"_cssText": "/*highlight-inject*/\n@font-face {\n\tfont-display: swap;\n\tfont-family: 'Inter';\n\tfont-style: normal;\n\tfont-weight: bold;\n\tsrc: local('Inter Bold'), local('InterBold'),\n\t\turl('https://localhost:8082/public/cors?src=go&url=https://unpkg.com/font/Inter-Bold.woff2') format('woff2'), url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAACCAYAAACZgbYnAAAAE0lEQVQImWP4////f4bdu3f/BwAlfgctduB85QAAAABJRU5ErkJggg==\"), url(\"https://testing.psx-staging.energid.net/assets/fonts/roboto_medium_latin-ext.woff2\");\n}"
},
"childNodes": [],
"id": 18,
2 changes: 2 additions & 0 deletions backend/main.go
@@ -4,6 +4,7 @@ import (
"context"
"flag"
"fmt"
"github.com/highlight-run/highlight/backend/assets"
"html/template"
"io"
"math/rand"
@@ -513,6 +514,7 @@ func main() {
publicServer.Use(htrace.NewGraphqlTracer(string(util.PublicGraph)))
publicServer.SetErrorPresenter(htrace.GraphQLErrorPresenter(string(util.PublicGraph)))
publicServer.SetRecoverFunc(htrace.GraphQLRecoverFunc())
r.HandleFunc("/cors", assets.HandleAsset)
r.Handle("/",
publicServer,
)
100 changes: 100 additions & 0 deletions backend/migrations/cmd/migrate-group-embeddings/main.go
@@ -0,0 +1,100 @@
package main

import (
"context"
"fmt"
"os"

log "github.com/sirupsen/logrus"
"gorm.io/gorm"

"github.com/highlight-run/highlight/backend/model"
)

func main() {
ctx := context.Background()

db, err := model.SetupDB(ctx, os.Getenv("PSQL_DB"))
if err != nil {
log.WithContext(ctx).Fatal(err)
}

if err := db.Exec(`
CREATE TABLE IF NOT EXISTS migrated_embeddings (
project_id INTEGER PRIMARY KEY NOT NULL,
embedding_id INTEGER NOT NULL
)`).Error; err != nil {
log.WithContext(ctx).Fatal(err)
}

var lastCreatedPart int
if err := db.Raw("select split_part(relname, '_', 5) from pg_stat_all_tables where relname like 'error_object_embeddings_partitioned%' order by relid desc limit 1").
Scan(&lastCreatedPart).Error; err != nil {
log.WithContext(ctx).Fatal(err)
}

// Only running this migration on project_id = 1 for now
for i := 1; i <= 1; i++ {
log.WithContext(ctx).Infof("beginning loop: %d", i)
tablename := fmt.Sprintf("error_object_embeddings_partitioned_%d", i)

if err := db.Exec(fmt.Sprintf("CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_%s_id ON %s (id)", tablename, tablename)).Error; err != nil {
log.WithContext(ctx).Fatal(err)
}
log.WithContext(ctx).Info("done creating index")

var prevEmbeddingId int
if err := db.Raw("select coalesce(max(embedding_id), 0) from migrated_embeddings where project_id = ?", i).Scan(&prevEmbeddingId).Error; err != nil {
log.WithContext(ctx).Fatal(err)
}
log.WithContext(ctx).Infof("prevEmbeddingId: %d", prevEmbeddingId)

var maxEmbeddingId int
if err := db.Raw("select coalesce(max(id), 0) from error_object_embeddings_partitioned eoe where project_id = ?", i).Scan(&maxEmbeddingId).Error; err != nil {
log.WithContext(ctx).Fatal(err)
}
log.WithContext(ctx).Infof("maxEmbeddingId: %d", maxEmbeddingId)

if err := db.Transaction(func(tx *gorm.DB) error {
if err := tx.Exec(`
insert into error_group_embeddings (project_id, error_group_id, count, gte_large_embedding)
select a.* from (
select eo.project_id, eo.error_group_id, count(*) as count, AVG(eoe.gte_large_embedding) as gte_large_embedding
from error_object_embeddings_partitioned eoe
inner join error_objects eo
on eoe.error_object_id = eo.id
where eoe.gte_large_embedding is not null
and eoe.id > ?
and eoe.id <= ?
group by eo.project_id, eo.error_group_id) a
on conflict (project_id, error_group_id)
do update set
gte_large_embedding =
error_group_embeddings.gte_large_embedding * array_fill(error_group_embeddings.count::numeric / (error_group_embeddings.count + excluded.count), '{1024}')::vector
+ excluded.gte_large_embedding * array_fill(excluded.count::numeric / (error_group_embeddings.count + excluded.count), '{1024}')::vector,
count = error_group_embeddings.count + excluded.count
`, prevEmbeddingId, maxEmbeddingId).Error; err != nil {
return err
}

log.WithContext(ctx).Info("done upserting new embeddings")

if err := tx.Exec(`
insert into migrated_embeddings (project_id, embedding_id)
values (?, ?)
on conflict (project_id)
do update set embedding_id = excluded.embedding_id
`, i, maxEmbeddingId).Error; err != nil {
return err
}

log.WithContext(ctx).Info("done updating maxEmbeddingId")

return nil
}); err != nil {
log.WithContext(ctx).Fatal(err)
}
log.WithContext(ctx).Infof("done loop: %d", i)
}

}
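For context, the upsert above maintains a running, count-weighted average of error object embeddings per error group: the stored vector is scaled by its existing count, the incoming aggregate by its count, and both are divided by the combined total. Below is a minimal sketch (not part of the diff) of the same arithmetic, using toy 3-dimensional vectors in place of the real vector(1024) column.

package main

import "fmt"

// mergeEmbeddings mirrors the arithmetic of the ON CONFLICT update above:
// a count-weighted average of the existing and incoming embeddings, with
// the counts summed.
func mergeEmbeddings(existing []float64, existingCount int, incoming []float64, incomingCount int) ([]float64, int) {
    total := float64(existingCount + incomingCount)
    merged := make([]float64, len(existing))
    for i := range existing {
        merged[i] = existing[i]*float64(existingCount)/total + incoming[i]*float64(incomingCount)/total
    }
    return merged, existingCount + incomingCount
}

func main() {
    old := []float64{1, 0, 0}      // stored group embedding
    incoming := []float64{0, 1, 0} // newly aggregated embedding
    merged, count := mergeEmbeddings(old, 2, incoming, 1)
    fmt.Println(merged, count) // [0.6666666666666666 0.3333333333333333 0] 3
}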
9 changes: 9 additions & 0 deletions backend/model/model.go
@@ -138,6 +138,7 @@ var Models = []interface{}{
&ErrorObject{},
&ErrorObjectEmbeddings{},
&ErrorGroup{},
&ErrorGroupEmbeddings{},
&ErrorField{},
&ErrorSegment{},
&SavedSegment{},
@@ -1155,6 +1156,14 @@ type ErrorField struct {
ErrorGroups []ErrorGroup `gorm:"many2many:error_group_fields;"`
}

type ErrorGroupEmbeddings struct {
Model
ProjectID int `gorm:"uniqueIndex:idx_project_id_error_group_id"`
ErrorGroupID int `gorm:"uniqueIndex:idx_project_id_error_group_id"`
Count int
GteLargeEmbedding Vector `gorm:"type:vector(1024)"` // 1024 dimensions in the thenlper/gte-large model
}

type LogAdminsView struct {
ID int `gorm:"primary_key;type:bigint;autoIncrement" json:"id" deep:"-"`
ViewedAt time.Time `gorm:"default:NOW()"`
2 changes: 1 addition & 1 deletion e2e/nestjs/package.json
@@ -10,7 +10,7 @@
"dependencies": {
"@highlight-run/nest": "workspace:*",
"@highlight-run/node": "workspace:*",
"@nestjs/axios": "^0.0.7",
"@nestjs/axios": "^3.0.2",
"@nestjs/common": ">=8",
"@nestjs/config": "^2.0.0",
"@nestjs/core": ">=8",
2 changes: 1 addition & 1 deletion e2e/nextjs/package.json
@@ -29,7 +29,7 @@
"classnames": "^2.3.2",
"eslint": "8.50.0",
"eslint-config-next": "13.5.4",
"ky": "^1.0.1",
"ky": "^1.2.2",
"next": "13.5.4",
"next-build-id": "^3.0.0",
"pg": "^8.11.3",
2 changes: 1 addition & 1 deletion e2e/react-three-vite/package.json
@@ -14,7 +14,7 @@
"@pmndrs/branding": "0.0.8",
"@react-three/drei": "9.102.3",
"@react-three/fiber": "8.15.12",
"@react-three/postprocessing": "2.15.11",
"@react-three/postprocessing": "2.16.2",
"highlight.run": "workspace:*",
"random-words": "2.0.0",
"react": "18.2.0",
2 changes: 1 addition & 1 deletion frontend/package.json
@@ -159,7 +159,7 @@
"eslint-plugin-simple-import-sort": "^7.0.0",
"eslint-plugin-tailwindcss": "^3.6.1",
"eslint-plugin-unused-imports": "^1.1.2",
"happy-dom": "^6.0.4",
"happy-dom": "^13.8.3",
"jsonc-parser": "^3.2.0",
"less": "^4.1.3",
"lightningcss": "^1.22.1",