Commit a28ec8d: Output search index to json files.
Hyperkid123 committed May 22, 2024
1 parent 9c9b993
Showing 9 changed files with 104 additions and 29 deletions.
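In short, the change adds a write mode to the cmd/search entrypoint: when SEARCH_INDEX_WRITE is set, the search index for each environment (prod, stage) and release (stable, beta) is written to static/<release>/<env>/search/search-index.json instead of being published to Hydra. Below is a minimal sketch of reading one of the generated files back, assuming it has already been generated; the ModuleIndexEntry fields are not shown in this diff, so the sketch unmarshals into generic maps:

package main

import (
	"encoding/json"
	"fmt"
	"os"
)

func main() {
	// Path produced by the new write mode for the stable release in the prod environment.
	data, err := os.ReadFile("static/stable/prod/search/search-index.json")
	if err != nil {
		fmt.Println("search index not generated yet:", err)
		os.Exit(1)
	}

	// The ModuleIndexEntry struct lives elsewhere in cmd/search, so decode into generic maps here.
	var entries []map[string]interface{}
	if err := json.Unmarshal(data, &entries); err != nil {
		fmt.Println("failed to parse search index:", err)
		os.Exit(1)
	}
	fmt.Printf("loaded %d search index entries\n", len(entries))
}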
3 changes: 3 additions & 0 deletions .github/workflows/main.yaml
@@ -74,3 +74,6 @@ jobs:

- name: Test
run: make test

- name: Generate search index
run: make generate-search-index
1 change: 1 addition & 0 deletions .gitignore
@@ -30,3 +30,4 @@ deploy/debug.yml
static/**/links-storage.json
static/**/services-generated.json
*-services.db
static/**/search-index.json
1 change: 1 addition & 0 deletions Dockerfile
@@ -18,6 +18,7 @@ RUN go get -d -v
RUN make validate-schema
RUN make publish-search-index-dry-run
RUN make parse-services
RUN make generate-search-index
RUN CGO_ENABLED=1 go build -o /go/bin/chrome-service-backend
# Build the migration binary.
RUN CGO_ENABLED=1 go build -o /go/bin/chrome-migrate cmd/migrate/migrate.go
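Taken together with the workflow step above, both CI and the container build now run make generate-search-index, and the new .gitignore entry keeps the resulting static/**/search-index.json files out of version control; the index files are produced at build time rather than committed.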
34 changes: 20 additions & 14 deletions Makefile
@@ -1,22 +1,23 @@
help:
@echo "Availabe commands:"
@echo "------------------"
@echo "migrate - run database migration"
@echo "dev - run server"
@echo "test - run all tests"
@echo "database - start database with .env vars"
@echo "kafka - start local kafka"
@echo "unleash - start local unleash server"
@echo "infra - start all infrastructure locally (kafka, unleash, and postgres db)"
@echo "clean - tear down database"
@echo "clean-all - tear down all local infrastructure"
@echo "validate-schema - validates chrome static JSON schemas"
@echo "parse-services - creates services-generated.json that with filled link refs"
@echo "dev-static - serve only the static direcory using simple go server"
@echo "dev-static-node - serve only the static direcory using simple node server"
@echo "migrate - run database migration"
@echo "dev - run server"
@echo "test - run all tests"
@echo "database - start database with .env vars"
@echo "kafka - start local kafka"
@echo "unleash - start local unleash server"
@echo "infra - start all infrastructure locally (kafka, unleash, and postgres db)"
@echo "clean - tear down database"
@echo "clean-all - tear down all local infrastructure"
@echo "validate-schema - validates chrome static JSON schemas"
@echo "parse-services - creates services-generated.json that with filled link refs"
@echo "dev-static - serve only the static direcory using simple go server"
@echo "dev-static-node - serve only the static direcory using simple node server"
@echo " arguments:"
@echo " - port: http server port 'make dev-static-node port=8888'"
@echo "audit - run grype audit on the docker image"
@echo "audit - run grype audit on the docker image"
@echo "generate-search-index - generate search index"

port?=8000

@@ -46,6 +47,11 @@ publish-search-index-dry-run: export SEARCH_INDEX_DRY_RUN = true
publish-search-index-dry-run:
go run cmd/search/*

generate-search-index: export SEARCH_INDEX_WRITE = true

generate-search-index:
go run cmd/search/*

kafka:
podman-compose -f local/kafka-compose.yaml up

84 changes: 71 additions & 13 deletions cmd/search/publishSearchIndex.go
@@ -16,10 +16,13 @@ import (
)

type SearchEnv string
type Release string

const (
Prod SearchEnv = "prod"
Stage SearchEnv = "stage"
Stable Release = "stable"
Beta Release = "beta"
ssoPathname string = "/auth/realms/redhat-external/protocol/openid-connect/token"
hydraPathname string = "/hydra/rest/search/console/index"
)
@@ -381,21 +384,22 @@ func flattenIndexBase(indexBase []ServiceEntry, env SearchEnv) ([]ModuleIndexEntry
}

// create search index compatible documents array
func constructIndex(env SearchEnv) ([]ModuleIndexEntry, error) {
func constructIndex(env SearchEnv, release Release) ([]ModuleIndexEntry, error) {
// get services template file
stageContent, err := ioutil.ReadFile(fmt.Sprintf("static/stable/%s/services/services.json", env))
stageContent, err := ioutil.ReadFile(fmt.Sprintf("static/%s/%s/services/services.json", release, env))
if err != nil {
return []ModuleIndexEntry{}, err
}

// get static service template only for search index
// TODO: Add releases for static services
staticContent, err := ioutil.ReadFile("cmd/search/static-services-entries.json")
if err != nil {
return []ModuleIndexEntry{}, err
}

// get all environment navigation file paths needed to fill in the template file
stageNavFiles, err := filepath.Glob(fmt.Sprintf("static/stable/%s/navigation/*-navigation.json", env))
stageNavFiles, err := filepath.Glob(fmt.Sprintf("static/%s/%s/navigation/*-navigation.json", release, env))
if err != nil {
return []ModuleIndexEntry{}, err
}
@@ -549,7 +553,7 @@ func deployIndex(env SearchEnv, envSecret string, ssoHost string, hydraHost stri
if err != nil {
return err
}
index, err := constructIndex(env)
index, err := constructIndex(env, "stable")
if err != nil {
return err
}
@@ -562,6 +566,20 @@
return nil
}

func handleErrors(errors []error, dryRun bool) {
if len(errors) == 0 {
fmt.Println("Search index published successfully")
} else {
for _, e := range errors {
fmt.Println(e)
}
fmt.Println("Search index publishing failed. See above errors.")
if dryRun {
os.Exit(1)
}
}
}

func main() {
// load env variables
godotenv.Load()
@@ -582,13 +600,60 @@ func main() {
}

dryRun, _ := strconv.ParseBool(os.Getenv("SEARCH_INDEX_DRY_RUN"))
writeIndex, _ := strconv.ParseBool(os.Getenv("SEARCH_INDEX_WRITE"))

fmt.Println("Write index:", writeIndex)
errors := []error{}

if writeIndex {
cwd, err := filepath.Abs(".")
if err != nil {
fmt.Println("Failed to get current working directory")
errors = append(errors, err)
handleErrors(errors, dryRun)
return
}
writeEnvs := []SearchEnv{Prod, Stage}
writeReleases := []Release{Stable, Beta}
for _, env := range writeEnvs {
for _, release := range writeReleases {
searchIndex, err := constructIndex(env, release)
if err != nil {
fmt.Println("Failed to construct search index for", env, release)
errors = append(errors, err)
} else {
dirname := fmt.Sprintf("%s/static/%s/%s/search", cwd, release, env)
fileName := fmt.Sprintf("%s/search-index.json", dirname)
err := os.MkdirAll(dirname, os.ModePerm)
if err != nil {
fmt.Println("Failed to create directory", dirname)
errors = append(errors, err)
} else {
j, err := json.Marshal(searchIndex)
if err != nil {
fmt.Println("Failed to marshal search index")
errors = append(errors, err)
}
err = os.WriteFile(fileName, j, 0644)
if err != nil {
fmt.Println("Failed to write search index to", fileName)
errors = append(errors, err)
}
}

}

}
}
handleErrors(errors, dryRun)
return
}

for _, env := range []SearchEnv{Stage, Prod} {
var err error
if dryRun {
fmt.Println("Attempt dry run search index for", env, "environment.")
_, err = constructIndex(env)
_, err = constructIndex(env, "stable")
} else {
fmt.Println("Attempt to publish search index for", env, "environment.")
err = deployIndex(env, secrets[env], ssoHosts[env], hydraHost[env])
@@ -600,12 +665,5 @@ func main() {
}
}

if len(errors) == 0 {
fmt.Println("Search index published successfully")
} else {
fmt.Println("Search index publishing failed. See above errors.")
if dryRun {
os.Exit(1)
}
}
handleErrors(errors, dryRun)
}
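The navigation JSON changes below add explicit id fields to several nav items (and drop one from an expandable group), presumably so the generated index entries get stable identifiers; the diff itself does not show how these ids are consumed.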
1 change: 1 addition & 0 deletions static/beta/prod/navigation/iam-navigation.json
@@ -3,6 +3,7 @@
"title": "Identity & Access Management",
"navItems": [
{
"id": "my-user-access",
"appId": "rbac",
"title": "My User Access",
"href": "/iam/my-user-access",
4 changes: 3 additions & 1 deletion static/beta/prod/navigation/rhel-navigation.json
@@ -266,6 +266,7 @@
]
},
{
"id": "patchTemplates",
"appId": "patch",
"title": "Templates",
"href": "/insights/patch/templates",
@@ -335,12 +336,14 @@
"expandable": true,
"routes": [
{
"id": "driftComparison",
"appId": "drift",
"title": "Comparison",
"href": "/insights/drift/",
"product": "Red Hat Insights"
},
{
"id": "driftBaselines",
"appId": "drift",
"title": "Baselines",
"href": "/insights/drift/baselines",
@@ -378,7 +381,6 @@
"expandable": true,
"routes": [
{
"id": "vulnerability",
"title": "Vulnerability",
"expandable": true,
"subtitle": "Red Hat Insights for RHEL",
1 change: 1 addition & 0 deletions static/beta/stage/navigation/iam-navigation.json
@@ -3,6 +3,7 @@
"title": "Identity & Access Management",
"navItems": [
{
"id": "my-user-access",
"appId": "rbac",
"title": "My User Access",
"href": "/iam/my-user-access",
4 changes: 3 additions & 1 deletion static/beta/stage/navigation/rhel-navigation.json
@@ -281,6 +281,7 @@
]
},
{
"id": "patchTemplates",
"appId": "patch",
"title": "Templates",
"href": "/insights/patch/templates",
Expand Down Expand Up @@ -350,12 +351,14 @@
"expandable": true,
"routes": [
{
"id": "driftComparison",
"appId": "drift",
"title": "Comparison",
"href": "/insights/drift/",
"product": "Red Hat Insights"
},
{
"id": "driftBaselines",
"appId": "drift",
"title": "Baselines",
"href": "/insights/drift/baselines",
@@ -393,7 +396,6 @@
"expandable": true,
"routes": [
{
"id": "vulnerability",
"title": "Vulnerability",
"expandable": true,
"subtitle": "Red Hat Insights for RHEL",