diff --git a/TESTING.md b/TESTING.md index 587b3c428e..08d3c31ae6 100644 --- a/TESTING.md +++ b/TESTING.md @@ -15,7 +15,7 @@ This is a simple test to check if Tracetest is working correctly given it was pr The steps that we should follow are: - [ ] Open WebUI and go to `/settings` page. The provisioned Data Store should be selected. -- [ ] Run `tracetest datastore export -d {provisioned_datastore}` and check if the data was exported correctly. +- [ ] Run `tracetest export datastore --id current` and check if the data was exported correctly. - [ ] Create a test on WebUI that calls a demo API (like [Pokeshop](https://docs.tracetest.io/live-examples/pokeshop/overview) or [Open Telemetry Store](https://docs.tracetest.io/live-examples/opentelemetry-store/overview)). This test should fetch traces correctly and run without errors. ### Checklist on version release @@ -25,16 +25,17 @@ This is the entire checklist on what we should do to assert that Tracetest is wo - [ ] Check if our release pipeline on [Release Tracetest](https://github.com/kubeshop/tracetest/actions/workflows/release-version.yml) workflow on Github Actions worked correctly. - [ ] Double check [Detailed installation](https://docs.tracetest.io/getting-started/detailed-installation) doc and see if everything is documented correctly - ### Tests to validate RC - Test server installation via CLI + - [ ] Docker Compose and no demo API - [ ] Docker Compose and demo API - [ ] Kubernetes and no demo API - [ ] Kubernetes and demo API - Test Tracetest examples + - [ ] [Amazon X-Ray example](https://github.com/kubeshop/tracetest/tree/main/examples/tracetest-amazon-x-ray) - [ ] [Datadog example](https://github.com/kubeshop/tracetest/tree/main/examples/tracetest-datadog) - [ ] [Elastic APM example](https://github.com/kubeshop/tracetest/tree/main/examples/tracetest-elasticapm) @@ -43,10 +44,13 @@ This is the entire checklist on what we should do to assert that Tracetest is wo - [ ] [SignalFX example](https://github.com/kubeshop/tracetest/tree/main/examples/tracetest-signalfx) - Test specific features added/changed on this release: + - [ ] Add features here - + ### Tests to validate final release + - Test CLI updates + - [ ] MacOS via homebrew - [ ] MacOS via curl script - [ ] Windows via chocolatey @@ -54,7 +58,6 @@ This is the entire checklist on what we should do to assert that Tracetest is wo - Test specific features added/changed on this release: - [ ] Add features here - ## Automatic Tests @@ -63,7 +66,7 @@ Today Tracetest has 3 main components: a WebUI, a CLI and a Server. ### Web UI - **Unit tests**: Run by executing `npm test` on `./web` folder -- **End-to-end tests**: Run using [cypress](https://www.cypress.io/) against a temporary Tracetest created on Kubernetes. +- **End-to-end tests**: Run using [cypress](https://www.cypress.io/) against a temporary Tracetest created on Kubernetes. ### CLI diff --git a/api/dataStores.yaml b/api/dataStores.yaml index 41e7386bed..59a032c270 100644 --- a/api/dataStores.yaml +++ b/api/dataStores.yaml @@ -1,6 +1,17 @@ openapi: 3.0.0 components: schemas: + DataStoreResource: + type: object + description: "Represents a data store structured into the Resources format." + properties: + type: + type: string + description: "Represents the type of this resource. It should always be set as 'DataStore'." 
+ enum: + - DataStore + spec: + $ref: "#/components/schemas/DataStore" DataStore: type: object required: @@ -14,7 +25,7 @@ components: type: string type: $ref: "#/components/schemas/SupportedDataStores" - isDefault: + default: type: boolean jaeger: $ref: "#/components/schemas/GRPCClientSettings" @@ -147,19 +158,7 @@ components: type: string SupportedDataStores: type: string - enum: - [ - jaeger, - openSearch, - tempo, - signalFx, - otlp, - elasticApm, - newRelic, - lightstep, - datadog, - awsxray, - ] + enum: [jaeger, openSearch, tempo, signalFx, otlp, elasticApm, newRelic, lightstep, datadog, awsxray] SupportedClients: type: string enum: [http, grpc] diff --git a/api/openapi.yaml b/api/openapi.yaml index ada5ae3ce5..ed99a40663 100644 --- a/api/openapi.yaml +++ b/api/openapi.yaml @@ -1091,59 +1091,10 @@ paths: description: "problem deleting a demo" # Data Stores - /datastores: - get: - tags: - - api - summary: "Get all Data Stores" - description: "Get all Data Stores" - operationId: getDataStores - parameters: - - $ref: "./parameters.yaml#/components/parameters/take" - - $ref: "./parameters.yaml#/components/parameters/skip" - - $ref: "./parameters.yaml#/components/parameters/query" - - $ref: "./parameters.yaml#/components/parameters/sortBy" - - $ref: "./parameters.yaml#/components/parameters/sortDirection" - responses: - 200: - description: successful operation - headers: - X-Total-Count: - schema: - type: integer - description: Total records count - content: - application/json: - schema: - type: array - items: - $ref: "./dataStores.yaml#/components/schemas/DataStore" - 500: - description: "problem with getting data stores" - post: - tags: - - api - summary: "Create a new Data Store" - description: "Create a new Data Store" - operationId: createDataStore - requestBody: - content: - application/json: - schema: - $ref: "./dataStores.yaml#/components/schemas/DataStore" - responses: - 200: - description: successful operation - content: - application/json: - schema: - $ref: "./dataStores.yaml#/components/schemas/DataStore" - 400: - description: "trying to create a data store with an already existing ID" /datastores/{dataStoreId}: get: tags: - - api + - resource-api parameters: - $ref: "./parameters.yaml#/components/parameters/dataStoreId" summary: "Get a Data Store" @@ -1155,12 +1106,14 @@ paths: content: application/json: schema: - $ref: "./dataStores.yaml#/components/schemas/DataStore" + $ref: "./dataStores.yaml#/components/schemas/DataStoreResource" + 404: + description: "data store not found" 500: description: "problem with getting a data store" put: tags: - - api + - resource-api parameters: - $ref: "./parameters.yaml#/components/parameters/dataStoreId" summary: "Update a Data Store" @@ -1174,32 +1127,20 @@ paths: responses: 204: description: successful operation + 400: + description: "invalid data store, some data was sent in incorrect format." 
500: description: "problem with updating data store" delete: tags: - - api + - resource-api parameters: - $ref: "./parameters.yaml#/components/parameters/dataStoreId" summary: "Delete a Data Store" description: "Delete a Data Store" operationId: deleteDataStore responses: - "204": - description: OK - /datastores/{dataStoreId}/definition.yaml: - get: - tags: - - api - parameters: - - $ref: "./parameters.yaml#/components/parameters/dataStoreId" - summary: Get the data store definition as an YAML file - description: Get the data store as an YAML file - operationId: getDataStoreDefinitionFile - responses: - 200: - description: OK - content: - application/yaml: - schema: - type: string + 204: + description: successful operation + 500: + description: "problem with data store deletion" diff --git a/cli/actions/apply_datastore_action.go b/cli/actions/apply_datastore_action.go deleted file mode 100644 index 267b75caa9..0000000000 --- a/cli/actions/apply_datastore_action.go +++ /dev/null @@ -1,120 +0,0 @@ -package actions - -import ( - "context" - "fmt" - "io/ioutil" - "net/http" - - "github.com/fluidtruck/deepcopy" - "github.com/kubeshop/tracetest/cli/file" - "github.com/kubeshop/tracetest/cli/openapi" - "go.uber.org/zap" -) - -type ApplyDataStoreConfig struct { - File string -} - -type applyDataStoreAction struct { - logger *zap.Logger - client *openapi.APIClient -} - -var _ Action[ApplyDataStoreConfig] = &applyDataStoreAction{} - -func NewApplyDataStoreAction(logger *zap.Logger, client *openapi.APIClient) applyDataStoreAction { - return applyDataStoreAction{ - logger: logger, - client: client, - } -} - -func (a applyDataStoreAction) Run(ctx context.Context, args ApplyDataStoreConfig) error { - if args.File == "" { - return fmt.Errorf("you must specify a file to be applied") - } - - a.logger.Debug( - "applying data store", - zap.String("file", args.File), - ) - - fileContent, err := file.Read(args.File) - if err != nil { - return fmt.Errorf("could not read file: %w", err) - } - - if fileContent.Definition().Type != "DataStore" { - return fmt.Errorf(`file must be of type "DataStore"`) - } - - var dataStore openapi.DataStore - deepcopy.DeepCopy(fileContent.Definition().Spec, &dataStore) - - if dataStore.Id == nil || *dataStore.Id == "" { - err := a.createDataStore(ctx, fileContent, dataStore) - if err != nil { - return err - } - } else { - err := a.updateDataStore(ctx, fileContent, dataStore) - if err != nil { - return err - } - } - - return nil -} - -func (a applyDataStoreAction) createDataStore(ctx context.Context, file file.File, dataStore openapi.DataStore) error { - req := a.client.ApiApi.CreateDataStore(ctx) - req = req.DataStore(dataStore) - createdDataStore, resp, err := a.client.ApiApi.CreateDataStoreExecute(req) - if resp.StatusCode == http.StatusUnprocessableEntity { - // validation error - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - return err - } - - validationError := string(body) - return fmt.Errorf("invalid data store: %s", validationError) - } - if err != nil { - return fmt.Errorf("could not create data store: %w", err) - } - - f, err := file.SetID(*createdDataStore.Id) - if err != nil { - return fmt.Errorf("could no set data store id: %w", err) - } - - _, err = f.Write() - if err != nil { - return fmt.Errorf("could not write to data store file: %w", err) - } - - return nil -} - -func (a applyDataStoreAction) updateDataStore(ctx context.Context, file file.File, dataStore openapi.DataStore) error { - req := a.client.ApiApi.UpdateDataStore(ctx, *dataStore.Id) - req = 
req.DataStore(dataStore) - resp, err := a.client.ApiApi.UpdateDataStoreExecute(req) - if resp.StatusCode == http.StatusUnprocessableEntity { - // validation error - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - return err - } - - validationError := string(body) - return fmt.Errorf("invalid data store: %s", validationError) - } - if err != nil { - return fmt.Errorf("could not update data store: %w", err) - } - - return nil -} diff --git a/cli/actions/config.go b/cli/actions/config.go index 0b1cc0314e..c45616eb9f 100644 --- a/cli/actions/config.go +++ b/cli/actions/config.go @@ -2,12 +2,10 @@ package actions import ( "context" - "fmt" - "io/ioutil" - "net/http" "github.com/kubeshop/tracetest/cli/file" "github.com/kubeshop/tracetest/cli/utils" + "github.com/kubeshop/tracetest/server/model/yaml" ) type configActions struct { @@ -25,103 +23,26 @@ func NewConfigActions(options ...ResourceArgsOption) configActions { } } -func (configActions) Name() string { - return "config" +func (configActions) FileType() yaml.FileType { + return yaml.FileTypeConfig } -func (config configActions) Apply(ctx context.Context, args ApplyArgs) error { - if args.File == "" { - return fmt.Errorf("you must specify a file to be applied") - } - - fileContent, err := file.ReadRaw(args.File) - if err != nil { - return fmt.Errorf("could not read file: %w", err) - } - - if fileContent.Definition().Type != "Config" { - return fmt.Errorf(`file must be of type "Config"`) - } - - url := fmt.Sprintf("%s/%s", config.resourceClient.BaseUrl, currentConfigID) - request, err := config.resourceClient.NewRequest(url, http.MethodPut, fileContent.Contents()) - if err != nil { - return fmt.Errorf("could not create request: %w", err) - } - - resp, err := config.resourceClient.Client.Do(request) - if err != nil { - return fmt.Errorf("could not send request: %w", err) - } - - defer resp.Body.Close() - if resp.StatusCode != http.StatusOK { - return fmt.Errorf("could not create config: %s", resp.Status) - } - - _, err = fileContent.SaveChanges(utils.IOReadCloserToString(resp.Body)) - return err +func (configActions) Name() string { + return "config" } -func (config configActions) Get(ctx context.Context, ID string) error { - configResponse, err := config.get(ctx) - if err != nil { - return err - } - - fmt.Println(configResponse) - return err +func (config configActions) Apply(ctx context.Context, fileContent file.File) error { + return config.resourceClient.Update(ctx, fileContent, currentConfigID) } -func (config configActions) List(ctx context.Context, listArgs ListArgs) error { - return ErrNotSupportedResourceAction +func (config configActions) Get(ctx context.Context, ID string) (string, error) { + return config.resourceClient.Get(ctx, currentConfigID) } -func (config configActions) Export(ctx context.Context, ID string, filePath string) error { - configResponse, err := config.get(ctx) - if err != nil { - return err - } - - file, err := file.NewFromRaw(filePath, []byte(configResponse)) - if err != nil { - return fmt.Errorf("could not create file: %w", err) - } - - _, err = file.WriteRaw() - return err +func (config configActions) List(ctx context.Context, listArgs utils.ListArgs) (string, error) { + return "", ErrNotSupportedResourceAction } func (config configActions) Delete(ctx context.Context, ID string) error { return ErrNotSupportedResourceAction } - -func (config configActions) get(ctx context.Context) (string, error) { - url := fmt.Sprintf("%s/%s", config.resourceClient.BaseUrl, currentConfigID) - request, err := 
config.resourceClient.NewRequest(url, http.MethodGet, "") - if err != nil { - return "", fmt.Errorf("could not create request: %w", err) - } - - resp, err := config.resourceClient.Client.Do(request) - if err != nil { - return "", fmt.Errorf("could not send request: %w", err) - } - - defer resp.Body.Close() - if resp.StatusCode != http.StatusOK { - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - return "", err - } - - validationError := string(body) - return "", fmt.Errorf("invalid config: %s", validationError) - } - - if err != nil { - return "", fmt.Errorf("could not get config: %w", err) - } - - return utils.IOReadCloserToString(resp.Body), nil -} diff --git a/cli/actions/datastore.go b/cli/actions/datastore.go new file mode 100644 index 0000000000..cfd5fdab6e --- /dev/null +++ b/cli/actions/datastore.go @@ -0,0 +1,52 @@ +package actions + +import ( + "context" + + "github.com/kubeshop/tracetest/cli/file" + "github.com/kubeshop/tracetest/cli/openapi" + "github.com/kubeshop/tracetest/cli/utils" + "github.com/kubeshop/tracetest/server/model/yaml" + "github.com/mitchellh/mapstructure" +) + +type dataStoreActions struct { + resourceArgs +} + +var _ ResourceActions = &dataStoreActions{} + +func NewDataStoreActions(options ...ResourceArgsOption) *dataStoreActions { + args := NewResourceArgs(options...) + + return &dataStoreActions{ + resourceArgs: args, + } +} + +func (d *dataStoreActions) FileType() yaml.FileType { + return yaml.FileTypeDataStore +} + +func (d *dataStoreActions) Name() string { + return "datastore" +} + +func (d *dataStoreActions) Apply(ctx context.Context, fileContent file.File) error { + var dataStore openapi.DataStore + mapstructure.Decode(fileContent.Definition().Spec, &dataStore) + + return d.resourceClient.Update(ctx, fileContent, currentConfigID) +} + +func (d *dataStoreActions) List(ctx context.Context, args utils.ListArgs) (string, error) { + return "", ErrNotSupportedResourceAction +} + +func (d *dataStoreActions) Get(ctx context.Context, id string) (string, error) { + return d.resourceClient.Get(ctx, currentConfigID) +} + +func (d *dataStoreActions) Delete(ctx context.Context, id string) error { + return d.resourceClient.Delete(ctx, currentConfigID) +} diff --git a/cli/actions/demo.go b/cli/actions/demo.go index f4d90b3f04..a70b83e789 100644 --- a/cli/actions/demo.go +++ b/cli/actions/demo.go @@ -2,15 +2,12 @@ package actions import ( "context" - "fmt" - "io/ioutil" - "net/http" "github.com/kubeshop/tracetest/cli/file" "github.com/kubeshop/tracetest/cli/openapi" "github.com/kubeshop/tracetest/cli/utils" + "github.com/kubeshop/tracetest/server/model/yaml" "github.com/mitchellh/mapstructure" - "go.uber.org/zap" ) type demoActions struct { @@ -27,176 +24,33 @@ func NewDemoActions(options ...ResourceArgsOption) demoActions { } } +func (demoActions) FileType() yaml.FileType { + return yaml.FileTypeDemo +} + func (demoActions) Name() string { return "demo" } -func (demo demoActions) Apply(ctx context.Context, args ApplyArgs) error { - if args.File == "" { - return fmt.Errorf("you must specify a file to be applied") - } - - demo.logger.Debug( - "applying demo", - zap.String("file", args.File), - ) - - fileContent, err := file.ReadRaw(args.File) - if err != nil { - return fmt.Errorf("could not read file: %w", err) - } - - if fileContent.Definition().Type != "Demo" { - return fmt.Errorf(`file must be of type "Demo"`) - } - +func (demo demoActions) Apply(ctx context.Context, fileContent file.File) error { var demoResource openapi.Demo 
mapstructure.Decode(fileContent.Definition().Spec, &demoResource.Spec) if demoResource.Spec.Id == nil || *demoResource.Spec.Id == "" { - return demo.create(ctx, fileContent) - } - - return demo.update(ctx, fileContent, *demoResource.Spec.Id) -} - -func (demo demoActions) create(ctx context.Context, file file.File) error { - request, err := demo.resourceClient.NewRequest(demo.resourceClient.BaseUrl, http.MethodPost, file.Contents()) - if err != nil { - return fmt.Errorf("could not create request: %w", err) - } - - resp, err := demo.resourceClient.Client.Do(request) - if err != nil { - return fmt.Errorf("could not send request: %w", err) - } - - defer resp.Body.Close() - if resp.StatusCode == http.StatusUnprocessableEntity { - // validation error - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - return fmt.Errorf("could not read validation error: %w", err) - } - - validationError := string(body) - return fmt.Errorf("invalid demo profile: %s", validationError) - } - - _, err = file.SaveChanges(utils.IOReadCloserToString(resp.Body)) - return err -} - -func (demo demoActions) update(ctx context.Context, file file.File, ID string) error { - url := fmt.Sprintf("%s/%s", demo.resourceClient.BaseUrl, ID) - request, err := demo.resourceClient.NewRequest(url, http.MethodPut, file.Contents()) - if err != nil { - return fmt.Errorf("could not create request: %w", err) - } - - resp, err := demo.resourceClient.Client.Do(request) - if err != nil { - return fmt.Errorf("could not update demo profile: %w", err) - } - - defer resp.Body.Close() - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("demo id doesn't exist on server. Remove it from the definition file and try again") - } - - if resp.StatusCode == http.StatusUnprocessableEntity { - // validation error - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - return fmt.Errorf("could not send request: %w", err) - } - - validationError := string(body) - return fmt.Errorf("invalid demo profile: %s", validationError) - } - - _, err = file.SaveChanges(utils.IOReadCloserToString(resp.Body)) - return err -} - -func (demo demoActions) List(ctx context.Context, listArgs ListArgs) error { - url := fmt.Sprintf("%s?skip=%d&take=%d&sortBy=%s&sortDirection=%s", demo.resourceClient.BaseUrl, listArgs.Skip, listArgs.Take, listArgs.SortBy, listArgs.SortDirection) - request, err := demo.resourceClient.NewRequest(url, http.MethodGet, "") - if err != nil { - return fmt.Errorf("could not create request: %w", err) - } - - resp, err := demo.resourceClient.Client.Do(request) - if err != nil { - return fmt.Errorf("could not send request: %w", err) + return demo.resourceClient.Create(ctx, fileContent) } - defer resp.Body.Close() - fmt.Println(utils.IOReadCloserToString(resp.Body)) - return nil + return demo.resourceClient.Update(ctx, fileContent, *demoResource.Spec.Id) } -func (demo demoActions) Export(ctx context.Context, ID string, filePath string) error { - if ID == "" { - return fmt.Errorf("you must specify a demo profile ID to be exported") - } - - demoProfile, err := demo.get(ctx, ID) - if err != nil { - return err - } - - file, err := file.NewFromRaw(filePath, []byte(demoProfile)) - if err != nil { - return fmt.Errorf("could not create file: %w", err) - } - - _, err = file.WriteRaw() - return err +func (demo demoActions) List(ctx context.Context, listArgs utils.ListArgs) (string, error) { + return demo.resourceClient.List(ctx, listArgs) } func (demo demoActions) Delete(ctx context.Context, ID string) error { - url := fmt.Sprintf("%s/%s", 
demo.resourceClient.BaseUrl, ID) - request, err := demo.resourceClient.NewRequest(url, http.MethodDelete, "") - if err != nil { - return fmt.Errorf("could not create request: %w", err) - } - - _, err = demo.resourceClient.Client.Do(request) - return err + return demo.resourceClient.Delete(ctx, ID) } -func (demo demoActions) Get(ctx context.Context, ID string) error { - demoProfileResponse, err := demo.get(ctx, ID) - if err != nil { - return err - } - - fmt.Println(demoProfileResponse) - return err -} - -func (demo demoActions) get(ctx context.Context, ID string) (string, error) { - request, err := demo.resourceClient.NewRequest(fmt.Sprintf("%s/%s", demo.resourceClient.BaseUrl, ID), http.MethodGet, "") - if err != nil { - return "", fmt.Errorf("could not create request: %w", err) - } - - resp, err := demo.resourceClient.Client.Do(request) - if err != nil { - return "", fmt.Errorf("could not get demo profile: %w", err) - } - - defer resp.Body.Close() - if resp.StatusCode != http.StatusOK { - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - return "", err - } - - validationError := string(body) - return "", fmt.Errorf("invalid demo profile: %s", validationError) - } - - return utils.IOReadCloserToString(resp.Body), nil +func (demo demoActions) Get(ctx context.Context, ID string) (string, error) { + return demo.resourceClient.Get(ctx, ID) } diff --git a/cli/actions/export_datastore_action.go b/cli/actions/export_datastore_action.go deleted file mode 100644 index 3756f6a3b0..0000000000 --- a/cli/actions/export_datastore_action.go +++ /dev/null @@ -1,57 +0,0 @@ -package actions - -import ( - "context" - "fmt" - - "github.com/kubeshop/tracetest/cli/file" - "github.com/kubeshop/tracetest/cli/openapi" - "go.uber.org/zap" -) - -type ExportDataStoreConfig struct { - OutputFile string - ID string -} - -type exportDataStoreAction struct { - logger *zap.Logger - client *openapi.APIClient -} - -var _ Action[ExportDataStoreConfig] = &exportDataStoreAction{} - -func NewExportDataStoreAction(logger *zap.Logger, client *openapi.APIClient) exportDataStoreAction { - return exportDataStoreAction{ - logger: logger, - client: client, - } -} - -func (a exportDataStoreAction) Run(ctx context.Context, args ExportDataStoreConfig) error { - if args.OutputFile == "" { - return fmt.Errorf("output file is required. Use --file to specify it") - } - - if args.ID == "" { - return fmt.Errorf("id is required. 
Use --id to specify it") - } - - req := a.client.ApiApi.GetDataStoreDefinitionFile(ctx, args.ID) - dataStoreContent, _, err := a.client.ApiApi.GetDataStoreDefinitionFileExecute(req) - if err != nil { - return fmt.Errorf("could not get data store by its id: %w", err) - } - - file, err := file.New(args.OutputFile, []byte(dataStoreContent)) - if err != nil { - return fmt.Errorf("could not process definition from server: %w", err) - } - - _, err = file.Write() - if err != nil { - return fmt.Errorf("could not save exported definition into file: %w", err) - } - - return nil -} diff --git a/cli/actions/list_datastore_action.go b/cli/actions/list_datastore_action.go deleted file mode 100644 index 20f481a178..0000000000 --- a/cli/actions/list_datastore_action.go +++ /dev/null @@ -1,42 +0,0 @@ -package actions - -import ( - "context" - "fmt" - - "github.com/kubeshop/tracetest/cli/config" - "github.com/kubeshop/tracetest/cli/formatters" - "github.com/kubeshop/tracetest/cli/openapi" - "go.uber.org/zap" -) - -type ListDataStoreConfig struct{} - -type listDataStoreAction struct { - config config.Config - logger *zap.Logger - client *openapi.APIClient -} - -var _ Action[ListDataStoreConfig] = &listDataStoreAction{} - -func NewListDataStoreAction(config config.Config, logger *zap.Logger, client *openapi.APIClient) listDataStoreAction { - return listDataStoreAction{ - logger: logger, - client: client, - } -} - -func (a listDataStoreAction) Run(ctx context.Context, args ListDataStoreConfig) error { - req := a.client.ApiApi.GetDataStores(ctx) - dataStores, _, err := a.client.ApiApi.GetDataStoresExecute(req) - if err != nil { - return fmt.Errorf("could not list data stores: %w", err) - } - - formatter := formatters.DataStoreList(a.config) - output := formatter.Format(dataStores) - fmt.Println(output) - - return nil -} diff --git a/cli/actions/polling.go b/cli/actions/polling.go index 92d89a4d22..637d4c4d35 100644 --- a/cli/actions/polling.go +++ b/cli/actions/polling.go @@ -3,14 +3,12 @@ package actions import ( "context" "fmt" - "io/ioutil" - "net/http" "github.com/kubeshop/tracetest/cli/file" "github.com/kubeshop/tracetest/cli/openapi" "github.com/kubeshop/tracetest/cli/utils" + "github.com/kubeshop/tracetest/server/model/yaml" "github.com/mitchellh/mapstructure" - "go.uber.org/zap" ) type pollingActions struct { @@ -27,119 +25,27 @@ func NewPollingActions(options ...ResourceArgsOption) pollingActions { } } +func (pollingActions) FileType() yaml.FileType { + return yaml.FileTypePollingProfile +} + func (pollingActions) Name() string { return "pollingprofile" } -func (polling pollingActions) Apply(ctx context.Context, args ApplyArgs) error { - if args.File == "" { - return fmt.Errorf("you must specify a file to be applied") - } - - polling.logger.Debug( - "applying analytics config", - zap.String("file", args.File), - ) - - fileContent, err := file.ReadRaw(args.File) - if err != nil { - return fmt.Errorf("could not read file: %w", err) - } - - if fileContent.Definition().Type != "PollingProfile" { - return fmt.Errorf(`file must be of type "Config"`) - } - +func (polling pollingActions) Apply(ctx context.Context, fileContent file.File) error { var pollingProfile openapi.PollingProfile mapstructure.Decode(fileContent.Definition().Spec, &pollingProfile.Spec) - return polling.update(ctx, fileContent, currentConfigID) -} - -// NOT NEEDED AT THE MOMENT -// func (polling pollingActions) create(ctx context.Context, file file.File) error { -// request, err := 
polling.resourceClient.NewRequest(polling.resourceClient.BaseUrl, http.MethodPost, file.Contents()) -// if err != nil { -// return fmt.Errorf("could not create request: %w", err) -// } - -// resp, err := polling.resourceClient.Client.Do(request) -// if err != nil { -// return fmt.Errorf("could not send request: %w", err) -// } - -// defer resp.Body.Close() -// if resp.StatusCode == http.StatusUnprocessableEntity { -// // validation error -// body, err := ioutil.ReadAll(resp.Body) -// if err != nil { -// return fmt.Errorf("could not send request: %w", err) -// } - -// validationError := string(body) -// return fmt.Errorf("invalid polling profile: %s", validationError) -// } -// if err != nil { -// return fmt.Errorf("could not create polling profile: %w", err) -// } - -// _, err = file.SaveChanges(utils.IOReadCloserToString(resp.Body)) -// return err -// } - -func (polling pollingActions) update(ctx context.Context, file file.File, ID string) error { - url := fmt.Sprintf("%s/%s", polling.resourceClient.BaseUrl, ID) - request, err := polling.resourceClient.NewRequest(url, http.MethodPut, file.Contents()) - if err != nil { - return fmt.Errorf("could not create request: %w", err) - } - - resp, err := polling.resourceClient.Client.Do(request) - if err != nil { - return fmt.Errorf("could not update polling profile: %w", err) - } - - defer resp.Body.Close() - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf("polling profile id doesn't exist on server. Remove it from the definition file and try again") - } - - if resp.StatusCode == http.StatusUnprocessableEntity { - // validation error - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - return fmt.Errorf("could not send request: %w", err) - } - - validationError := string(body) - return fmt.Errorf("invalid polling profile: %s", validationError) - } - - _, err = file.SaveChanges(utils.IOReadCloserToString(resp.Body)) - return err + return polling.resourceClient.Update(ctx, fileContent, currentConfigID) } -func (polling pollingActions) List(ctx context.Context, listArgs ListArgs) error { - // url := fmt.Sprintf("%s?skip=%d&take=%d&sortBy=%s&sortDirection=%s", polling.resourceClient.BaseUrl, listArgs.Skip, listArgs.Take, listArgs.SortBy, listArgs.SortDirection) - // request, err := polling.resourceClient.NewRequest(url, http.MethodGet, "") - // if err != nil { - // return fmt.Errorf("could not create request: %w", err) - // } - - // resp, err := polling.resourceClient.Client.Do(request) - // if err != nil { - // return fmt.Errorf("could not send request: %w", err) - // } - - // defer resp.Body.Close() - // fmt.Println(utils.IOReadCloserToString(resp.Body)) - // return nil - - return ErrNotSupportedResourceAction +func (polling pollingActions) List(ctx context.Context, listArgs utils.ListArgs) (string, error) { + return "", ErrNotSupportedResourceAction } func (polling pollingActions) Export(ctx context.Context, ID string, filePath string) error { - pollingProfile, err := polling.get(ctx) + pollingProfile, err := polling.resourceClient.Get(ctx, currentConfigID) if err != nil { return err } @@ -154,49 +60,9 @@ func (polling pollingActions) Export(ctx context.Context, ID string, filePath st } func (polling pollingActions) Delete(ctx context.Context, ID string) error { - // url := fmt.Sprintf("%s/%s", polling.resourceClient.BaseUrl, ID) - // request, err := polling.resourceClient.NewRequest(url, http.MethodDelete, "") - // if err != nil { - // return fmt.Errorf("could not create request: %w", err) - // } - - // _, err = 
polling.resourceClient.Client.Do(request) - // return err - return ErrNotSupportedResourceAction } -func (polling pollingActions) Get(ctx context.Context, ID string) error { - pollingProfileResponse, err := polling.get(ctx) - if err != nil { - return err - } - - fmt.Println(pollingProfileResponse) - return err -} - -func (polling pollingActions) get(ctx context.Context) (string, error) { - request, err := polling.resourceClient.NewRequest(fmt.Sprintf("%s/%s", polling.resourceClient.BaseUrl, currentConfigID), http.MethodGet, "") - if err != nil { - return "", fmt.Errorf("could not create request: %w", err) - } - - resp, err := polling.resourceClient.Client.Do(request) - if err != nil { - return "", fmt.Errorf("could not get polling profile: %w", err) - } - - defer resp.Body.Close() - if resp.StatusCode != http.StatusOK { - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - return "", err - } - - validationError := string(body) - return "", fmt.Errorf("invalid polling profile: %s", validationError) - } - - return utils.IOReadCloserToString(resp.Body), nil +func (polling pollingActions) Get(ctx context.Context, ID string) (string, error) { + return polling.resourceClient.Get(ctx, currentConfigID) } diff --git a/cli/actions/resource.go b/cli/actions/resource.go index d6bfbcbb11..6ce53bace4 100644 --- a/cli/actions/resource.go +++ b/cli/actions/resource.go @@ -1,7 +1,6 @@ package actions import ( - "context" "errors" "github.com/kubeshop/tracetest/cli/config" @@ -13,30 +12,18 @@ type ApplyArgs struct { File string } -type ListArgs struct { - Take int32 - Skip int32 - SortDirection string - SortBy string -} - -type ResourceActions interface { - Name() string - Apply(context.Context, ApplyArgs) error - List(context.Context, ListArgs) error - Get(context.Context, string) error - Export(context.Context, string, string) error - Delete(context.Context, string) error -} - type resourceArgs struct { logger *zap.Logger resourceClient utils.ResourceClient config config.Config } +func (r resourceArgs) Logger() *zap.Logger { + return r.logger +} + type ResourceArgsOption = func(args *resourceArgs) -type ResourceRegistry map[string]ResourceActions +type ResourceRegistry map[string]resourceActions var ( ErrResourceNotRegistered = errors.New("resource not registered") @@ -48,17 +35,17 @@ func NewResourceRegistry() ResourceRegistry { } func (r ResourceRegistry) Register(actions ResourceActions) { - r[actions.Name()] = actions + r[actions.Name()] = WrapActions(actions) } -func (r ResourceRegistry) Get(name string) (ResourceActions, error) { - resourceActions, found := r[name] +func (r ResourceRegistry) Get(name string) (resourceActions, error) { + actions, found := r[name] if !found { - return nil, ErrResourceNotRegistered + return resourceActions{}, ErrResourceNotRegistered } - return resourceActions, nil + return actions, nil } func WithClient(client utils.ResourceClient) ResourceArgsOption { diff --git a/cli/actions/resource_actions.go b/cli/actions/resource_actions.go new file mode 100644 index 0000000000..58599b4984 --- /dev/null +++ b/cli/actions/resource_actions.go @@ -0,0 +1,96 @@ +package actions + +import ( + "context" + "fmt" + + "github.com/kubeshop/tracetest/cli/file" + "github.com/kubeshop/tracetest/cli/utils" + "github.com/kubeshop/tracetest/server/model/yaml" + "go.uber.org/zap" +) + +type ResourceActions interface { + Logger() *zap.Logger + FileType() yaml.FileType + Name() string + Apply(context.Context, file.File) error + List(context.Context, utils.ListArgs) (string, error) + 
Get(context.Context, string) (string, error) + Delete(context.Context, string) error +} + +type resourceActions struct { + actions ResourceActions +} + +func WrapActions(actions ResourceActions) resourceActions { + return resourceActions{ + actions: actions, + } +} + +func (r *resourceActions) Name() string { + return r.actions.Name() +} + +func (r *resourceActions) Apply(ctx context.Context, args ApplyArgs) error { + if args.File == "" { + return fmt.Errorf("you must specify a file to be applied") + } + + r.actions.Logger().Debug( + fmt.Sprintf("applying %s", r.Name()), + zap.String("file", args.File), + ) + + fileContent, err := file.ReadRaw(args.File) + if err != nil { + return fmt.Errorf("could not read file: %w", err) + } + + if fileContent.Definition().Type != r.actions.FileType() { + return fmt.Errorf(fmt.Sprintf(`file must be of type "%s"`, r.actions.FileType())) + } + + return r.actions.Apply(ctx, fileContent) +} + +func (r *resourceActions) List(ctx context.Context, args utils.ListArgs) error { + resources, err := r.actions.List(ctx, args) + if err != nil { + return err + } + + fmt.Println(resources) + return nil +} + +func (r *resourceActions) Get(ctx context.Context, id string) error { + resource, err := r.actions.Get(ctx, id) + if err != nil { + return err + } + + fmt.Println(resource) + return nil +} + +func (r *resourceActions) Export(ctx context.Context, id string, filePath string) error { + resource, err := r.actions.Get(ctx, id) + if err != nil { + return err + } + + file, err := file.NewFromRaw(filePath, []byte(resource)) + if err != nil { + return fmt.Errorf("could not create file: %w", err) + } + + _, err = file.WriteRaw() + return err +} + +func (r *resourceActions) Delete(ctx context.Context, id string) error { + return r.actions.Delete(ctx, id) +} diff --git a/cli/actions/run_test_action.go b/cli/actions/run_test_action.go index 6f1cd16dc4..6c64331620 100644 --- a/cli/actions/run_test_action.go +++ b/cli/actions/run_test_action.go @@ -287,10 +287,10 @@ func (a runTestAction) runDefinitionFile(ctx context.Context, f file.File, param } } - runID := body.GetRunId() + runID := getTestRunIDFromString(body.GetRunId()) a.logger.Debug( "executed", - zap.String("runID", runID), + zap.Int32("runID", runID), zap.String("runType", body.GetType()), ) @@ -372,7 +372,7 @@ func (a runTestAction) getTest(ctx context.Context, id string) (openapi.Test, er return *test, nil } -func (a runTestAction) testRun(ctx context.Context, test openapi.Test, runID string, params runDefParams) error { +func (a runTestAction) testRun(ctx context.Context, test openapi.Test, runID int32, params runDefParams) error { a.logger.Debug("run test", zap.Bool("wait-for-results", params.WaitForResult)) testID := test.GetId() testRun, err := a.getTestRun(ctx, testID, runID) @@ -381,14 +381,14 @@ func (a runTestAction) testRun(ctx context.Context, test openapi.Test, runID str } if params.WaitForResult { - updatedTestRun, err := a.waitForTestResult(ctx, testID, testRun.GetId()) + updatedTestRun, err := a.waitForTestResult(ctx, testID, getTestRunID(testRun)) if err != nil { return fmt.Errorf("could not wait for result: %w", err) } testRun = updatedTestRun - if err := a.saveJUnitFile(ctx, testID, testRun.GetId(), params.JunitFile); err != nil { + if err := a.saveJUnitFile(ctx, testID, getTestRunID(testRun), params.JunitFile); err != nil { return fmt.Errorf("could not save junit file: %w", err) } } @@ -412,8 +412,7 @@ func (a runTestAction) testRun(ctx context.Context, test openapi.Test, runID str return nil } -func 
(a runTestAction) transactionRun(ctx context.Context, transaction openapi.Transaction, runID string, params runDefParams) error { - rid, _ := strconv.Atoi(runID) +func (a runTestAction) transactionRun(ctx context.Context, transaction openapi.Transaction, rid int32, params runDefParams) error { a.logger.Debug("run transaction", zap.Bool("wait-for-results", params.WaitForResult)) transactionID := transaction.GetId() transactionRun, err := a.getTransactionRun(ctx, transactionID, int32(rid)) @@ -457,7 +456,7 @@ func (a runTestAction) transactionRun(ctx context.Context, transaction openapi.T return nil } -func (a runTestAction) saveJUnitFile(ctx context.Context, testId, testRunId, outputFile string) error { +func (a runTestAction) saveJUnitFile(ctx context.Context, testId string, testRunId int32, outputFile string) error { if outputFile == "" { return nil } @@ -479,7 +478,7 @@ func (a runTestAction) saveJUnitFile(ctx context.Context, testId, testRunId, out } -func (a runTestAction) getTestRun(ctx context.Context, testID, runID string) (openapi.TestRun, error) { +func (a runTestAction) getTestRun(ctx context.Context, testID string, runID int32) (openapi.TestRun, error) { run, _, err := a.client.ApiApi. GetTestRun(ctx, testID, runID). Execute() @@ -501,7 +500,7 @@ func (a runTestAction) getTransactionRun(ctx context.Context, transactionID stri return *run, nil } -func (a runTestAction) waitForTestResult(ctx context.Context, testID, testRunID string) (openapi.TestRun, error) { +func (a runTestAction) waitForTestResult(ctx context.Context, testID string, testRunID int32) (openapi.TestRun, error) { var ( testRun openapi.TestRun lastError error @@ -573,7 +572,7 @@ func (a runTestAction) waitForTransactionResult(ctx context.Context, transaction return transactionRun, nil } -func (a runTestAction) isTestReady(ctx context.Context, testID, testRunId string) (*openapi.TestRun, error) { +func (a runTestAction) isTestReady(ctx context.Context, testID string, testRunId int32) (*openapi.TestRun, error) { req := a.client.ApiApi.GetTestRun(ctx, testID, testRunId) run, _, err := a.client.ApiApi.GetTestRunExecute(req) if err != nil { @@ -626,3 +625,12 @@ func (a runTestAction) getMetadata() map[string]string { return metadata } + +func getTestRunIDFromString(testRunIDAsString string) int32 { + testRunID, _ := strconv.Atoi(testRunIDAsString) + return int32(testRunID) +} + +func getTestRunID(testRun openapi.TestRun) int32 { + return getTestRunIDFromString(testRun.GetId()) +} diff --git a/cli/cmd/config.go b/cli/cmd/config.go index 87390468a2..e83e470e38 100644 --- a/cli/cmd/config.go +++ b/cli/cmd/config.go @@ -58,6 +58,10 @@ func setupCommand(options ...setupOption) func(cmd *cobra.Command, args []string demoActions := actions.NewDemoActions(demoOptions...) resourceRegistry.Register(demoActions) + dataStoreOptions := append(baseOptions, actions.WithClient(utils.GetResourceAPIClient("datastores", cliConfig))) + dataStoreActions := actions.NewDataStoreActions(dataStoreOptions...) 
+ resourceRegistry.Register(dataStoreActions) + if config.shouldValidateConfig { validateConfig(cmd, args) } diff --git a/cli/cmd/datastore_apply_cmd.go b/cli/cmd/datastore_apply_cmd.go deleted file mode 100644 index 6d74aef6ca..0000000000 --- a/cli/cmd/datastore_apply_cmd.go +++ /dev/null @@ -1,46 +0,0 @@ -package cmd - -import ( - "context" - "os" - - "github.com/kubeshop/tracetest/cli/actions" - "github.com/kubeshop/tracetest/cli/analytics" - "github.com/kubeshop/tracetest/cli/utils" - "github.com/spf13/cobra" - "go.uber.org/zap" -) - -var dataStoreApplyFile string - -var dataStoreApplyCmd = &cobra.Command{ - Use: "apply", - Short: "Apply (create/update) data store configuration to your Tracetest server", - Long: "Apply (create/update) data store configuration to your Tracetest server", - PreRun: setupCommand(), - Run: func(cmd *cobra.Command, args []string) { - analytics.Track("Datastore Apply", "cmd", map[string]string{}) - - ctx := context.Background() - client := utils.GetAPIClient(cliConfig) - - applyDataStoreAction := actions.NewApplyDataStoreAction(cliLogger, client) - actionArgs := actions.ApplyDataStoreConfig{ - File: dataStoreApplyFile, - } - - err := applyDataStoreAction.Run(ctx, actionArgs) - if err != nil { - cliLogger.Error("failed to run test", zap.Error(err)) - os.Exit(1) - return - } - - }, - PostRun: teardownCommand, -} - -func init() { - dataStoreApplyCmd.PersistentFlags().StringVarP(&dataStoreApplyFile, "file", "f", "", "file containing the data store configuration") - dataStoreCmd.AddCommand(dataStoreApplyCmd) -} diff --git a/cli/cmd/datastore_cmd.go b/cli/cmd/datastore_cmd.go deleted file mode 100644 index 1c880dc850..0000000000 --- a/cli/cmd/datastore_cmd.go +++ /dev/null @@ -1,21 +0,0 @@ -package cmd - -import ( - "github.com/spf13/cobra" -) - -var dataStoreCmd = &cobra.Command{ - GroupID: cmdGroupConfig.ID, - Use: "datastore", - Short: "Manage your tracetest data stores", - Long: "Manage your tracetest data stores", - PreRun: setupCommand(), - Run: func(cmd *cobra.Command, args []string) { - cmd.Help() - }, - PostRun: teardownCommand, -} - -func init() { - rootCmd.AddCommand(dataStoreCmd) -} diff --git a/cli/cmd/datastore_export_cmd.go b/cli/cmd/datastore_export_cmd.go deleted file mode 100644 index 63ead6eabe..0000000000 --- a/cli/cmd/datastore_export_cmd.go +++ /dev/null @@ -1,52 +0,0 @@ -package cmd - -import ( - "context" - "os" - - "github.com/kubeshop/tracetest/cli/actions" - "github.com/kubeshop/tracetest/cli/analytics" - "github.com/kubeshop/tracetest/cli/utils" - "github.com/spf13/cobra" - "go.uber.org/zap" -) - -var ( - exportOutputFile string - dataStoreID string -) - -var dataStoreExportCmd = &cobra.Command{ - Use: "export", - Short: "Exports a data store configuration into a file", - Long: "Exports a data store configuration into a file", - PreRun: setupCommand(), - Run: func(cmd *cobra.Command, args []string) { - analytics.Track("Datastore Export", "cmd", map[string]string{}) - - ctx := context.Background() - client := utils.GetAPIClient(cliConfig) - - exportDataStoreAction := actions.NewExportDataStoreAction(cliLogger, client) - actionArgs := actions.ExportDataStoreConfig{ - ID: dataStoreID, - OutputFile: exportOutputFile, - } - - err := exportDataStoreAction.Run(ctx, actionArgs) - if err != nil { - cliLogger.Error("failed to export data store", zap.Error(err)) - os.Exit(1) - return - } - - }, - PostRun: teardownCommand, -} - -func init() { - dataStoreExportCmd.PersistentFlags().StringVarP(&exportOutputFile, "output", "o", "", "file where data 
store configuration will be saved") - dataStoreExportCmd.PersistentFlags().StringVarP(&dataStoreID, "id", "", "", "id of the data store that will be exported") - - dataStoreCmd.AddCommand(dataStoreExportCmd) -} diff --git a/cli/cmd/datastore_legacy_cmd.go b/cli/cmd/datastore_legacy_cmd.go new file mode 100644 index 0000000000..b6fd481932 --- /dev/null +++ b/cli/cmd/datastore_legacy_cmd.go @@ -0,0 +1,99 @@ +package cmd + +import ( + "fmt" + + "github.com/spf13/cobra" +) + +var ( + // apply param + dataStoreApplyFile string + // export param + exportOutputFile string + dataStoreID string +) + +var dataStoreCmd = &cobra.Command{ + GroupID: cmdGroupConfig.ID, + Use: "datastore", + Short: "Manage your tracetest data stores", + Long: "Manage your tracetest data stores", + PreRun: setupCommand(), + Run: func(cmd *cobra.Command, args []string) { + fmt.Println("Warning! This is a deprecated command and it will be removed on Tracetest future versions!") + fmt.Println("Please use `tracetest (apply|delete|export|get) datastore` commands instead.") + fmt.Println("") + + cmd.Help() + }, + PostRun: teardownCommand, +} + +var dataStoreApplyCmd = &cobra.Command{ + Use: "apply", + Short: "Apply (create/update) data store configuration to your Tracetest server", + Long: "Apply (create/update) data store configuration to your Tracetest server", + PreRun: setupCommand(), + Run: func(cmd *cobra.Command, args []string) { + fmt.Println("Warning! This is a deprecated command and it will be removed on Tracetest future versions!") + fmt.Println("Please use `tracetest apply datastore --file [path]` command instead.") + fmt.Println("") + + // call new apply command + definitionFile = dataStoreApplyFile + applyCmd.Run(applyCmd, []string{"datastore"}) + }, + PostRun: teardownCommand, +} + +var dataStoreExportCmd = &cobra.Command{ + Use: "export", + Short: "Exports a data store configuration into a file", + Long: "Exports a data store configuration into a file", + PreRun: setupCommand(), + Run: func(cmd *cobra.Command, args []string) { + fmt.Println("Warning! This is a deprecated command and it will be removed on Tracetest future versions!") + fmt.Println("Please use `tracetest export datastore --id current` command instead.") + fmt.Println("") + + // call new export command + exportResourceID = "current" + exportResourceFile = exportOutputFile + exportCmd.Run(exportCmd, []string{"datastore"}) + }, + PostRun: teardownCommand, +} + +var dataStoreListCmd = &cobra.Command{ + Use: "list", + Short: "List data store configurations to your tracetest server", + Long: "List data store configurations to your tracetest server", + PreRun: setupCommand(), + Run: func(cmd *cobra.Command, args []string) { + fmt.Println("Warning! 
This is a deprecated command and it will be removed on Tracetest future versions!") + fmt.Println("Please use `tracetest get datastore --id current` command instead.") + fmt.Println("") + + // call new get command + resourceID = "current" + getCmd.Run(getCmd, []string{"datastore"}) + }, + PostRun: teardownCommand, +} + +func init() { + rootCmd.AddCommand(dataStoreCmd) + + // apply + dataStoreApplyCmd.PersistentFlags().StringVarP(&dataStoreApplyFile, "file", "f", "", "file containing the data store configuration") + dataStoreCmd.AddCommand(dataStoreApplyCmd) + + // export + dataStoreExportCmd.PersistentFlags().StringVarP(&exportOutputFile, "output", "o", "", "file where data store configuration will be saved") + dataStoreExportCmd.PersistentFlags().StringVarP(&dataStoreID, "id", "", "", "id of the data store that will be exported") + dataStoreCmd.AddCommand(dataStoreExportCmd) + + // list + dataStoreCmd.AddCommand(dataStoreListCmd) +} diff --git a/cli/cmd/datastore_list_cmd.go b/cli/cmd/datastore_list_cmd.go deleted file mode 100644 index 97c8cb689f..0000000000 --- a/cli/cmd/datastore_list_cmd.go +++ /dev/null @@ -1,41 +0,0 @@ -package cmd - -import ( - "context" - "os" - - "github.com/kubeshop/tracetest/cli/actions" - "github.com/kubeshop/tracetest/cli/analytics" - "github.com/kubeshop/tracetest/cli/utils" - "github.com/spf13/cobra" - "go.uber.org/zap" -) - -var dataStoreListCmd = &cobra.Command{ - Use: "list", - Short: "List data store configurations to your tracetest server", - Long: "List data store configurations to your tracetest server", - PreRun: setupCommand(), - Run: func(cmd *cobra.Command, args []string) { - analytics.Track("Datastore List", "cmd", map[string]string{}) - - ctx := context.Background() - client := utils.GetAPIClient(cliConfig) - - applyDataStoreAction := actions.NewListDataStoreAction(cliConfig, cliLogger, client) - actionArgs := actions.ListDataStoreConfig{} - - err := applyDataStoreAction.Run(ctx, actionArgs) - if err != nil { - cliLogger.Error("failed to list data stores", zap.Error(err)) - os.Exit(1) - return - } - - }, - PostRun: teardownCommand, -} - -func init() { - dataStoreCmd.AddCommand(dataStoreListCmd) -} diff --git a/cli/cmd/list_cmd.go b/cli/cmd/list_cmd.go index f414cafd8a..2f3a0a3489 100644 --- a/cli/cmd/list_cmd.go +++ b/cli/cmd/list_cmd.go @@ -5,8 +5,8 @@ import ( "fmt" "os" - "github.com/kubeshop/tracetest/cli/actions" "github.com/kubeshop/tracetest/cli/analytics" + "github.com/kubeshop/tracetest/cli/utils" "github.com/spf13/cobra" "go.uber.org/zap" ) @@ -41,7 +41,7 @@ var listCmd = &cobra.Command{ return } - listArgs := actions.ListArgs{ + listArgs := utils.ListArgs{ Take: listTake, Skip: listSkip, SortDirection: listSortDirection, diff --git a/cli/openapi/api_api.go b/cli/openapi/api_api.go index 67348558f5..750301f2d4 100644 --- a/cli/openapi/api_api.go +++ b/cli/openapi/api_api.go @@ -22,114 +22,6 @@ import ( // ApiApiService ApiApi service type ApiApiService service -type ApiCreateDataStoreRequest struct { - ctx context.Context - ApiService *ApiApiService - dataStore *DataStore -} - -func (r ApiCreateDataStoreRequest) DataStore(dataStore DataStore) ApiCreateDataStoreRequest { - r.dataStore = &dataStore - return r -} - -func (r ApiCreateDataStoreRequest) Execute() (*DataStore, *http.Response, error) { - return r.ApiService.CreateDataStoreExecute(r) -} - -/* -CreateDataStore Create a new Data Store - -Create a new Data Store - - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. 
Passed from http.Request or context.Background(). - @return ApiCreateDataStoreRequest -*/ -func (a *ApiApiService) CreateDataStore(ctx context.Context) ApiCreateDataStoreRequest { - return ApiCreateDataStoreRequest{ - ApiService: a, - ctx: ctx, - } -} - -// Execute executes the request -// -// @return DataStore -func (a *ApiApiService) CreateDataStoreExecute(r ApiCreateDataStoreRequest) (*DataStore, *http.Response, error) { - var ( - localVarHTTPMethod = http.MethodPost - localVarPostBody interface{} - formFiles []formFile - localVarReturnValue *DataStore - ) - - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ApiApiService.CreateDataStore") - if err != nil { - return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} - } - - localVarPath := localBasePath + "/datastores" - - localVarHeaderParams := make(map[string]string) - localVarQueryParams := url.Values{} - localVarFormParams := url.Values{} - - // to determine the Content-Type header - localVarHTTPContentTypes := []string{"application/json"} - - // set Content-Type header - localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) - if localVarHTTPContentType != "" { - localVarHeaderParams["Content-Type"] = localVarHTTPContentType - } - - // to determine the Accept header - localVarHTTPHeaderAccepts := []string{"application/json"} - - // set Accept header - localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) - if localVarHTTPHeaderAccept != "" { - localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept - } - // body params - localVarPostBody = r.dataStore - req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) - if err != nil { - return localVarReturnValue, nil, err - } - - localVarHTTPResponse, err := a.client.callAPI(req) - if err != nil || localVarHTTPResponse == nil { - return localVarReturnValue, localVarHTTPResponse, err - } - - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) - localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) - if err != nil { - return localVarReturnValue, localVarHTTPResponse, err - } - - if localVarHTTPResponse.StatusCode >= 300 { - newErr := &GenericOpenAPIError{ - body: localVarBody, - error: localVarHTTPResponse.Status, - } - return localVarReturnValue, localVarHTTPResponse, newErr - } - - err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) - if err != nil { - newErr := &GenericOpenAPIError{ - body: localVarBody, - error: err.Error(), - } - return localVarReturnValue, localVarHTTPResponse, newErr - } - - return localVarReturnValue, localVarHTTPResponse, nil -} - type ApiCreateEnvironmentRequest struct { ctx context.Context ApiService *ApiApiService @@ -454,98 +346,6 @@ func (a *ApiApiService) CreateTransactionExecute(r ApiCreateTransactionRequest) return localVarReturnValue, localVarHTTPResponse, nil } -type ApiDeleteDataStoreRequest struct { - ctx context.Context - ApiService *ApiApiService - dataStoreId string -} - -func (r ApiDeleteDataStoreRequest) Execute() (*http.Response, error) { - return r.ApiService.DeleteDataStoreExecute(r) -} - -/* -DeleteDataStore Delete a Data Store - -Delete a Data Store - - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
- @param dataStoreId - @return ApiDeleteDataStoreRequest -*/ -func (a *ApiApiService) DeleteDataStore(ctx context.Context, dataStoreId string) ApiDeleteDataStoreRequest { - return ApiDeleteDataStoreRequest{ - ApiService: a, - ctx: ctx, - dataStoreId: dataStoreId, - } -} - -// Execute executes the request -func (a *ApiApiService) DeleteDataStoreExecute(r ApiDeleteDataStoreRequest) (*http.Response, error) { - var ( - localVarHTTPMethod = http.MethodDelete - localVarPostBody interface{} - formFiles []formFile - ) - - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ApiApiService.DeleteDataStore") - if err != nil { - return nil, &GenericOpenAPIError{error: err.Error()} - } - - localVarPath := localBasePath + "/datastores/{dataStoreId}" - localVarPath = strings.Replace(localVarPath, "{"+"dataStoreId"+"}", url.PathEscape(parameterValueToString(r.dataStoreId, "dataStoreId")), -1) - - localVarHeaderParams := make(map[string]string) - localVarQueryParams := url.Values{} - localVarFormParams := url.Values{} - - // to determine the Content-Type header - localVarHTTPContentTypes := []string{} - - // set Content-Type header - localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) - if localVarHTTPContentType != "" { - localVarHeaderParams["Content-Type"] = localVarHTTPContentType - } - - // to determine the Accept header - localVarHTTPHeaderAccepts := []string{} - - // set Accept header - localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) - if localVarHTTPHeaderAccept != "" { - localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept - } - req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) - if err != nil { - return nil, err - } - - localVarHTTPResponse, err := a.client.callAPI(req) - if err != nil || localVarHTTPResponse == nil { - return localVarHTTPResponse, err - } - - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) - localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) - if err != nil { - return localVarHTTPResponse, err - } - - if localVarHTTPResponse.StatusCode >= 300 { - newErr := &GenericOpenAPIError{ - body: localVarBody, - error: localVarHTTPResponse.Status, - } - return localVarHTTPResponse, newErr - } - - return localVarHTTPResponse, nil -} - type ApiDeleteEnvironmentRequest struct { ctx context.Context ApiService *ApiApiService @@ -654,7 +454,7 @@ DeleteTest delete a test delete a test @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param testId + @param testId id of the test @return ApiDeleteTestRequest */ func (a *ApiApiService) DeleteTest(ctx context.Context, testId string) ApiDeleteTestRequest { @@ -734,7 +534,7 @@ type ApiDeleteTestRunRequest struct { ctx context.Context ApiService *ApiApiService testId string - runId string + runId int32 } func (r ApiDeleteTestRunRequest) Execute() (*http.Response, error) { @@ -747,11 +547,11 @@ DeleteTestRun delete a test run delete a test run @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
- @param testId - @param runId + @param testId id of the test + @param runId id of the run @return ApiDeleteTestRunRequest */ -func (a *ApiApiService) DeleteTestRun(ctx context.Context, testId string, runId string) ApiDeleteTestRunRequest { +func (a *ApiApiService) DeleteTestRun(ctx context.Context, testId string, runId int32) ApiDeleteTestRunRequest { return ApiDeleteTestRunRequest{ ApiService: a, ctx: ctx, @@ -842,7 +642,7 @@ DeleteTransaction delete a transaction delete a transaction @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param transactionId + @param transactionId id of the transaction @return ApiDeleteTransactionRequest */ func (a *ApiApiService) DeleteTransaction(ctx context.Context, transactionId string) ApiDeleteTransactionRequest { @@ -935,8 +735,8 @@ DeleteTransactionRun Delete a specific run from a particular transaction Delete a specific run from a particular transaction @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param transactionId - @param runId + @param transactionId id of the transaction + @param runId id of the run @return ApiDeleteTransactionRunRequest */ func (a *ApiApiService) DeleteTransactionRun(ctx context.Context, transactionId string, runId int32) ApiDeleteTransactionRunRequest { @@ -1008,391 +808,67 @@ func (a *ApiApiService) DeleteTransactionRunExecute(r ApiDeleteTransactionRunReq body: localVarBody, error: localVarHTTPResponse.Status, } - return localVarHTTPResponse, newErr - } - - return localVarHTTPResponse, nil -} - -type ApiDryRunAssertionRequest struct { - ctx context.Context - ApiService *ApiApiService - testId string - runId string - testSpecs *TestSpecs -} - -func (r ApiDryRunAssertionRequest) TestSpecs(testSpecs TestSpecs) ApiDryRunAssertionRequest { - r.testSpecs = &testSpecs - return r -} - -func (r ApiDryRunAssertionRequest) Execute() (*AssertionResults, *http.Response, error) { - return r.ApiService.DryRunAssertionExecute(r) -} - -/* -DryRunAssertion run given assertions against the traces from the given run without persisting anything - -use this method to test a definition against an actual trace without creating a new version or persisting anything - - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
- @param testId - @param runId - @return ApiDryRunAssertionRequest -*/ -func (a *ApiApiService) DryRunAssertion(ctx context.Context, testId string, runId string) ApiDryRunAssertionRequest { - return ApiDryRunAssertionRequest{ - ApiService: a, - ctx: ctx, - testId: testId, - runId: runId, - } -} - -// Execute executes the request -// -// @return AssertionResults -func (a *ApiApiService) DryRunAssertionExecute(r ApiDryRunAssertionRequest) (*AssertionResults, *http.Response, error) { - var ( - localVarHTTPMethod = http.MethodPut - localVarPostBody interface{} - formFiles []formFile - localVarReturnValue *AssertionResults - ) - - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ApiApiService.DryRunAssertion") - if err != nil { - return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} - } - - localVarPath := localBasePath + "/tests/{testId}/run/{runId}/dry-run" - localVarPath = strings.Replace(localVarPath, "{"+"testId"+"}", url.PathEscape(parameterValueToString(r.testId, "testId")), -1) - localVarPath = strings.Replace(localVarPath, "{"+"runId"+"}", url.PathEscape(parameterValueToString(r.runId, "runId")), -1) - - localVarHeaderParams := make(map[string]string) - localVarQueryParams := url.Values{} - localVarFormParams := url.Values{} - - // to determine the Content-Type header - localVarHTTPContentTypes := []string{"application/json"} - - // set Content-Type header - localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) - if localVarHTTPContentType != "" { - localVarHeaderParams["Content-Type"] = localVarHTTPContentType - } - - // to determine the Accept header - localVarHTTPHeaderAccepts := []string{"application/json"} - - // set Accept header - localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) - if localVarHTTPHeaderAccept != "" { - localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept - } - // body params - localVarPostBody = r.testSpecs - req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) - if err != nil { - return localVarReturnValue, nil, err - } - - localVarHTTPResponse, err := a.client.callAPI(req) - if err != nil || localVarHTTPResponse == nil { - return localVarReturnValue, localVarHTTPResponse, err - } - - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) - localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) - if err != nil { - return localVarReturnValue, localVarHTTPResponse, err - } - - if localVarHTTPResponse.StatusCode >= 300 { - newErr := &GenericOpenAPIError{ - body: localVarBody, - error: localVarHTTPResponse.Status, - } - return localVarReturnValue, localVarHTTPResponse, newErr - } - - err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) - if err != nil { - newErr := &GenericOpenAPIError{ - body: localVarBody, - error: err.Error(), - } - return localVarReturnValue, localVarHTTPResponse, newErr - } - - return localVarReturnValue, localVarHTTPResponse, nil -} - -type ApiExecuteDefinitionRequest struct { - ctx context.Context - ApiService *ApiApiService - textDefinition *TextDefinition -} - -func (r ApiExecuteDefinitionRequest) TextDefinition(textDefinition TextDefinition) ApiExecuteDefinitionRequest { - r.textDefinition = &textDefinition - return r -} - -func (r ApiExecuteDefinitionRequest) Execute() (*ExecuteDefinitionResponse, *http.Response, error) { 
- return r.ApiService.ExecuteDefinitionExecute(r) -} - -/* -ExecuteDefinition Execute a definition - -Execute a definition - - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @return ApiExecuteDefinitionRequest -*/ -func (a *ApiApiService) ExecuteDefinition(ctx context.Context) ApiExecuteDefinitionRequest { - return ApiExecuteDefinitionRequest{ - ApiService: a, - ctx: ctx, - } -} - -// Execute executes the request -// -// @return ExecuteDefinitionResponse -func (a *ApiApiService) ExecuteDefinitionExecute(r ApiExecuteDefinitionRequest) (*ExecuteDefinitionResponse, *http.Response, error) { - var ( - localVarHTTPMethod = http.MethodPost - localVarPostBody interface{} - formFiles []formFile - localVarReturnValue *ExecuteDefinitionResponse - ) - - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ApiApiService.ExecuteDefinition") - if err != nil { - return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} - } - - localVarPath := localBasePath + "/definition.yaml" - - localVarHeaderParams := make(map[string]string) - localVarQueryParams := url.Values{} - localVarFormParams := url.Values{} - - // to determine the Content-Type header - localVarHTTPContentTypes := []string{"text/json"} - - // set Content-Type header - localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) - if localVarHTTPContentType != "" { - localVarHeaderParams["Content-Type"] = localVarHTTPContentType - } - - // to determine the Accept header - localVarHTTPHeaderAccepts := []string{"application/json"} - - // set Accept header - localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) - if localVarHTTPHeaderAccept != "" { - localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept - } - // body params - localVarPostBody = r.textDefinition - req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) - if err != nil { - return localVarReturnValue, nil, err - } - - localVarHTTPResponse, err := a.client.callAPI(req) - if err != nil || localVarHTTPResponse == nil { - return localVarReturnValue, localVarHTTPResponse, err - } - - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) - localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) - if err != nil { - return localVarReturnValue, localVarHTTPResponse, err - } - - if localVarHTTPResponse.StatusCode >= 300 { - newErr := &GenericOpenAPIError{ - body: localVarBody, - error: localVarHTTPResponse.Status, - } - return localVarReturnValue, localVarHTTPResponse, newErr - } - - err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) - if err != nil { - newErr := &GenericOpenAPIError{ - body: localVarBody, - error: err.Error(), - } - return localVarReturnValue, localVarHTTPResponse, newErr - } - - return localVarReturnValue, localVarHTTPResponse, nil -} - -type ApiExportTestRunRequest struct { - ctx context.Context - ApiService *ApiApiService - testId string - runId string -} - -func (r ApiExportTestRunRequest) Execute() (*ExportedTestInformation, *http.Response, error) { - return r.ApiService.ExportTestRunExecute(r) -} - -/* -ExportTestRun export test and test run information - -export test and test run information for debugging - - @param ctx context.Context - for authentication, logging, 
cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param testId - @param runId - @return ApiExportTestRunRequest -*/ -func (a *ApiApiService) ExportTestRun(ctx context.Context, testId string, runId string) ApiExportTestRunRequest { - return ApiExportTestRunRequest{ - ApiService: a, - ctx: ctx, - testId: testId, - runId: runId, - } -} - -// Execute executes the request -// -// @return ExportedTestInformation -func (a *ApiApiService) ExportTestRunExecute(r ApiExportTestRunRequest) (*ExportedTestInformation, *http.Response, error) { - var ( - localVarHTTPMethod = http.MethodGet - localVarPostBody interface{} - formFiles []formFile - localVarReturnValue *ExportedTestInformation - ) - - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ApiApiService.ExportTestRun") - if err != nil { - return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} - } - - localVarPath := localBasePath + "/tests/{testId}/run/{runId}/export" - localVarPath = strings.Replace(localVarPath, "{"+"testId"+"}", url.PathEscape(parameterValueToString(r.testId, "testId")), -1) - localVarPath = strings.Replace(localVarPath, "{"+"runId"+"}", url.PathEscape(parameterValueToString(r.runId, "runId")), -1) - - localVarHeaderParams := make(map[string]string) - localVarQueryParams := url.Values{} - localVarFormParams := url.Values{} - - // to determine the Content-Type header - localVarHTTPContentTypes := []string{} - - // set Content-Type header - localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) - if localVarHTTPContentType != "" { - localVarHeaderParams["Content-Type"] = localVarHTTPContentType - } - - // to determine the Accept header - localVarHTTPHeaderAccepts := []string{"application/json"} - - // set Accept header - localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) - if localVarHTTPHeaderAccept != "" { - localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept - } - req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) - if err != nil { - return localVarReturnValue, nil, err - } - - localVarHTTPResponse, err := a.client.callAPI(req) - if err != nil || localVarHTTPResponse == nil { - return localVarReturnValue, localVarHTTPResponse, err - } - - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) - localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) - if err != nil { - return localVarReturnValue, localVarHTTPResponse, err - } - - if localVarHTTPResponse.StatusCode >= 300 { - newErr := &GenericOpenAPIError{ - body: localVarBody, - error: localVarHTTPResponse.Status, - } - return localVarReturnValue, localVarHTTPResponse, newErr - } - - err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) - if err != nil { - newErr := &GenericOpenAPIError{ - body: localVarBody, - error: err.Error(), - } - return localVarReturnValue, localVarHTTPResponse, newErr + return localVarHTTPResponse, newErr } - return localVarReturnValue, localVarHTTPResponse, nil + return localVarHTTPResponse, nil } -type ApiExpressionResolveRequest struct { - ctx context.Context - ApiService *ApiApiService - resolveRequestInfo *ResolveRequestInfo +type ApiDryRunAssertionRequest struct { + ctx context.Context + ApiService *ApiApiService + testId string + runId int32 + testSpecs *TestSpecs } -func (r 
ApiExpressionResolveRequest) ResolveRequestInfo(resolveRequestInfo ResolveRequestInfo) ApiExpressionResolveRequest { - r.resolveRequestInfo = &resolveRequestInfo +func (r ApiDryRunAssertionRequest) TestSpecs(testSpecs TestSpecs) ApiDryRunAssertionRequest { + r.testSpecs = &testSpecs return r } -func (r ApiExpressionResolveRequest) Execute() (*ResolveResponseInfo, *http.Response, error) { - return r.ApiService.ExpressionResolveExecute(r) +func (r ApiDryRunAssertionRequest) Execute() (*AssertionResults, *http.Response, error) { + return r.ApiService.DryRunAssertionExecute(r) } /* -ExpressionResolve resolves an expression and returns the result string +DryRunAssertion run given assertions against the traces from the given run without persisting anything -resolves an expression and returns the result string +use this method to test a definition against an actual trace without creating a new version or persisting anything @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @return ApiExpressionResolveRequest + @param testId id of the test + @param runId id of the run + @return ApiDryRunAssertionRequest */ -func (a *ApiApiService) ExpressionResolve(ctx context.Context) ApiExpressionResolveRequest { - return ApiExpressionResolveRequest{ +func (a *ApiApiService) DryRunAssertion(ctx context.Context, testId string, runId int32) ApiDryRunAssertionRequest { + return ApiDryRunAssertionRequest{ ApiService: a, ctx: ctx, + testId: testId, + runId: runId, } } // Execute executes the request // -// @return ResolveResponseInfo -func (a *ApiApiService) ExpressionResolveExecute(r ApiExpressionResolveRequest) (*ResolveResponseInfo, *http.Response, error) { +// @return AssertionResults +func (a *ApiApiService) DryRunAssertionExecute(r ApiDryRunAssertionRequest) (*AssertionResults, *http.Response, error) { var ( - localVarHTTPMethod = http.MethodPost + localVarHTTPMethod = http.MethodPut localVarPostBody interface{} formFiles []formFile - localVarReturnValue *ResolveResponseInfo + localVarReturnValue *AssertionResults ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ApiApiService.ExpressionResolve") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ApiApiService.DryRunAssertion") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } - localVarPath := localBasePath + "/expressions/resolve" + localVarPath := localBasePath + "/tests/{testId}/run/{runId}/dry-run" + localVarPath = strings.Replace(localVarPath, "{"+"testId"+"}", url.PathEscape(parameterValueToString(r.testId, "testId")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"runId"+"}", url.PathEscape(parameterValueToString(r.runId, "runId")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -1416,7 +892,7 @@ func (a *ApiApiService) ExpressionResolveExecute(r ApiExpressionResolveRequest) localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept } // body params - localVarPostBody = r.resolveRequestInfo + localVarPostBody = r.testSpecs req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) if err != nil { return localVarReturnValue, nil, err @@ -1454,58 +930,60 @@ func (a *ApiApiService) ExpressionResolveExecute(r ApiExpressionResolveRequest) return localVarReturnValue, localVarHTTPResponse, nil } -type ApiGetDataStoreRequest struct 
{ - ctx context.Context - ApiService *ApiApiService - dataStoreId string +type ApiExecuteDefinitionRequest struct { + ctx context.Context + ApiService *ApiApiService + textDefinition *TextDefinition +} + +func (r ApiExecuteDefinitionRequest) TextDefinition(textDefinition TextDefinition) ApiExecuteDefinitionRequest { + r.textDefinition = &textDefinition + return r } -func (r ApiGetDataStoreRequest) Execute() (*DataStore, *http.Response, error) { - return r.ApiService.GetDataStoreExecute(r) +func (r ApiExecuteDefinitionRequest) Execute() (*ExecuteDefinitionResponse, *http.Response, error) { + return r.ApiService.ExecuteDefinitionExecute(r) } /* -GetDataStore Get a Data Store +ExecuteDefinition Execute a definition -Get a Data Store +Execute a definition @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param dataStoreId - @return ApiGetDataStoreRequest + @return ApiExecuteDefinitionRequest */ -func (a *ApiApiService) GetDataStore(ctx context.Context, dataStoreId string) ApiGetDataStoreRequest { - return ApiGetDataStoreRequest{ - ApiService: a, - ctx: ctx, - dataStoreId: dataStoreId, +func (a *ApiApiService) ExecuteDefinition(ctx context.Context) ApiExecuteDefinitionRequest { + return ApiExecuteDefinitionRequest{ + ApiService: a, + ctx: ctx, } } // Execute executes the request // -// @return DataStore -func (a *ApiApiService) GetDataStoreExecute(r ApiGetDataStoreRequest) (*DataStore, *http.Response, error) { +// @return ExecuteDefinitionResponse +func (a *ApiApiService) ExecuteDefinitionExecute(r ApiExecuteDefinitionRequest) (*ExecuteDefinitionResponse, *http.Response, error) { var ( - localVarHTTPMethod = http.MethodGet + localVarHTTPMethod = http.MethodPost localVarPostBody interface{} formFiles []formFile - localVarReturnValue *DataStore + localVarReturnValue *ExecuteDefinitionResponse ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ApiApiService.GetDataStore") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ApiApiService.ExecuteDefinition") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } - localVarPath := localBasePath + "/datastores/{dataStoreId}" - localVarPath = strings.Replace(localVarPath, "{"+"dataStoreId"+"}", url.PathEscape(parameterValueToString(r.dataStoreId, "dataStoreId")), -1) + localVarPath := localBasePath + "/definition.yaml" localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} // to determine the Content-Type header - localVarHTTPContentTypes := []string{} + localVarHTTPContentTypes := []string{"text/json"} // set Content-Type header localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) @@ -1521,6 +999,8 @@ func (a *ApiApiService) GetDataStoreExecute(r ApiGetDataStoreRequest) (*DataStor if localVarHTTPHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept } + // body params + localVarPostBody = r.textDefinition req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) if err != nil { return localVarReturnValue, nil, err @@ -1558,51 +1038,55 @@ func (a *ApiApiService) GetDataStoreExecute(r ApiGetDataStoreRequest) (*DataStor return localVarReturnValue, localVarHTTPResponse, nil } -type ApiGetDataStoreDefinitionFileRequest struct { - ctx context.Context - ApiService 
*ApiApiService - dataStoreId string +type ApiExportTestRunRequest struct { + ctx context.Context + ApiService *ApiApiService + testId string + runId int32 } -func (r ApiGetDataStoreDefinitionFileRequest) Execute() (string, *http.Response, error) { - return r.ApiService.GetDataStoreDefinitionFileExecute(r) +func (r ApiExportTestRunRequest) Execute() (*ExportedTestInformation, *http.Response, error) { + return r.ApiService.ExportTestRunExecute(r) } /* -GetDataStoreDefinitionFile Get the data store definition as an YAML file +ExportTestRun export test and test run information -Get the data store as an YAML file +export test and test run information for debugging @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param dataStoreId - @return ApiGetDataStoreDefinitionFileRequest + @param testId id of the test + @param runId id of the run + @return ApiExportTestRunRequest */ -func (a *ApiApiService) GetDataStoreDefinitionFile(ctx context.Context, dataStoreId string) ApiGetDataStoreDefinitionFileRequest { - return ApiGetDataStoreDefinitionFileRequest{ - ApiService: a, - ctx: ctx, - dataStoreId: dataStoreId, +func (a *ApiApiService) ExportTestRun(ctx context.Context, testId string, runId int32) ApiExportTestRunRequest { + return ApiExportTestRunRequest{ + ApiService: a, + ctx: ctx, + testId: testId, + runId: runId, } } // Execute executes the request // -// @return string -func (a *ApiApiService) GetDataStoreDefinitionFileExecute(r ApiGetDataStoreDefinitionFileRequest) (string, *http.Response, error) { +// @return ExportedTestInformation +func (a *ApiApiService) ExportTestRunExecute(r ApiExportTestRunRequest) (*ExportedTestInformation, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} formFiles []formFile - localVarReturnValue string + localVarReturnValue *ExportedTestInformation ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ApiApiService.GetDataStoreDefinitionFile") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ApiApiService.ExportTestRun") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } - localVarPath := localBasePath + "/datastores/{dataStoreId}/definition.yaml" - localVarPath = strings.Replace(localVarPath, "{"+"dataStoreId"+"}", url.PathEscape(parameterValueToString(r.dataStoreId, "dataStoreId")), -1) + localVarPath := localBasePath + "/tests/{testId}/run/{runId}/export" + localVarPath = strings.Replace(localVarPath, "{"+"testId"+"}", url.PathEscape(parameterValueToString(r.testId, "testId")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"runId"+"}", url.PathEscape(parameterValueToString(r.runId, "runId")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -1618,7 +1102,7 @@ func (a *ApiApiService) GetDataStoreDefinitionFileExecute(r ApiGetDataStoreDefin } // to determine the Accept header - localVarHTTPHeaderAccepts := []string{"application/yaml"} + localVarHTTPHeaderAccepts := []string{"application/json"} // set Accept header localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) @@ -1662,60 +1146,31 @@ func (a *ApiApiService) GetDataStoreDefinitionFileExecute(r ApiGetDataStoreDefin return localVarReturnValue, localVarHTTPResponse, nil } -type ApiGetDataStoresRequest struct { - ctx context.Context - ApiService *ApiApiService - take *int32 - skip *int32 - query *string - sortBy *string - 
sortDirection *string -} - -// indicates how many data stores can be returned by each page -func (r ApiGetDataStoresRequest) Take(take int32) ApiGetDataStoresRequest { - r.take = &take - return r -} - -// indicates how many data stores will be skipped when paginating -func (r ApiGetDataStoresRequest) Skip(skip int32) ApiGetDataStoresRequest { - r.skip = &skip - return r -} - -// query to search data stores, based on data store name -func (r ApiGetDataStoresRequest) Query(query string) ApiGetDataStoresRequest { - r.query = &query - return r -} - -// indicates the sort field for the data stores -func (r ApiGetDataStoresRequest) SortBy(sortBy string) ApiGetDataStoresRequest { - r.sortBy = &sortBy - return r +type ApiExpressionResolveRequest struct { + ctx context.Context + ApiService *ApiApiService + resolveRequestInfo *ResolveRequestInfo } -// indicates the sort direction for the data stores -func (r ApiGetDataStoresRequest) SortDirection(sortDirection string) ApiGetDataStoresRequest { - r.sortDirection = &sortDirection +func (r ApiExpressionResolveRequest) ResolveRequestInfo(resolveRequestInfo ResolveRequestInfo) ApiExpressionResolveRequest { + r.resolveRequestInfo = &resolveRequestInfo return r } -func (r ApiGetDataStoresRequest) Execute() ([]DataStore, *http.Response, error) { - return r.ApiService.GetDataStoresExecute(r) +func (r ApiExpressionResolveRequest) Execute() (*ResolveResponseInfo, *http.Response, error) { + return r.ApiService.ExpressionResolveExecute(r) } /* -GetDataStores Get all Data Stores +ExpressionResolve resolves an expression and returns the result string -Get all Data Stores +resolves an expression and returns the result string @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
- @return ApiGetDataStoresRequest + @return ApiExpressionResolveRequest */ -func (a *ApiApiService) GetDataStores(ctx context.Context) ApiGetDataStoresRequest { - return ApiGetDataStoresRequest{ +func (a *ApiApiService) ExpressionResolve(ctx context.Context) ApiExpressionResolveRequest { + return ApiExpressionResolveRequest{ ApiService: a, ctx: ctx, } @@ -1723,43 +1178,28 @@ func (a *ApiApiService) GetDataStores(ctx context.Context) ApiGetDataStoresReque // Execute executes the request // -// @return []DataStore -func (a *ApiApiService) GetDataStoresExecute(r ApiGetDataStoresRequest) ([]DataStore, *http.Response, error) { +// @return ResolveResponseInfo +func (a *ApiApiService) ExpressionResolveExecute(r ApiExpressionResolveRequest) (*ResolveResponseInfo, *http.Response, error) { var ( - localVarHTTPMethod = http.MethodGet + localVarHTTPMethod = http.MethodPost localVarPostBody interface{} formFiles []formFile - localVarReturnValue []DataStore + localVarReturnValue *ResolveResponseInfo ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ApiApiService.GetDataStores") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ApiApiService.ExpressionResolve") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } - localVarPath := localBasePath + "/datastores" + localVarPath := localBasePath + "/expressions/resolve" localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if r.take != nil { - parameterAddToQuery(localVarQueryParams, "take", r.take, "") - } - if r.skip != nil { - parameterAddToQuery(localVarQueryParams, "skip", r.skip, "") - } - if r.query != nil { - parameterAddToQuery(localVarQueryParams, "query", r.query, "") - } - if r.sortBy != nil { - parameterAddToQuery(localVarQueryParams, "sortBy", r.sortBy, "") - } - if r.sortDirection != nil { - parameterAddToQuery(localVarQueryParams, "sortDirection", r.sortDirection, "") - } // to determine the Content-Type header - localVarHTTPContentTypes := []string{} + localVarHTTPContentTypes := []string{"application/json"} // set Content-Type header localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) @@ -1775,6 +1215,8 @@ func (a *ApiApiService) GetDataStoresExecute(r ApiGetDataStoresRequest) ([]DataS if localVarHTTPHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept } + // body params + localVarPostBody = r.resolveRequestInfo req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) if err != nil { return localVarReturnValue, nil, err @@ -2030,31 +1472,31 @@ type ApiGetEnvironmentsRequest struct { sortDirection *string } -// indicates how many environments can be returned by each page +// indicates how many resources can be returned by each page func (r ApiGetEnvironmentsRequest) Take(take int32) ApiGetEnvironmentsRequest { r.take = &take return r } -// indicates how many environments will be skipped when paginating +// indicates how many resources will be skipped when paginating func (r ApiGetEnvironmentsRequest) Skip(skip int32) ApiGetEnvironmentsRequest { r.skip = &skip return r } -// query to search environments, based on environments name and description +// query to search resources func (r ApiGetEnvironmentsRequest) Query(query string) ApiGetEnvironmentsRequest { r.query = &query return r } -// indicates the sort field for the environments +// 
indicates the sort field for the resources func (r ApiGetEnvironmentsRequest) SortBy(sortBy string) ApiGetEnvironmentsRequest { r.sortBy = &sortBy return r } -// indicates the sort direction for the environments +// indicates the sort direction for the resources func (r ApiGetEnvironmentsRequest) SortDirection(sortDirection string) ApiGetEnvironmentsRequest { r.sortDirection = &sortDirection return r @@ -2180,31 +1622,31 @@ type ApiGetResourcesRequest struct { sortDirection *string } -// indicates how many transactions can be returned by each page +// indicates how many resources can be returned by each page func (r ApiGetResourcesRequest) Take(take int32) ApiGetResourcesRequest { r.take = &take return r } -// indicates how many transactions will be skipped when paginating +// indicates how many resources will be skipped when paginating func (r ApiGetResourcesRequest) Skip(skip int32) ApiGetResourcesRequest { r.skip = &skip return r } -// query to search transactions, based on transaction name and description +// query to search resources func (r ApiGetResourcesRequest) Query(query string) ApiGetResourcesRequest { r.query = &query return r } -// indicates the sort field for the transactions +// indicates the sort field for the resources func (r ApiGetResourcesRequest) SortBy(sortBy string) ApiGetResourcesRequest { r.sortBy = &sortBy return r } -// indicates the sort direction for the transactions +// indicates the sort direction for the resources func (r ApiGetResourcesRequest) SortDirection(sortDirection string) ApiGetResourcesRequest { r.sortDirection = &sortDirection return r @@ -2324,7 +1766,7 @@ type ApiGetRunResultJUnitRequest struct { ctx context.Context ApiService *ApiApiService testId string - runId string + runId int32 } func (r ApiGetRunResultJUnitRequest) Execute() (string, *http.Response, error) { @@ -2337,11 +1779,11 @@ GetRunResultJUnit get test run results in JUnit xml format get test run results in JUnit xml format @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param testId - @param runId + @param testId id of the test + @param runId id of the run @return ApiGetRunResultJUnitRequest */ -func (a *ApiApiService) GetRunResultJUnit(ctx context.Context, testId string, runId string) ApiGetRunResultJUnitRequest { +func (a *ApiApiService) GetRunResultJUnit(ctx context.Context, testId string, runId int32) ApiGetRunResultJUnitRequest { return ApiGetRunResultJUnitRequest{ ApiService: a, ctx: ctx, @@ -2444,7 +1886,7 @@ GetTest get test get test @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param testId + @param testId id of the test @return ApiGetTestRequest */ func (a *ApiApiService) GetTest(ctx context.Context, testId string) ApiGetTestRequest { @@ -2536,10 +1978,11 @@ type ApiGetTestResultSelectedSpansRequest struct { ctx context.Context ApiService *ApiApiService testId string - runId string + runId int32 query *string } +// query to search resources func (r ApiGetTestResultSelectedSpansRequest) Query(query string) ApiGetTestResultSelectedSpansRequest { r.query = &query return r @@ -2555,11 +1998,11 @@ GetTestResultSelectedSpans retrieve spans that will be selected by selector get the spans ids that would be selected by a specific selector query @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. 
Passed from http.Request or context.Background(). - @param testId - @param runId + @param testId id of the test + @param runId id of the run @return ApiGetTestResultSelectedSpansRequest */ -func (a *ApiApiService) GetTestResultSelectedSpans(ctx context.Context, testId string, runId string) ApiGetTestResultSelectedSpansRequest { +func (a *ApiApiService) GetTestResultSelectedSpans(ctx context.Context, testId string, runId int32) ApiGetTestResultSelectedSpansRequest { return ApiGetTestResultSelectedSpansRequest{ ApiService: a, ctx: ctx, @@ -2653,7 +2096,7 @@ type ApiGetTestRunRequest struct { ctx context.Context ApiService *ApiApiService testId string - runId string + runId int32 } func (r ApiGetTestRunRequest) Execute() (*TestRun, *http.Response, error) { @@ -2666,11 +2109,11 @@ GetTestRun get test Run get a particular test Run @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param testId - @param runId + @param testId id of the test + @param runId id of the run @return ApiGetTestRunRequest */ -func (a *ApiApiService) GetTestRun(ctx context.Context, testId string, runId string) ApiGetTestRunRequest { +func (a *ApiApiService) GetTestRun(ctx context.Context, testId string, runId int32) ApiGetTestRunRequest { return ApiGetTestRunRequest{ ApiService: a, ctx: ctx, @@ -2774,8 +2217,8 @@ GetTestRunEvents get events from a test run get events from a test run @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param testId - @param runId + @param testId id of the test + @param runId id of the run @return ApiGetTestRunEventsRequest */ func (a *ApiApiService) GetTestRunEvents(ctx context.Context, testId string, runId int32) ApiGetTestRunEventsRequest { @@ -2873,13 +2316,13 @@ type ApiGetTestRunsRequest struct { skip *int32 } -// indicates how many results can be returned by each page +// indicates how many resources can be returned by each page func (r ApiGetTestRunsRequest) Take(take int32) ApiGetTestRunsRequest { r.take = &take return r } -// indicates how many results will be skipped when paginating +// indicates how many resources will be skipped when paginating func (r ApiGetTestRunsRequest) Skip(skip int32) ApiGetTestRunsRequest { r.skip = &skip return r @@ -2895,7 +2338,7 @@ GetTestRuns get the runs for a test get the runs from a particular test @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param testId + @param testId id of the test @return ApiGetTestRunsRequest */ func (a *ApiApiService) GetTestRuns(ctx context.Context, testId string) ApiGetTestRunsRequest { @@ -3005,7 +2448,7 @@ GetTestSpecs Get definition for a test Gets definition for a test @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param testId + @param testId id of the test @return ApiGetTestSpecsRequest */ func (a *ApiApiService) GetTestSpecs(ctx context.Context, testId string) ApiGetTestSpecsRequest { @@ -3110,8 +2553,8 @@ GetTestVersion get a test specific version get a test specific version @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
- @param testId - @param version + @param testId id of the test + @param version version of the test @return ApiGetTestVersionRequest */ func (a *ApiApiService) GetTestVersion(ctx context.Context, testId string, version int32) ApiGetTestVersionRequest { @@ -3218,8 +2661,8 @@ GetTestVersionDefinitionFile Get the test definition as an YAML file Get the test definition as an YAML file @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param testId - @param version + @param testId id of the test + @param version version of the test @return ApiGetTestVersionDefinitionFileRequest */ func (a *ApiApiService) GetTestVersionDefinitionFile(ctx context.Context, testId string, version int32) ApiGetTestVersionDefinitionFileRequest { @@ -3319,31 +2762,31 @@ type ApiGetTestsRequest struct { sortDirection *string } -// indicates how many tests can be returned by each page +// indicates how many resources can be returned by each page func (r ApiGetTestsRequest) Take(take int32) ApiGetTestsRequest { r.take = &take return r } -// indicates how many tests will be skipped when paginating +// indicates how many resources will be skipped when paginating func (r ApiGetTestsRequest) Skip(skip int32) ApiGetTestsRequest { r.skip = &skip return r } -// query to search tests, based on test name and description +// query to search resources func (r ApiGetTestsRequest) Query(query string) ApiGetTestsRequest { r.query = &query return r } -// indicates the sort field for the tests +// indicates the sort field for the resources func (r ApiGetTestsRequest) SortBy(sortBy string) ApiGetTestsRequest { r.sortBy = &sortBy return r } -// indicates the sort direction for the tests +// indicates the sort direction for the resources func (r ApiGetTestsRequest) SortDirection(sortDirection string) ApiGetTestsRequest { r.sortDirection = &sortDirection return r @@ -3475,7 +2918,7 @@ GetTransaction get transaction get transaction @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param transactionId + @param transactionId id of the transaction @return ApiGetTransactionRequest */ func (a *ApiApiService) GetTransaction(ctx context.Context, transactionId string) ApiGetTransactionRequest { @@ -3580,8 +3023,8 @@ GetTransactionRun Get a specific run from a particular transaction Get a specific run from a particular transaction @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
- @param transactionId - @param runId + @param transactionId id of the transaction + @param runId id of the run @return ApiGetTransactionRunRequest */ func (a *ApiApiService) GetTransactionRun(ctx context.Context, transactionId string, runId int32) ApiGetTransactionRunRequest { @@ -3679,13 +3122,13 @@ type ApiGetTransactionRunsRequest struct { skip *int32 } -// indicates how many results can be returned by each page +// indicates how many resources can be returned by each page func (r ApiGetTransactionRunsRequest) Take(take int32) ApiGetTransactionRunsRequest { r.take = &take return r } -// indicates how many results will be skipped when paginating +// indicates how many resources will be skipped when paginating func (r ApiGetTransactionRunsRequest) Skip(skip int32) ApiGetTransactionRunsRequest { r.skip = &skip return r @@ -3701,7 +3144,7 @@ GetTransactionRuns Get all runs from a particular transaction Get all runs from a particular transaction @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param transactionId + @param transactionId id of the transaction @return ApiGetTransactionRunsRequest */ func (a *ApiApiService) GetTransactionRuns(ctx context.Context, transactionId string) ApiGetTransactionRunsRequest { @@ -3812,8 +3255,8 @@ GetTransactionVersion get a transaction specific version get a transaction specific version @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param transactionId - @param version + @param transactionId id of the transaction + @param version version of the test @return ApiGetTransactionVersionRequest */ func (a *ApiApiService) GetTransactionVersion(ctx context.Context, transactionId string, version int32) ApiGetTransactionVersionRequest { @@ -3920,8 +3363,8 @@ GetTransactionVersionDefinitionFile Get the transaction definition as an YAML fi Get the transaction as an YAML file @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
- @param transactionId - @param version + @param transactionId id of the transaction + @param version version of the test @return ApiGetTransactionVersionDefinitionFileRequest */ func (a *ApiApiService) GetTransactionVersionDefinitionFile(ctx context.Context, transactionId string, version int32) ApiGetTransactionVersionDefinitionFileRequest { @@ -4021,31 +3464,31 @@ type ApiGetTransactionsRequest struct { sortDirection *string } -// indicates how many transactions can be returned by each page +// indicates how many resources can be returned by each page func (r ApiGetTransactionsRequest) Take(take int32) ApiGetTransactionsRequest { r.take = &take return r } -// indicates how many transactions will be skipped when paginating +// indicates how many resources will be skipped when paginating func (r ApiGetTransactionsRequest) Skip(skip int32) ApiGetTransactionsRequest { r.skip = &skip return r } -// query to search transactions, based on transaction name and description +// query to search resources func (r ApiGetTransactionsRequest) Query(query string) ApiGetTransactionsRequest { r.query = &query return r } -// indicates the sort field for the transactions +// indicates the sort field for the resources func (r ApiGetTransactionsRequest) SortBy(sortBy string) ApiGetTransactionsRequest { r.sortBy = &sortBy return r } -// indicates the sort direction for the transactions +// indicates the sort direction for the resources func (r ApiGetTransactionsRequest) SortDirection(sortDirection string) ApiGetTransactionsRequest { r.sortDirection = &sortDirection return r @@ -4273,7 +3716,7 @@ type ApiRerunTestRunRequest struct { ctx context.Context ApiService *ApiApiService testId string - runId string + runId int32 } func (r ApiRerunTestRunRequest) Execute() (*TestRun, *http.Response, error) { @@ -4286,11 +3729,11 @@ RerunTestRun rerun a test run rerun a test run @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param testId - @param runId + @param testId id of the test + @param runId id of the run @return ApiRerunTestRunRequest */ -func (a *ApiApiService) RerunTestRun(ctx context.Context, testId string, runId string) ApiRerunTestRunRequest { +func (a *ApiApiService) RerunTestRun(ctx context.Context, testId string, runId int32) ApiRerunTestRunRequest { return ApiRerunTestRunRequest{ ApiService: a, ctx: ctx, @@ -4399,7 +3842,7 @@ RunTest run test run a particular test @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param testId + @param testId id of the test @return ApiRunTestRequest */ func (a *ApiApiService) RunTest(ctx context.Context, testId string) ApiRunTestRequest { @@ -4521,7 +3964,7 @@ RunTransaction run transaction run a particular transaction @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
- @param transactionId + @param transactionId id of the transaction @return ApiRunTransactionRequest */ func (a *ApiApiService) RunTransaction(ctx context.Context, transactionId string) ApiRunTransactionRequest { @@ -4615,7 +4058,7 @@ type ApiStopTestRunRequest struct { ctx context.Context ApiService *ApiApiService testId string - runId string + runId int32 } func (r ApiStopTestRunRequest) Execute() (*http.Response, error) { @@ -4628,11 +4071,11 @@ StopTestRun stops the execution of a test run stops the execution of a test run @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param testId - @param runId + @param testId id of the test + @param runId id of the run @return ApiStopTestRunRequest */ -func (a *ApiApiService) StopTestRun(ctx context.Context, testId string, runId string) ApiStopTestRunRequest { +func (a *ApiApiService) StopTestRun(ctx context.Context, testId string, runId int32) ApiStopTestRunRequest { return ApiStopTestRunRequest{ ApiService: a, ctx: ctx, @@ -4815,106 +4258,6 @@ func (a *ApiApiService) TestConnectionExecute(r ApiTestConnectionRequest) (*Test return localVarReturnValue, localVarHTTPResponse, nil } -type ApiUpdateDataStoreRequest struct { - ctx context.Context - ApiService *ApiApiService - dataStoreId string - dataStore *DataStore -} - -func (r ApiUpdateDataStoreRequest) DataStore(dataStore DataStore) ApiUpdateDataStoreRequest { - r.dataStore = &dataStore - return r -} - -func (r ApiUpdateDataStoreRequest) Execute() (*http.Response, error) { - return r.ApiService.UpdateDataStoreExecute(r) -} - -/* -UpdateDataStore Update a Data Store - -Update a Data Store - - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
- @param dataStoreId - @return ApiUpdateDataStoreRequest -*/ -func (a *ApiApiService) UpdateDataStore(ctx context.Context, dataStoreId string) ApiUpdateDataStoreRequest { - return ApiUpdateDataStoreRequest{ - ApiService: a, - ctx: ctx, - dataStoreId: dataStoreId, - } -} - -// Execute executes the request -func (a *ApiApiService) UpdateDataStoreExecute(r ApiUpdateDataStoreRequest) (*http.Response, error) { - var ( - localVarHTTPMethod = http.MethodPut - localVarPostBody interface{} - formFiles []formFile - ) - - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ApiApiService.UpdateDataStore") - if err != nil { - return nil, &GenericOpenAPIError{error: err.Error()} - } - - localVarPath := localBasePath + "/datastores/{dataStoreId}" - localVarPath = strings.Replace(localVarPath, "{"+"dataStoreId"+"}", url.PathEscape(parameterValueToString(r.dataStoreId, "dataStoreId")), -1) - - localVarHeaderParams := make(map[string]string) - localVarQueryParams := url.Values{} - localVarFormParams := url.Values{} - - // to determine the Content-Type header - localVarHTTPContentTypes := []string{"application/json"} - - // set Content-Type header - localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) - if localVarHTTPContentType != "" { - localVarHeaderParams["Content-Type"] = localVarHTTPContentType - } - - // to determine the Accept header - localVarHTTPHeaderAccepts := []string{} - - // set Accept header - localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) - if localVarHTTPHeaderAccept != "" { - localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept - } - // body params - localVarPostBody = r.dataStore - req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) - if err != nil { - return nil, err - } - - localVarHTTPResponse, err := a.client.callAPI(req) - if err != nil || localVarHTTPResponse == nil { - return localVarHTTPResponse, err - } - - localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) - localVarHTTPResponse.Body.Close() - localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) - if err != nil { - return localVarHTTPResponse, err - } - - if localVarHTTPResponse.StatusCode >= 300 { - newErr := &GenericOpenAPIError{ - body: localVarBody, - error: localVarHTTPResponse.Status, - } - return localVarHTTPResponse, newErr - } - - return localVarHTTPResponse, nil -} - type ApiUpdateEnvironmentRequest struct { ctx context.Context ApiService *ApiApiService @@ -5037,7 +4380,7 @@ UpdateTest update test update test action @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param testId + @param testId id of the test @return ApiUpdateTestRequest */ func (a *ApiApiService) UpdateTest(ctx context.Context, testId string) ApiUpdateTestRequest { @@ -5137,7 +4480,7 @@ UpdateTransaction update transaction update transaction action @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
- @param transactionId + @param transactionId id of the transaction @return ApiUpdateTransactionRequest */ func (a *ApiApiService) UpdateTransaction(ctx context.Context, transactionId string) ApiUpdateTransactionRequest { diff --git a/cli/openapi/api_resource_api.go b/cli/openapi/api_resource_api.go index 7b31ae83d7..9cfab1b44a 100644 --- a/cli/openapi/api_resource_api.go +++ b/cli/openapi/api_resource_api.go @@ -130,6 +130,98 @@ func (a *ResourceApiApiService) CreateDemoExecute(r ApiCreateDemoRequest) (*Demo return localVarReturnValue, localVarHTTPResponse, nil } +type ApiDeleteDataStoreRequest struct { + ctx context.Context + ApiService *ResourceApiApiService + dataStoreId string +} + +func (r ApiDeleteDataStoreRequest) Execute() (*http.Response, error) { + return r.ApiService.DeleteDataStoreExecute(r) +} + +/* +DeleteDataStore Delete a Data Store + +Delete a Data Store + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + @param dataStoreId ID of a datastore used on Tracetest to configure how to fetch traces in a test + @return ApiDeleteDataStoreRequest +*/ +func (a *ResourceApiApiService) DeleteDataStore(ctx context.Context, dataStoreId string) ApiDeleteDataStoreRequest { + return ApiDeleteDataStoreRequest{ + ApiService: a, + ctx: ctx, + dataStoreId: dataStoreId, + } +} + +// Execute executes the request +func (a *ResourceApiApiService) DeleteDataStoreExecute(r ApiDeleteDataStoreRequest) (*http.Response, error) { + var ( + localVarHTTPMethod = http.MethodDelete + localVarPostBody interface{} + formFiles []formFile + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ResourceApiApiService.DeleteDataStore") + if err != nil { + return nil, &GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/datastores/{dataStoreId}" + localVarPath = strings.Replace(localVarPath, "{"+"dataStoreId"+"}", url.PathEscape(parameterValueToString(r.dataStoreId, "dataStoreId")), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarHTTPResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + return localVarHTTPResponse, newErr + } + + return 
localVarHTTPResponse, nil +} + type ApiDeleteDemoRequest struct { ctx context.Context ApiService *ResourceApiApiService @@ -146,7 +238,7 @@ DeleteDemo Delete a Demonstration setting Delete a demonstration used on Tracetest as quick start examples. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param demoId ID of a demonstration used on Tracetest as quick start examples. + @param demoId ID of a demonstration used on Tracetest as quick start examples @return ApiDeleteDemoRequest */ func (a *ResourceApiApiService) DeleteDemo(ctx context.Context, demoId string) ApiDeleteDemoRequest { @@ -238,7 +330,7 @@ GetConfiguration Get Tracetest configuration Get Tracetest configuration @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param configId ID of the configuration resource used on Tracetest. It should be set as 'current'. + @param configId ID of the configuration resource used on Tracetest. It should be set as 'current' @return ApiGetConfigurationRequest */ func (a *ResourceApiApiService) GetConfiguration(ctx context.Context, configId string) ApiGetConfigurationRequest { @@ -326,6 +418,110 @@ func (a *ResourceApiApiService) GetConfigurationExecute(r ApiGetConfigurationReq return localVarReturnValue, localVarHTTPResponse, nil } +type ApiGetDataStoreRequest struct { + ctx context.Context + ApiService *ResourceApiApiService + dataStoreId string +} + +func (r ApiGetDataStoreRequest) Execute() (*DataStore, *http.Response, error) { + return r.ApiService.GetDataStoreExecute(r) +} + +/* +GetDataStore Get a Data Store + +Get a Data Store + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ @param dataStoreId ID of a datastore used on Tracetest to configure how to fetch traces in a test + @return ApiGetDataStoreRequest +*/ +func (a *ResourceApiApiService) GetDataStore(ctx context.Context, dataStoreId string) ApiGetDataStoreRequest { + return ApiGetDataStoreRequest{ + ApiService: a, + ctx: ctx, + dataStoreId: dataStoreId, + } +} + +// Execute executes the request +// +// @return DataStore +func (a *ResourceApiApiService) GetDataStoreExecute(r ApiGetDataStoreRequest) (*DataStore, *http.Response, error) { + var ( + localVarHTTPMethod = http.MethodGet + localVarPostBody interface{} + formFiles []formFile + localVarReturnValue *DataStore + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ResourceApiApiService.GetDataStore") + if err != nil { + return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/datastores/{dataStoreId}" + localVarPath = strings.Replace(localVarPath, "{"+"dataStoreId"+"}", url.PathEscape(parameterValueToString(r.dataStoreId, "dataStoreId")), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} + type ApiGetDemoRequest struct { ctx context.Context ApiService *ResourceApiApiService @@ -342,7 +538,7 @@ GetDemo Get Demonstration setting Get a demonstration used on Tracetest as quick start examples. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param demoId ID of a demonstration used on Tracetest as quick start examples. 
+ @param demoId ID of a demonstration used on Tracetest as quick start examples @return ApiGetDemoRequest */ func (a *ResourceApiApiService) GetDemo(ctx context.Context, demoId string) ApiGetDemoRequest { @@ -446,7 +642,7 @@ GetPollingProfile Get Polling Profile Get a polling profile used on Tracetest to configure how to fetch traces in a test. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param pollingProfileId ID of a polling profile used on Tracetest to configure how to fetch traces in a test. It should be set as 'current'. + @param pollingProfileId ID of a polling profile used on Tracetest to configure how to fetch traces in a test. It should be set as 'current' @return ApiGetPollingProfileRequest */ func (a *ResourceApiApiService) GetPollingProfile(ctx context.Context, pollingProfileId string) ApiGetPollingProfileRequest { @@ -543,25 +739,25 @@ type ApiListDemosRequest struct { sortDirection *string } -// Indicates the maximum number of demos that can be returned on this call. +// indicates how many resources can be returned by each page func (r ApiListDemosRequest) Take(take int32) ApiListDemosRequest { r.take = &take return r } -// Indicates how many demos will be skipped when paginating. +// indicates how many resources will be skipped when paginating func (r ApiListDemosRequest) Skip(skip int32) ApiListDemosRequest { r.skip = &skip return r } -// Indicates the sort field for on which all demos will be sorted. +// indicates the sort field for the resources func (r ApiListDemosRequest) SortBy(sortBy string) ApiListDemosRequest { r.sortBy = &sortBy return r } -// Indicates the sort direction for the demos (ascending or descending). +// indicates the sort direction for the resources func (r ApiListDemosRequest) SortDirection(sortDirection string) ApiListDemosRequest { r.sortDirection = &sortDirection return r @@ -696,7 +892,7 @@ UpdateConfiguration Update Tracetest configuration Update Tracetest configuration @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param configId ID of the configuration resource used on Tracetest. It should be set as 'current'. + @param configId ID of the configuration resource used on Tracetest. It should be set as 'current' @return ApiUpdateConfigurationRequest */ func (a *ResourceApiApiService) UpdateConfiguration(ctx context.Context, configId string) ApiUpdateConfigurationRequest { @@ -786,6 +982,106 @@ func (a *ResourceApiApiService) UpdateConfigurationExecute(r ApiUpdateConfigurat return localVarReturnValue, localVarHTTPResponse, nil } +type ApiUpdateDataStoreRequest struct { + ctx context.Context + ApiService *ResourceApiApiService + dataStoreId string + dataStore *DataStore +} + +func (r ApiUpdateDataStoreRequest) DataStore(dataStore DataStore) ApiUpdateDataStoreRequest { + r.dataStore = &dataStore + return r +} + +func (r ApiUpdateDataStoreRequest) Execute() (*http.Response, error) { + return r.ApiService.UpdateDataStoreExecute(r) +} + +/* +UpdateDataStore Update a Data Store + +Update a Data Store + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ @param dataStoreId ID of a datastore used on Tracetest to configure how to fetch traces in a test + @return ApiUpdateDataStoreRequest +*/ +func (a *ResourceApiApiService) UpdateDataStore(ctx context.Context, dataStoreId string) ApiUpdateDataStoreRequest { + return ApiUpdateDataStoreRequest{ + ApiService: a, + ctx: ctx, + dataStoreId: dataStoreId, + } +} + +// Execute executes the request +func (a *ResourceApiApiService) UpdateDataStoreExecute(r ApiUpdateDataStoreRequest) (*http.Response, error) { + var ( + localVarHTTPMethod = http.MethodPut + localVarPostBody interface{} + formFiles []formFile + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "ResourceApiApiService.UpdateDataStore") + if err != nil { + return nil, &GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/datastores/{dataStoreId}" + localVarPath = strings.Replace(localVarPath, "{"+"dataStoreId"+"}", url.PathEscape(parameterValueToString(r.dataStoreId, "dataStoreId")), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + // body params + localVarPostBody = r.dataStore + req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarHTTPResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + return localVarHTTPResponse, newErr + } + + return localVarHTTPResponse, nil +} + type ApiUpdateDemoRequest struct { ctx context.Context ApiService *ResourceApiApiService @@ -808,7 +1104,7 @@ UpdateDemo Update a Demonstration setting Update a demonstration used on Tracetest as quick start examples. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - @param demoId ID of a demonstration used on Tracetest as quick start examples. + @param demoId ID of a demonstration used on Tracetest as quick start examples @return ApiUpdateDemoRequest */ func (a *ResourceApiApiService) UpdateDemo(ctx context.Context, demoId string) ApiUpdateDemoRequest { @@ -920,7 +1216,7 @@ UpdatePollingProfile Update a Polling Profile Update a polling profile used on Tracetest to configure how to fetch traces in a test. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. 
Passed from http.Request or context.Background(). - @param pollingProfileId ID of a polling profile used on Tracetest to configure how to fetch traces in a test. It should be set as 'current'. + @param pollingProfileId ID of a polling profile used on Tracetest to configure how to fetch traces in a test. It should be set as 'current' @return ApiUpdatePollingProfileRequest */ func (a *ResourceApiApiService) UpdatePollingProfile(ctx context.Context, pollingProfileId string) ApiUpdatePollingProfileRequest { diff --git a/cli/utils/api.go b/cli/utils/api.go index 59af7b7ad3..0efd1a0e09 100644 --- a/cli/utils/api.go +++ b/cli/utils/api.go @@ -1,8 +1,10 @@ package utils import ( + "context" "fmt" "io" + "io/ioutil" "net" "net/http" "strings" @@ -10,9 +12,17 @@ import ( "github.com/kubeshop/tracetest/cli/analytics" "github.com/kubeshop/tracetest/cli/config" + "github.com/kubeshop/tracetest/cli/file" "github.com/kubeshop/tracetest/cli/openapi" ) +type ListArgs struct { + Take int32 + Skip int32 + SortDirection string + SortBy string +} + func GetAPIClient(cliConfig config.Config) *openapi.APIClient { config := openapi.NewConfiguration() config.AddDefaultHeader("x-client-id", analytics.ClientID()) @@ -30,9 +40,10 @@ func GetAPIClient(cliConfig config.Config) *openapi.APIClient { } type ResourceClient struct { - Client http.Client - BaseUrl string - BaseHeader http.Header + Client http.Client + BaseUrl string + BaseHeader http.Header + ResourceType string } func GetResourceAPIClient(resourceType string, cliConfig config.Config) ResourceClient { @@ -54,9 +65,10 @@ func GetResourceAPIClient(resourceType string, cliConfig config.Config) Resource } return ResourceClient{ - Client: client, - BaseUrl: baseUrl, - BaseHeader: baseHeader, + Client: client, + BaseUrl: baseUrl, + BaseHeader: baseHeader, + ResourceType: resourceType, } } @@ -74,3 +86,117 @@ func (resourceClient ResourceClient) NewRequest(url string, method string, body request.Header = resourceClient.BaseHeader return request, err } + +func (resourceClient ResourceClient) Update(ctx context.Context, file file.File, ID string) error { + url := fmt.Sprintf("%s/%s", resourceClient.BaseUrl, ID) + request, err := resourceClient.NewRequest(url, http.MethodPut, file.Contents()) + if err != nil { + return fmt.Errorf("could not create request: %w", err) + } + + resp, err := resourceClient.Client.Do(request) + if err != nil { + return fmt.Errorf("could not update %s: %w", resourceClient.ResourceType, err) + } + + defer resp.Body.Close() + if resp.StatusCode == http.StatusNotFound { + return fmt.Errorf("%s id doesn't exist on server. 
Remove it from the definition file and try again", resourceClient.ResourceType) + } + + if resp.StatusCode == http.StatusUnprocessableEntity { + // validation error + body, err := ioutil.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("could not send request: %w", err) + } + + validationError := string(body) + return fmt.Errorf("invalid %s: %s", resourceClient.ResourceType, validationError) + } + + _, err = file.SaveChanges(IOReadCloserToString(resp.Body)) + return err +} + +func (resourceClient ResourceClient) Delete(ctx context.Context, ID string) error { + url := fmt.Sprintf("%s/%s", resourceClient.BaseUrl, ID) + request, err := resourceClient.NewRequest(url, http.MethodDelete, "") + if err != nil { + return fmt.Errorf("could not delete resource: %w", err) + } + + _, err = resourceClient.Client.Do(request) + return err +} + +func (resourceClient ResourceClient) Get(ctx context.Context, id string) (string, error) { + request, err := resourceClient.NewRequest(fmt.Sprintf("%s/%s", resourceClient.BaseUrl, id), http.MethodGet, "") + if err != nil { + return "", fmt.Errorf("could not create request: %w", err) + } + + resp, err := resourceClient.Client.Do(request) + if err != nil { + return "", fmt.Errorf("could not get %s: %w", resourceClient.ResourceType, err) + } + + defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { + body, err := ioutil.ReadAll(resp.Body) + if err != nil { + return "", err + } + + validationError := string(body) + return "", fmt.Errorf("invalid %s: %s", resourceClient.ResourceType, validationError) + } + + return IOReadCloserToString(resp.Body), nil +} + +func (resourceClient ResourceClient) List(ctx context.Context, listArgs ListArgs) (string, error) { + url := fmt.Sprintf("%s?skip=%d&take=%d&sortBy=%s&sortDirection=%s", resourceClient.BaseUrl, listArgs.Skip, listArgs.Take, listArgs.SortBy, listArgs.SortDirection) + request, err := resourceClient.NewRequest(url, http.MethodGet, "") + if err != nil { + return "", fmt.Errorf("could not create request: %w", err) + } + + resp, err := resourceClient.Client.Do(request) + if err != nil { + return "", fmt.Errorf("could not send request: %w", err) + } + + defer resp.Body.Close() + return IOReadCloserToString(resp.Body), nil +} + +func (resourceClient ResourceClient) Create(ctx context.Context, file file.File) error { + request, err := resourceClient.NewRequest(resourceClient.BaseUrl, http.MethodPost, file.Contents()) + if err != nil { + return fmt.Errorf("could not create request: %w", err) + } + + resp, err := resourceClient.Client.Do(request) + if err != nil { + return fmt.Errorf("could not send request: %w", err) + } + + defer resp.Body.Close() + if resp.StatusCode == http.StatusUnprocessableEntity { + // validation error + body, err := ioutil.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("could not send request: %w", err) + } + + validationError := string(body) + return fmt.Errorf("invalid %s: %s", resourceClient.ResourceType, validationError) + } + if err != nil { + return fmt.Errorf("could not create %s: %w", resourceClient.ResourceType, err) + } + + _, err = file.SaveChanges(IOReadCloserToString(resp.Body)) + return err +} diff --git a/docs/docs/cli/creating-data-stores.md b/docs/docs/cli/creating-data-stores.md index 4b05e266f0..78a2866010 100644 --- a/docs/docs/cli/creating-data-stores.md +++ b/docs/docs/cli/creating-data-stores.md @@ -1,26 +1,29 @@ # Defining Data Stores as Text Files + You might have multiple Tracetest instances that need to be connected to the same data stores. 
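The CRUD helpers added to `ResourceClient` above compose naturally. The sketch below uses only the `GetResourceAPIClient`, `Update`, and `Get` functions from this diff; the `applyDataStore` name, the `"datastore"` resource-type string, and the `"current"` ID are placeholders chosen for illustration.

```go
package example

import (
	"context"
	"fmt"

	"github.com/kubeshop/tracetest/cli/config"
	"github.com/kubeshop/tracetest/cli/file"
	"github.com/kubeshop/tracetest/cli/utils"
)

// applyDataStore applies a data store definition file and returns the
// server-side representation. Illustrative only; not part of this change.
func applyDataStore(ctx context.Context, cliConfig config.Config, def file.File) (string, error) {
	// The resource-type string is assumed here; it selects the /datastores base URL.
	client := utils.GetResourceAPIClient("datastore", cliConfig)

	// PUT <baseUrl>/current with the YAML definition as the request body.
	if err := client.Update(ctx, def, "current"); err != nil {
		return "", fmt.Errorf("could not apply data store: %w", err)
	}

	// GET <baseUrl>/current to read back what the server stored.
	return client.Get(ctx, "current")
}
```

Note that `Update` also calls `file.SaveChanges` with the response body, so the local definition file is kept in sync with what the server returns.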
An easy way of sharing the configuration is by using a configuration file that can be applied to your Tracetest instance. ### Jaeger + ```yaml type: DataStore spec: - name: development - type: jaeger - isDefault: true - jaeger: - endpoint: 127.0.0.1:16685 - tls: - insecure: true + name: development + type: jaeger + default: true + jaeger: + endpoint: 127.0.0.1:16685 + tls: + insecure: true ``` ### Tempo + ```yaml type: DataStore spec: name: Grafana Tempo type: tempo - isDefault: true + default: true tempo: endpoint: tempo:9095 tls: @@ -28,12 +31,13 @@ spec: ``` ### OpenSearch + ```yaml type: DataStore spec: name: OpenSearch Data Store type: openSearch - isDefault: true + default: true opensearch: addresses: - http://opensearch:9200 @@ -41,24 +45,26 @@ spec: ``` ### SignalFX + ```yaml type: DataStore spec: name: SignalFX type: signalFx - isDefault: true + default: true signalFx: realm: us1 token: mytoken ``` ### Using the OpenTelemetry Collector + ```yaml type: DataStore spec: name: Opentelemetry Collector pipeline type: otlp - isDefault: true + default: true ``` > Consider reading about [how to use the OTEL collector](../configuration/connecting-to-data-stores/opentelemetry-collector.md) to send traces to your Tracetest instance. @@ -68,14 +74,15 @@ spec: To apply the configuration, you need a [configured CLI](./configuring-your-cli.md) pointed to the instance you want to apply the data store. Then you just have to enter: ``` -tracetest datastore apply -f my/data-store/file/location.yaml +tracetest apply datastore -f my/data-store/file/location.yaml ``` ## Additional Information -In the current version, you can only have one active data store at any given time. The flag `isDefault` defines which data store should be used by your tests. So, if you want to add a new data store and make sure it will be used in future test runs, make sure to define `isDefault` as `true` in the data store configuration file. + +In the current version, you can only have one active data store at any given time. The flag `default` defines which data store should be used by your tests. So, if you want to add a new data store and make sure it will be used in future test runs, make sure to define `default` as `true` in the data store configuration file. After a configuration is applied, you can export it using the CLI by using the following command: ``` -tracetest datastore export --output my/file/location.yaml --id my-data-store-id +tracetest export datastore -f my/file/location.yaml --id my-data-store-id ``` diff --git a/docs/docs/configuration/connecting-to-data-stores/awsxray.md b/docs/docs/configuration/connecting-to-data-stores/awsxray.md index 878cdeaaf1..30b8edf9c8 100644 --- a/docs/docs/configuration/connecting-to-data-stores/awsxray.md +++ b/docs/docs/configuration/connecting-to-data-stores/awsxray.md @@ -33,6 +33,7 @@ Or, if you prefer using the CLI, you can use this file config. type: DataStore spec: type: awsxray + default: true awsxray: accessKeyId: secretAccessKey: @@ -43,7 +44,7 @@ spec: Run this command in the terminal and specify the file above. 
```bash -tracetest datastore apply -f my/data-store/file/location.yaml +tracetest apply datastore -f my/data-store/file/location.yaml ``` :::tip diff --git a/docs/docs/configuration/connecting-to-data-stores/datadog.md b/docs/docs/configuration/connecting-to-data-stores/datadog.md index de6f179cef..059e5ce04a 100644 --- a/docs/docs/configuration/connecting-to-data-stores/datadog.md +++ b/docs/docs/configuration/connecting-to-data-stores/datadog.md @@ -3,7 +3,7 @@ If you want to use [Datadog](https://www.datadoghq.com/) as the trace data store, you'll configure the OpenTelemetry Collector to receive traces from your system and then send them to both Tracetest and Datadog. And, you don't have to change your existing pipelines to do so. :::tip -Examples of configuring Tracetest with Datadog can be found in the [`examples` folder of the Tracetest GitHub repo](https://github.com/kubeshop/tracetest/tree/main/examples). +Examples of configuring Tracetest with Datadog can be found in the [`examples` folder of the Tracetest GitHub repo](https://github.com/kubeshop/tracetest/tree/main/examples). ::: ## Configuring OpenTelemetry Collector to Send Traces to Both Datadog and Tracetest @@ -37,7 +37,7 @@ receivers: processors: # This configuration is needed to guarantee that the data is sent correctly to Datadog - batch: + batch: send_batch_max_size: 100 send_batch_size: 10 timeout: 10s @@ -50,7 +50,7 @@ exporters: endpoint: tracetest:21321 tls: insecure: true - + # Datadog exporter # One example on how to set up a collector configuration for Datadog can be seen here: # https://docs.datadoghq.com/opentelemetry/otel_collector_datadog_exporter/?tab=onahost @@ -90,14 +90,15 @@ type: DataStore spec: name: Datadog pipeline type: datadog - isDefault: true + default: true ``` Proceed to run this command in the terminal and specify the file above. ```bash -tracetest datastore apply -f my/data-store/file/location.yaml +tracetest apply datastore -f my/data-store/file/location.yaml ``` + :::tip To learn more, [read the recipe for running a sample app with Datadog and Tracetest](../../examples-tutorials/recipes/running-tracetest-with-datadog.md). ::: diff --git a/docs/docs/configuration/connecting-to-data-stores/elasticapm.md b/docs/docs/configuration/connecting-to-data-stores/elasticapm.md index 8c5bc5d540..36793023ef 100644 --- a/docs/docs/configuration/connecting-to-data-stores/elasticapm.md +++ b/docs/docs/configuration/connecting-to-data-stores/elasticapm.md @@ -3,12 +3,12 @@ Tracetest fetches traces from [Elasticsearch's default port](https://discuss.elastic.co/t/what-are-ports-9200-and-9300-used-for/238578) `9200`. :::tip -Examples of configuring Tracetest can be found in the [`examples` folder of the Tracetest GitHub repo](https://github.com/kubeshop/tracetest/tree/main/examples). +Examples of configuring Tracetest can be found in the [`examples` folder of the Tracetest GitHub repo](https://github.com/kubeshop/tracetest/tree/main/examples). ::: ## Configure Tracetest to Use Elastic APM as a Trace Data Store -Configure Tracetest to fetch trace data from Elasticsearch. +Configure Tracetest to fetch trace data from Elasticsearch. Tracetest uses Elasticsearch's **default port** `9200` to fetch trace data. @@ -24,7 +24,7 @@ The defaults can be: To configure Elastic APM you will need to download the CA certificate from the Docker image and upload it to the config under "Upload CA file". 
- The command to download the `ca.crt` file is: -`docker cp tracetest-elasticapm-with-elastic-agent-es01-1:/usr/share/elasticsearch/config/certs/ca/ca.crt .` + `docker cp tracetest-elasticapm-with-elastic-agent-es01-1:/usr/share/elasticsearch/config/certs/ca/ca.crt .` - Alternatively, you can skip CA certificate validation by setting the `Enable TLS but don't verify the certificate` option. :::tip @@ -43,7 +43,6 @@ https://es01:9200 - ## Connect Tracetest to Elastic with the CLI Or, if you prefer using the CLI, you can use this file config. @@ -53,20 +52,20 @@ type: DataStore spec: name: Elastic Data Store type: elasticapm - isDefault: true - elasticapm: - addresses: - - https://es01:9200 - username: elastic - password: changeme - index: traces-apm-default - insecureSkipVerify: true + default: true + elasticapm: + addresses: + - https://es01:9200 + username: elastic + password: changeme + index: traces-apm-default + insecureSkipVerify: true ``` Proceed to run this command in the terminal and specify the file above. ```bash -tracetest datastore apply -f my/data-store/file/location.yaml +tracetest apply datastore -f my/data-store/file/location.yaml ``` :::tip diff --git a/docs/docs/configuration/connecting-to-data-stores/jaeger.md b/docs/docs/configuration/connecting-to-data-stores/jaeger.md index 4cf091f172..fa98798f10 100644 --- a/docs/docs/configuration/connecting-to-data-stores/jaeger.md +++ b/docs/docs/configuration/connecting-to-data-stores/jaeger.md @@ -1,19 +1,19 @@ # Jaeger -Tracetest fetches traces from [Jaeger's gRPC Protobuf/gRPC QueryService](https://www.jaegertracing.io/docs/1.42/deployment/#query-service--ui) on port `16685`. +Tracetest fetches traces from [Jaeger's gRPC Protobuf/gRPC QueryService](https://www.jaegertracing.io/docs/1.42/deployment/#query-service--ui) on port `16685`. :::tip -Examples of configuring Tracetest can be found in the [`examples` folder of the Tracetest GitHub repo](https://github.com/kubeshop/tracetest/tree/main/examples). +Examples of configuring Tracetest can be found in the [`examples` folder of the Tracetest GitHub repo](https://github.com/kubeshop/tracetest/tree/main/examples). ::: ## Configure Tracetest to Use Jaeger as a Trace Data Store -Configure Tracetest to be aware that it has to fetch trace data from Jaeger. +Configure Tracetest to be aware that it has to fetch trace data from Jaeger. -Tracetest uses [Jaeger's gRPC Protobuf/gRPC QueryService](https://www.jaegertracing.io/docs/1.42/deployment/#query-service--ui) on port `16685` to fetch trace data. +Tracetest uses [Jaeger's gRPC Protobuf/gRPC QueryService](https://www.jaegertracing.io/docs/1.42/deployment/#query-service--ui) on port `16685` to fetch trace data. :::tip -Need help configuring the OpenTelemetry Collector so send trace data from your application to Jaeger? Read more in [the reference page here](../opentelemetry-collector-configuration-file-reference)). +Need help configuring the OpenTelemetry Collector so send trace data from your application to Jaeger? Read more in [the reference page here](../opentelemetry-collector-configuration-file-reference)). ::: ## Connect Tracetest to Jaeger with the Web UI @@ -26,10 +26,8 @@ jaeger:16685 ![Jaeger](../img/Jaeger-settings.png) - - ## Connect Tracetest to Jaeger with the CLI Or, if you prefer using the CLI, you can use this file config. 
@@ -39,7 +37,7 @@ type: DataStore spec: name: jaeger type: jaeger - isDefault: true + default: true jaeger: endpoint: jaeger:16685 tls: @@ -49,7 +47,7 @@ spec: Proceed to run this command in the terminal, and specify the file above. ```bash -tracetest datastore apply -f my/data-store/file/location.yaml +tracetest apply datastore -f my/data-store/file/location.yaml ``` :::tip diff --git a/docs/docs/configuration/connecting-to-data-stores/lightstep.md b/docs/docs/configuration/connecting-to-data-stores/lightstep.md index 03081649e0..fed4d02495 100644 --- a/docs/docs/configuration/connecting-to-data-stores/lightstep.md +++ b/docs/docs/configuration/connecting-to-data-stores/lightstep.md @@ -3,7 +3,7 @@ If you want to use [Lightstep](https://lightstep.com/) as the trace data store, you'll configure the OpenTelemetry Collector to receive traces from your system and then send them to both Tracetest and Lightstep. And, you don't have to change your existing pipelines to do so. :::tip -Examples of configuring Tracetest with Lightstep can be found in the [`examples` folder of the Tracetest GitHub repo](https://github.com/kubeshop/tracetest/tree/main/examples). +Examples of configuring Tracetest with Lightstep can be found in the [`examples` folder of the Tracetest GitHub repo](https://github.com/kubeshop/tracetest/tree/main/examples). ::: ## Configuring OpenTelemetry Collector to Send Traces to both Lightstep and Tracetest @@ -50,7 +50,7 @@ exporters: otlp/ls: endpoint: ingest.lightstep.com:443 headers: - "lightstep-access-token": "" # Send traces to Lightstep. Read more in docs here: https://docs.lightstep.com/otel/otel-quick-start + "lightstep-access-token": "" # Send traces to Lightstep. Read more in docs here: https://docs.lightstep.com/otel/otel-quick-start service: pipelines: @@ -62,7 +62,7 @@ service: processors: [batch] exporters: [otlp/tt] # your exporter pointing to your tracetest instance traces/ls: - receivers: [otlp] # your receiver + receivers: [otlp] # your receiver processors: [batch] exporters: [logging, otlp/ls] # your exporter pointing to your lighstep account ``` @@ -88,13 +88,13 @@ type: DataStore spec: name: Opentelemetry Collector pipeline type: otlp - isDefault: true + default: true ``` Proceed to run this command in the terminal and specify the file above. ```bash -tracetest datastore apply -f my/data-store/file/location.yaml +tracetest apply datastore -f my/data-store/file/location.yaml ``` :::tip diff --git a/docs/docs/configuration/connecting-to-data-stores/new-relic.md b/docs/docs/configuration/connecting-to-data-stores/new-relic.md index 320bab8fd2..cc88007db2 100644 --- a/docs/docs/configuration/connecting-to-data-stores/new-relic.md +++ b/docs/docs/configuration/connecting-to-data-stores/new-relic.md @@ -3,7 +3,7 @@ If you want to use [New Relic](https://newrelic.com/) as the trace data store, you'll configure the OpenTelemetry Collector to receive traces from your system and then send them to both Tracetest and New Relic. And, you don't have to change your existing pipelines to do so. :::tip -Examples of configuring Tracetest with New Relic can be found in the [`examples` folder of the Tracetest GitHub repo](https://github.com/kubeshop/tracetest/tree/main/examples). +Examples of configuring Tracetest with New Relic can be found in the [`examples` folder of the Tracetest GitHub repo](https://github.com/kubeshop/tracetest/tree/main/examples). 
::: ## Configuring OpenTelemetry Collector to Send Traces to B New Relic and Tracetest @@ -66,8 +66,7 @@ service: traces/nr: receivers: [otlp] # your receiver processors: [batch] - exporters: [logging, otlp/nr] # your exporter pointing to your lighstep account - + exporters: [logging, otlp/nr] # your exporter pointing to your lighstep account ``` ## Configure Tracetest to Use New Relic as a Trace Data Store @@ -82,7 +81,6 @@ In the Web UI, (1) open Settings, and, on the (2) Configure Data Store tab, sele - ## Connect Tracetest to New Relic with the CLI Or, if you prefer using the CLI, you can use this file config. @@ -92,13 +90,13 @@ type: DataStore spec: name: Opentelemetry Collector pipeline type: otlp - isDefault: true + default: true ``` Proceed to run this command in the terminal and specify the file above. ```bash -tracetest datastore apply -f my/data-store/file/location.yaml +tracetest apply datastore -f my/data-store/file/location.yaml ``` :::tip diff --git a/docs/docs/configuration/connecting-to-data-stores/opensearch.md b/docs/docs/configuration/connecting-to-data-stores/opensearch.md index 1e2107268f..7ce62c40e5 100644 --- a/docs/docs/configuration/connecting-to-data-stores/opensearch.md +++ b/docs/docs/configuration/connecting-to-data-stores/opensearch.md @@ -3,12 +3,12 @@ Tracetest fetches traces from [OpenSearch's default port](https://logz.io/blog/opensearch-tutorial-installation-configuration/#:~:text=This%20is%20because%20OpenSearch%20runs,use%20port%205601%20by%20default.) `9200`. :::tip -Examples of configuring Tracetest can be found in the [`examples` folder of the Tracetest GitHub repo](https://github.com/kubeshop/tracetest/tree/main/examples). +Examples of configuring Tracetest can be found in the [`examples` folder of the Tracetest GitHub repo](https://github.com/kubeshop/tracetest/tree/main/examples). ::: ## Configure Tracetest to Use OpenSearch as a Trace Data Store -Configure Tracetest to fetch trace data from OpenSearch. +Configure Tracetest to fetch trace data from OpenSearch. Tracetest uses OpenSearch's **default port** `9200` to fetch trace data. @@ -35,7 +35,6 @@ http://opensearch:9200 - ## Connect Tracetest to OpenSearch with the CLI Or, if you prefer using the CLI, you can use this file config. @@ -45,7 +44,7 @@ type: DataStore spec: name: OpenSearch Data Store type: openSearch - isDefault: true + default: true opensearch: addresses: - http://opensearch:9200 @@ -55,7 +54,7 @@ spec: Proceed to run this command in the terminal, and specify the file above. ```bash -tracetest datastore apply -f my/data-store/file/location.yaml +tracetest apply datastore -f my/data-store/file/location.yaml ``` :::tip diff --git a/docs/docs/configuration/connecting-to-data-stores/opentelemetry-collector.md b/docs/docs/configuration/connecting-to-data-stores/opentelemetry-collector.md index 9943749408..7ed80eb45d 100644 --- a/docs/docs/configuration/connecting-to-data-stores/opentelemetry-collector.md +++ b/docs/docs/configuration/connecting-to-data-stores/opentelemetry-collector.md @@ -7,7 +7,7 @@ http://your-tracetest-instance.com:21321 ``` :::tip -Examples of configuring Tracetest can be found in the [`examples` folder of the Tracetest GitHub repo](https://github.com/kubeshop/tracetest/tree/main/examples). +Examples of configuring Tracetest can be found in the [`examples` folder of the Tracetest GitHub repo](https://github.com/kubeshop/tracetest/tree/main/examples). 
::: ## Configuring OpenTelemetry Collector to Send Traces to Tracetest @@ -68,7 +68,6 @@ In the Web UI, (1) open Settings, and, on the (2) Configure Data Store tab, sele - ## Connect Tracetest to OpenTelemetry Collector with the CLI Or, if you prefer using the CLI, you can use this file config. @@ -78,13 +77,13 @@ type: DataStore spec: name: Opentelemetry Collector pipeline type: otlp - isDefault: true + default: true ``` Proceed to run this command in the terminal, and specify the file above. ```bash -tracetest datastore apply -f my/data-store/file/location.yaml +tracetest apply datastore -f my/data-store/file/location.yaml ``` :::tip diff --git a/docs/docs/configuration/connecting-to-data-stores/signalfx.md b/docs/docs/configuration/connecting-to-data-stores/signalfx.md index 0083c6e000..ec378d6b37 100644 --- a/docs/docs/configuration/connecting-to-data-stores/signalfx.md +++ b/docs/docs/configuration/connecting-to-data-stores/signalfx.md @@ -3,15 +3,15 @@ Tracetest fetches traces from [SignalFx's realm and token](https://docs.splunk.com/Observability/references/organizations.html). :::tip -Examples of configuring Tracetest can be found in the [`examples` folder of the Tracetest GitHub repo](https://github.com/kubeshop/tracetest/tree/main/examples). +Examples of configuring Tracetest can be found in the [`examples` folder of the Tracetest GitHub repo](https://github.com/kubeshop/tracetest/tree/main/examples). ::: ## Configure Tracetest to Use SignalFx as a Trace Data Store -Configure Tracetest to be aware that it has to fetch trace data from SignalFx. +Configure Tracetest to be aware that it has to fetch trace data from SignalFx. :::tip -Need help configuring the OpenTelemetry Collector so send trace data from your application to SignalFx? Read more in [the reference page here](../opentelemetry-collector-configuration-file-reference)). +Need help configuring the OpenTelemetry Collector so send trace data from your application to SignalFx? Read more in [the reference page here](../opentelemetry-collector-configuration-file-reference)). ::: ## Connect Tracetest to SignalFx with the Web UI @@ -29,7 +29,6 @@ Follow this [guide](https://docs.splunk.com/Observability/references/organizatio - ## Connect Tracetest to SignalFx with the CLI Or, if you prefer using the CLI, you can use this file config. @@ -39,7 +38,7 @@ type: DataStore spec: name: SignalFX type: signalFx - isDefault: true + default: true signalFx: realm: us1 token: mytoken @@ -48,5 +47,5 @@ spec: Proceed to run this command in the terminal, and specify the file above. ```bash -tracetest datastore apply -f my/data-store/file/location.yaml +tracetest apply datastore -f my/data-store/file/location.yaml ``` diff --git a/docs/docs/configuration/connecting-to-data-stores/tempo.md b/docs/docs/configuration/connecting-to-data-stores/tempo.md index 97f4e8175f..e33edbecd1 100644 --- a/docs/docs/configuration/connecting-to-data-stores/tempo.md +++ b/docs/docs/configuration/connecting-to-data-stores/tempo.md @@ -18,11 +18,11 @@ server: http_listen_port: 80 grpc_listen_port: 9095 distributor: - receivers: # This configuration will listen on all ports and protocols that Tempo is capable of. - jaeger: # the receives all come from the OpenTelemetry collector. more configuration information can - protocols: # be found here: https://github.com/open-telemetry/opentelemetry-collector/tree/master/receiver. - thrift_http: # - grpc: # For a production deployment you should only enable the receivers you need! 
+ receivers: # This configuration will listen on all ports and protocols that Tempo is capable of. + jaeger: # the receives all come from the OpenTelemetry collector. more configuration information can + protocols: # be found here: https://github.com/open-telemetry/opentelemetry-collector/tree/master/receiver. + thrift_http: # + grpc: # For a production deployment you should only enable the receivers you need! thrift_binary: thrift_compact: zipkin: @@ -32,29 +32,28 @@ distributor: grpc: opencensus: ingester: - trace_idle_period: 10s # The length of time after a trace has not received spans to consider it complete and flush it. - max_block_bytes: 1_000_000 # Cut the head block when it hits this size or ... + trace_idle_period: 10s # The length of time after a trace has not received spans to consider it complete and flush it. + max_block_bytes: 1_000_000 # Cut the head block when it hits this size or ... #traces_per_block: 1_000_000 - max_block_duration: 5m # this much time passes. + max_block_duration: 5m # this much time passes. compactor: compaction: - compaction_window: 1h # Blocks in this time window will be compacted together. - max_compaction_objects: 1000000 # Maximum size of compacted blocks. + compaction_window: 1h # Blocks in this time window will be compacted together. + max_compaction_objects: 1000000 # Maximum size of compacted blocks. block_retention: 1h compacted_block_retention: 10m storage: trace: - backend: local # Backend configuration to use. + backend: local # Backend configuration to use. wal: - path: /tmp/tempo/wal # Where to store the the wal locally. + path: /tmp/tempo/wal # Where to store the the wal locally. #bloom_filter_false_positive: .05 # Bloom filter false positive rate. Lower values create larger filters but fewer false positives. #index_downsample: 10 # Number of traces per index record. local: path: /tmp/tempo/blocks pool: - max_workers: 100 # The worker pool mainly drives querying, but is also used for polling the blocklist. + max_workers: 100 # The worker pool mainly drives querying, but is also used for polling the blocklist. queue_depth: 10000 - ``` ## Configure Tracetest to Use Tempo as a Trace Data Store @@ -76,6 +75,7 @@ If you are using Docker and the `gRPC` endpoint like in the screenshot below, us ``` tempo:9095 ``` + ![Tempo](../img/Tempo-settings.png) @@ -90,7 +90,6 @@ http://tempo - ## Connect Tracetest to Tempo with the CLI Or, if you prefer using the CLI, you can use this file config. @@ -102,7 +101,7 @@ type: DataStore spec: name: Grafana Tempo type: tempo - isDefault: true + default: true tempo: type: grpc grpc: @@ -118,7 +117,7 @@ type: DataStore spec: name: Grafana Tempo type: tempo - isDefault: true + default: true tempo: type: http http: @@ -130,7 +129,7 @@ spec: Proceed to run this command in the terminal, and specify the file above. ```bash -tracetest datastore apply -f my/data-store/file/location.yaml +tracetest apply datastore -f my/data-store/file/location.yaml ``` :::tip diff --git a/docs/docs/configuration/provisioning.md b/docs/docs/configuration/provisioning.md index a4f4259954..69dfbc9a05 100644 --- a/docs/docs/configuration/provisioning.md +++ b/docs/docs/configuration/provisioning.md @@ -1,10 +1,11 @@ # Provisioning server -Tracetest allows a server to be provisioned the first time a new Tracetest server is installed and launched. Provisioning sets certain resources in the server to the specified values, allowing you to configure the server. 
It is convenient in a CI/CD flow where you want to launch a server with a specified configuration. +Tracetest allows a server to be provisioned the first time a new Tracetest server is installed and launched. Provisioning sets certain resources in the server to the specified values, allowing you to configure the server. It is convenient in a CI/CD flow where you want to launch a server with a specified configuration. The server is provisioned by specifying a series of YAML snippets which will configure various resources. Each snippet is separated with the YAML separator, `---`. -Currently, the following resources can be provisioned: +Currently, the following resources can be provisioned: + - DataStore - PollingProfile - Config @@ -20,7 +21,7 @@ type: DataStore spec: name: otlp type: otlp - isdefault: true + default: true --- type: Config spec: @@ -46,4 +47,3 @@ spec: ``` Alternatively, we support setting an environment variable called `TRACETEST_PROVISIONING` to provision the server when it is first started. Base64 encode the provisioning YAML you want to utilize and set the `TRACETEST_PROVISIONING` environment variable with the result. The Tracetest server will then provision based on the Base64 encoded provisioning data in this environment variable the first time it is launched. - diff --git a/docs/docs/examples-tutorials/recipes/running-tracetest-with-elasticapm.md b/docs/docs/examples-tutorials/recipes/running-tracetest-with-elasticapm.md index 7b47be2c9a..432d90ff29 100644 --- a/docs/docs/examples-tutorials/recipes/running-tracetest-with-elasticapm.md +++ b/docs/docs/examples-tutorials/recipes/running-tracetest-with-elasticapm.md @@ -1,7 +1,7 @@ # Running Tracetest with Elastic APM :::note -[Check out the source code on GitHub here.](https://github.com/kubeshop/tracetest/tree/main/examples/tracetest-elasticapm-with-elastic-agent) +[Check out the source code on GitHub here.](https://github.com/kubeshop/tracetest/tree/main/examples/tracetest-elasticapm-with-elastic-agent) ::: [Tracetest](https://tracetest.io/) is a testing tool based on [OpenTelemetry](https://opentelemetry.io/) that allows you to test your distributed application. It allows you to use your telemetry data generated by the OpenTelemetry tools to check and assert if your application has the desired behavior defined by your test definitions. @@ -18,12 +18,12 @@ You will need [Docker](https://docs.docker.com/get-docker/) and [Docker Compose] ## Project structure -* `docker-compose.yml` - docker compose file that starts the whole environment - * Elastic stack single node cluster with Elasticsearch, Kibana and, the APM Server. - * OTel collector to support Tracetest. - * Tracetest instance. -* `collector-config.yml` - OTel collector configuration file -* `app.js` - sample NodeJS application listening on port 8080 and instrumented with Elastic Nodejs APM agent. +- `docker-compose.yml` - docker compose file that starts the whole environment + - Elastic stack single node cluster with Elasticsearch, Kibana and, the APM Server. + - OTel collector to support Tracetest. + - Tracetest instance. +- `collector-config.yml` - OTel collector configuration file +- `app.js` - sample NodeJS application listening on port 8080 and instrumented with Elastic Nodejs APM agent. The project is built with Docker Compose. @@ -48,10 +48,10 @@ The Elastic APM tracing is contained in the `elastic-apm-agent.js` file. 
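Circling back to the `TRACETEST_PROVISIONING` note in the provisioning doc above: the small sketch below shows one way to produce the Base64 payload. The file name and output format are illustrative assumptions, and the shell one-liner in the comment is the more common route.

```go
package main

import (
	"encoding/base64"
	"fmt"
	"os"
)

// Encodes a provisioning file for use in the TRACETEST_PROVISIONING
// environment variable. Shell equivalent (assumption, for illustration):
//
//	export TRACETEST_PROVISIONING="$(base64 < tracetest-provision.yaml)"
func main() {
	raw, err := os.ReadFile("tracetest-provision.yaml") // placeholder path
	if err != nil {
		panic(err)
	}
	fmt.Println(base64.StdEncoding.EncodeToString(raw))
}
```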
Traces Here's the content of the `elastic-apm-agent.js` file: ```js -const apm = require('elastic-apm-node').start({ - serviceName: 'sample-app', - serverUrl: 'http://apm-server:8200', -}) +const apm = require("elastic-apm-node").start({ + serviceName: "sample-app", + serverUrl: "http://apm-server:8200", +}); ``` Traces will be sent to either the Elastic APM endpoint. @@ -112,7 +112,7 @@ The `docker-compose.yaml` contains 7 services that configure Tracetest and Elast - **Postgres** - Postgres is a prerequisite for Tracetest to work. It stores trace data when running the trace-based tests. - [**OpenTelemetry Collector**](https://opentelemetry.io/docs/collector/) - A vendor-agnostic implementation of how to receive, process and export telemetry data. -- [**Elasticsearch**](https://www.elastic.co/elasticsearch/) - Data store and search engine. (Also contains a `setup` service to configure Elasticsearch) +- [**Elasticsearch**](https://www.elastic.co/elasticsearch/) - Data store and search engine. (Also contains a `setup` service to configure Elasticsearch) - [**Elastic APM**](https://www.elastic.co/observability/application-performance-monitoring) - Elastic application performance monitoring and traces. - [**Kibana**](https://www.elastic.co/kibana/) - Kibana is a free and open user interface that lets you visualize your Elasticsearch data and navigate the Elastic Stack. - [**Tracetest**](https://tracetest.io/) - Trace-based testing that generates end-to-end tests automatically from traces. @@ -160,6 +160,7 @@ type: DataStore spec: name: elasticapm type: elasticapm + default: true elasticapm: addresses: - https://es01:9200 @@ -167,13 +168,12 @@ spec: password: changeme index: traces-apm-default insecureSkipVerify: true - ``` Proceed to run this command in the terminal, and specify the file above. ```bash -tracetest datastore apply -f my/data-store/file/location.yaml +tracetest apply datastore -f my/data-store/file/location.yaml ``` ### Step-by-step guide @@ -185,9 +185,9 @@ Open `http://localhost:11633/` to configure the connection to Elasticsearch: 3. Add the Address and set it to `https://es01:9200`. 4. Set the Username to `elastic` and password to `changeme`. 5. You will need to download the CA certificate from the docker image and upload it to the config under "Upload CA file". - * The command to download the `ca.crt` file is: - `docker cp tracetest-elasticapm-with-elastic-agent-es01-1:/usr/share/elasticsearch/config/certs/ca/ca.crt .` - * Alternatively, you can skip CA certificate validation by setting the `Enable TLS but don't verify the certificate` option. + - The command to download the `ca.crt` file is: + `docker cp tracetest-elasticapm-with-elastic-agent-es01-1:/usr/share/elasticsearch/config/certs/ca/ca.crt .` + - Alternatively, you can skip CA certificate validation by setting the `Enable TLS but don't verify the certificate` option. 6. Test the connection and Save it, if all is successful. Create a new test: diff --git a/docs/docs/examples-tutorials/recipes/running-tracetest-with-jaeger.md b/docs/docs/examples-tutorials/recipes/running-tracetest-with-jaeger.md index b0270f7c89..7e3b0bc744 100644 --- a/docs/docs/examples-tutorials/recipes/running-tracetest-with-jaeger.md +++ b/docs/docs/examples-tutorials/recipes/running-tracetest-with-jaeger.md @@ -193,7 +193,6 @@ services: interval: 1s timeout: 3s retries: 60 - ``` Tracetest depends on Postgres, Jaeger and the OpenTelemetry Collector. Both Tracetest and the OpenTelemetry Collector require config files to be loaded via a volume. 
The volumes are mapped from the root directory into the `tracetest` directory and the respective config files. @@ -228,7 +227,6 @@ telemetry: server: telemetry: exporter: collector - ``` The `tracetest.provision.yaml` file defines the trace data store, set to Jaeger, meaning the traces will be stored in Jaeger and Tracetest will fetch them from Jaeger when running tests. @@ -253,12 +251,11 @@ type: DataStore spec: name: Jaeger type: jaeger - isdefault: true + default: true jaeger: endpoint: jaeger:16685 tls: insecure: true - ``` How do traces reach Jaeger? diff --git a/docs/docs/examples-tutorials/recipes/running-tracetest-with-new-relic.md b/docs/docs/examples-tutorials/recipes/running-tracetest-with-new-relic.md index d034956599..68615c2a34 100644 --- a/docs/docs/examples-tutorials/recipes/running-tracetest-with-new-relic.md +++ b/docs/docs/examples-tutorials/recipes/running-tracetest-with-new-relic.md @@ -112,7 +112,6 @@ services: - "/otel-local-config.yaml" volumes: - ./tracetest/collector.config.yaml:/otel-local-config.yaml - ``` Tracetest depends on both Postgres and the OpenTelemetry Collector. Both Tracetest and the OpenTelemetry Collector require config files to be loaded via a volume. The volumes are mapped from the root directory into the `tracetest` directory and the respective config files. @@ -160,7 +159,7 @@ type: DataStore spec: name: New Relic type: newrelic - isdefault: true + default: true --- type: Demo @@ -173,7 +172,6 @@ spec: productCatalogEndpoint: otel-productcatalogservice:3550 cartEndpoint: otel-cartservice:7070 checkoutEndpoint: otel-checkoutservice:5050 - ``` **How to send traces to Tracetest and New Relic?** diff --git a/docs/docs/examples-tutorials/recipes/running-tracetest-without-a-trace-data-store-with-manual-instrumentation.md b/docs/docs/examples-tutorials/recipes/running-tracetest-without-a-trace-data-store-with-manual-instrumentation.md index 9f863cd8a8..26afabe03d 100644 --- a/docs/docs/examples-tutorials/recipes/running-tracetest-without-a-trace-data-store-with-manual-instrumentation.md +++ b/docs/docs/examples-tutorials/recipes/running-tracetest-without-a-trace-data-store-with-manual-instrumentation.md @@ -19,14 +19,17 @@ You will need [Docker](https://docs.docker.com/get-docker/) and [Docker Compose] The project is built with Docker Compose. It contains two distinct `docker-compose.yaml` files. ### 1. Node.js app + The `docker-compose.yaml` file and `Dockerfile` in the root directory are for the Node.js app. ### 2. Tracetest + The `docker-compose.yaml` file, `collector.config.yaml`, `tracetest-provision.yaml`, and `tracetest.config.yaml` in the `tracetest` directory are for the setting up Tracetest and the OpenTelemetry Collector. The `tracetest` directory is self-contained and will run all the prerequisites for enabling OpenTelemetry traces and trace-based testing with Tracetest. ### Docker Compose Network + All `services` in the `docker-compose.yaml` are on the same network and will be reachable by hostname from within other services. E.g. `tracetest:21321` in the `collector.config.yaml` will map to the `tracetest` service, where the port `21321` is the port where Tracetest accepts traces. ## Node.js app @@ -39,33 +42,40 @@ Traces will be sent to the OpenTelemetry Collector. 
Here's the content of the `tracing.otel.grpc.js` file: ```js -const opentelemetry = require("@opentelemetry/sdk-node") -const { getNodeAutoInstrumentations } = require("@opentelemetry/auto-instrumentations-node") -const { OTLPTraceExporter } = require('@opentelemetry/exporter-trace-otlp-grpc') -const { Resource } = require("@opentelemetry/resources") -const { SemanticResourceAttributes } = require("@opentelemetry/semantic-conventions") -const { NodeTracerProvider } = require("@opentelemetry/sdk-trace-node") -const { BatchSpanProcessor } = require("@opentelemetry/sdk-trace-base") +const opentelemetry = require("@opentelemetry/sdk-node"); +const { + getNodeAutoInstrumentations, +} = require("@opentelemetry/auto-instrumentations-node"); +const { + OTLPTraceExporter, +} = require("@opentelemetry/exporter-trace-otlp-grpc"); +const { Resource } = require("@opentelemetry/resources"); +const { + SemanticResourceAttributes, +} = require("@opentelemetry/semantic-conventions"); +const { NodeTracerProvider } = require("@opentelemetry/sdk-trace-node"); +const { BatchSpanProcessor } = require("@opentelemetry/sdk-trace-base"); const resource = Resource.default().merge( new Resource({ - [SemanticResourceAttributes.SERVICE_NAME]: "quick-start-nodejs-manual-instrumentation", + [SemanticResourceAttributes.SERVICE_NAME]: + "quick-start-nodejs-manual-instrumentation", [SemanticResourceAttributes.SERVICE_VERSION]: "0.0.1", }) -) +); -const provider = new NodeTracerProvider({ resource: resource }) -const exporter = new OTLPTraceExporter({ url: 'http://otel-collector:4317' }) -const processor = new BatchSpanProcessor(exporter) -provider.addSpanProcessor(processor) -provider.register() +const provider = new NodeTracerProvider({ resource: resource }); +const exporter = new OTLPTraceExporter({ url: "http://otel-collector:4317" }); +const processor = new BatchSpanProcessor(exporter); +provider.addSpanProcessor(processor); +provider.register(); const sdk = new opentelemetry.NodeSDK({ traceExporter: exporter, instrumentations: [getNodeAutoInstrumentations()], - serviceName: 'quick-start-nodejs-manual-instrumentation' -}) -sdk.start() + serviceName: "quick-start-nodejs-manual-instrumentation", +}); +sdk.start(); ``` Depending on which of these you choose, traces will be sent to either the `grpc` or `http` endpoint. @@ -122,7 +132,7 @@ EXPOSE 8080 Instead, the `docker-compose.yaml` contains the `CMD` section for both services. ```yaml -version: '3' +version: "3" services: app: image: quick-start-nodejs @@ -204,7 +214,6 @@ services: - "/otel-local-config.yaml" volumes: - ./tracetest/collector.config.yaml:/otel-local-config.yaml - ``` Tracetest depends on both Postgres and the OpenTelemetry Collector. Both Tracetest and the OpenTelemetry Collector require config files to be loaded via a volume. The volumes are mapped from the root directory into the `tracetest` directory and the respective config files. @@ -245,7 +254,7 @@ type: DataStore spec: name: OpenTelemetry Collector type: otlp - isdefault: true + default: true ``` But how are traces sent to Tracetest? @@ -279,7 +288,6 @@ service: receivers: [otlp] processors: [batch] exporters: [otlp/1] - ``` ## Run both the Node.js app and Tracetest @@ -297,7 +305,7 @@ This will start your Tracetest instance on `http://localhost:11633/`. 
Go ahead a Here's a sample of a failed test run, which happens if you add this assertion: ```css -attr:books.list.count = 4 +attr: books.list.count = 4; ``` ![assertion](https://res.cloudinary.com/djwdcmwdz/image/upload/v1673808310/screely-1673808287031_sol4it.png) @@ -329,15 +337,15 @@ spec: url: http://app:8080/books method: GET headers: - - key: Content-Type - value: application/json + - key: Content-Type + value: application/json specs: - - selector: span[tracetest.span.type="http" name="GET /books" http.target="/books" http.method="GET"] - assertions: - - attr:http.status_code = 200 - - selector: span[tracetest.span.type="general" name="Books List"] - assertions: - - attr:books.list.count = 4 + - selector: span[tracetest.span.type="http" name="GET /books" http.target="/books" http.method="GET"] + assertions: + - attr:http.status_code = 200 + - selector: span[tracetest.span.type="general" name="Books List"] + assertions: + - attr:books.list.count = 4 ``` This file defines the a test the same way you would through the Web UI. @@ -363,7 +371,7 @@ This test will fail just like the sample above due to the `attr:books.list.count The tests will pass if you change the assertion to: ```css -attr:books.list.count = 3 +attr: books.list.count = 3; ``` Feel free to check out our [docs](https://docs.tracetest.io/), and join our [Discord Community](https://discord.gg/8MtcMrQNbX) for more info! diff --git a/examples/collector/tracetest-provision.yaml b/examples/collector/tracetest-provision.yaml index 326599ee60..59f430d7b0 100644 --- a/examples/collector/tracetest-provision.yaml +++ b/examples/collector/tracetest-provision.yaml @@ -13,4 +13,4 @@ type: DataStore spec: name: OpenTelemetry Collector type: otlp - isdefault: true + default: true diff --git a/examples/observability-driven-development-go-tracetest/bookstore/part1/tracetest/tracetest-provision.yaml b/examples/observability-driven-development-go-tracetest/bookstore/part1/tracetest/tracetest-provision.yaml index 52f0b22785..79a1e1c0c7 100644 --- a/examples/observability-driven-development-go-tracetest/bookstore/part1/tracetest/tracetest-provision.yaml +++ b/examples/observability-driven-development-go-tracetest/bookstore/part1/tracetest/tracetest-provision.yaml @@ -35,7 +35,7 @@ type: DataStore spec: name: Jaeger type: jaeger - isdefault: true + default: true jaeger: endpoint: jaeger:16685 tls: diff --git a/examples/observability-driven-development-go-tracetest/bookstore/part2.1/tracetest/tracetest-provision.yaml b/examples/observability-driven-development-go-tracetest/bookstore/part2.1/tracetest/tracetest-provision.yaml index 52f0b22785..79a1e1c0c7 100644 --- a/examples/observability-driven-development-go-tracetest/bookstore/part2.1/tracetest/tracetest-provision.yaml +++ b/examples/observability-driven-development-go-tracetest/bookstore/part2.1/tracetest/tracetest-provision.yaml @@ -35,7 +35,7 @@ type: DataStore spec: name: Jaeger type: jaeger - isdefault: true + default: true jaeger: endpoint: jaeger:16685 tls: diff --git a/examples/observability-driven-development-go-tracetest/bookstore/part2.2/tracetest/tracetest-provision.yaml b/examples/observability-driven-development-go-tracetest/bookstore/part2.2/tracetest/tracetest-provision.yaml index 52f0b22785..79a1e1c0c7 100644 --- a/examples/observability-driven-development-go-tracetest/bookstore/part2.2/tracetest/tracetest-provision.yaml +++ b/examples/observability-driven-development-go-tracetest/bookstore/part2.2/tracetest/tracetest-provision.yaml @@ -35,7 +35,7 @@ type: DataStore spec: 
name: Jaeger type: jaeger - isdefault: true + default: true jaeger: endpoint: jaeger:16685 tls: diff --git a/examples/observability-driven-development-go-tracetest/bookstore/part3.1/tracetest/tracetest-provision.yaml b/examples/observability-driven-development-go-tracetest/bookstore/part3.1/tracetest/tracetest-provision.yaml index 52f0b22785..79a1e1c0c7 100644 --- a/examples/observability-driven-development-go-tracetest/bookstore/part3.1/tracetest/tracetest-provision.yaml +++ b/examples/observability-driven-development-go-tracetest/bookstore/part3.1/tracetest/tracetest-provision.yaml @@ -35,7 +35,7 @@ type: DataStore spec: name: Jaeger type: jaeger - isdefault: true + default: true jaeger: endpoint: jaeger:16685 tls: diff --git a/examples/observability-driven-development-go-tracetest/bookstore/part3.2/tracetest/tracetest-provision.yaml b/examples/observability-driven-development-go-tracetest/bookstore/part3.2/tracetest/tracetest-provision.yaml index 52f0b22785..79a1e1c0c7 100644 --- a/examples/observability-driven-development-go-tracetest/bookstore/part3.2/tracetest/tracetest-provision.yaml +++ b/examples/observability-driven-development-go-tracetest/bookstore/part3.2/tracetest/tracetest-provision.yaml @@ -35,7 +35,7 @@ type: DataStore spec: name: Jaeger type: jaeger - isdefault: true + default: true jaeger: endpoint: jaeger:16685 tls: diff --git a/examples/quick-start-datadog-nodejs/tracetest/tracetest-provision.yaml b/examples/quick-start-datadog-nodejs/tracetest/tracetest-provision.yaml index 09a15d390c..c21445ec67 100644 --- a/examples/quick-start-datadog-nodejs/tracetest/tracetest-provision.yaml +++ b/examples/quick-start-datadog-nodejs/tracetest/tracetest-provision.yaml @@ -3,4 +3,4 @@ type: DataStore spec: name: Datadog type: datadog - isdefault: true + default: true diff --git a/examples/quick-start-go/tracetest/tracetest-provision.yaml b/examples/quick-start-go/tracetest/tracetest-provision.yaml index 90c851a62f..2f48f42be6 100644 --- a/examples/quick-start-go/tracetest/tracetest-provision.yaml +++ b/examples/quick-start-go/tracetest/tracetest-provision.yaml @@ -3,4 +3,4 @@ type: DataStore spec: name: OpenTelemetry Collector type: otlp - isdefault: true + default: true diff --git a/examples/quick-start-jaeger-nodejs/tracetest/tracetest-provision.yaml b/examples/quick-start-jaeger-nodejs/tracetest/tracetest-provision.yaml index 9f50f1ef33..bcf9a527e0 100644 --- a/examples/quick-start-jaeger-nodejs/tracetest/tracetest-provision.yaml +++ b/examples/quick-start-jaeger-nodejs/tracetest/tracetest-provision.yaml @@ -13,7 +13,7 @@ type: DataStore spec: name: Jaeger type: jaeger - isdefault: true + default: true jaeger: endpoint: jaeger:16685 tls: diff --git a/examples/quick-start-net-core/tracetest-provision.yaml b/examples/quick-start-net-core/tracetest-provision.yaml index 9f50f1ef33..bcf9a527e0 100644 --- a/examples/quick-start-net-core/tracetest-provision.yaml +++ b/examples/quick-start-net-core/tracetest-provision.yaml @@ -13,7 +13,7 @@ type: DataStore spec: name: Jaeger type: jaeger - isdefault: true + default: true jaeger: endpoint: jaeger:16685 tls: diff --git a/examples/quick-start-nodejs-manual-instrumentation/tracetest/tracetest-provision.yaml b/examples/quick-start-nodejs-manual-instrumentation/tracetest/tracetest-provision.yaml index 326599ee60..59f430d7b0 100644 --- a/examples/quick-start-nodejs-manual-instrumentation/tracetest/tracetest-provision.yaml +++ b/examples/quick-start-nodejs-manual-instrumentation/tracetest/tracetest-provision.yaml @@ -13,4 +13,4 @@ type: 
DataStore spec: name: OpenTelemetry Collector type: otlp - isdefault: true + default: true diff --git a/examples/quick-start-nodejs/tracetest/tracetest-provision.yaml b/examples/quick-start-nodejs/tracetest/tracetest-provision.yaml index 326599ee60..59f430d7b0 100644 --- a/examples/quick-start-nodejs/tracetest/tracetest-provision.yaml +++ b/examples/quick-start-nodejs/tracetest/tracetest-provision.yaml @@ -13,4 +13,4 @@ type: DataStore spec: name: OpenTelemetry Collector type: otlp - isdefault: true + default: true diff --git a/examples/quick-start-python/tracetest/tracetest-provision.yaml b/examples/quick-start-python/tracetest/tracetest-provision.yaml index 90c851a62f..2f48f42be6 100644 --- a/examples/quick-start-python/tracetest/tracetest-provision.yaml +++ b/examples/quick-start-python/tracetest/tracetest-provision.yaml @@ -3,4 +3,4 @@ type: DataStore spec: name: OpenTelemetry Collector type: otlp - isdefault: true + default: true diff --git a/examples/quick-start-ruby-on-rails/tracetest/tracetest-provision.yaml b/examples/quick-start-ruby-on-rails/tracetest/tracetest-provision.yaml index 326599ee60..59f430d7b0 100644 --- a/examples/quick-start-ruby-on-rails/tracetest/tracetest-provision.yaml +++ b/examples/quick-start-ruby-on-rails/tracetest/tracetest-provision.yaml @@ -13,4 +13,4 @@ type: DataStore spec: name: OpenTelemetry Collector type: otlp - isdefault: true + default: true diff --git a/examples/quick-start-ruby-sinatra/tracetest/tracetest-provision.yaml b/examples/quick-start-ruby-sinatra/tracetest/tracetest-provision.yaml index 326599ee60..59f430d7b0 100644 --- a/examples/quick-start-ruby-sinatra/tracetest/tracetest-provision.yaml +++ b/examples/quick-start-ruby-sinatra/tracetest/tracetest-provision.yaml @@ -13,4 +13,4 @@ type: DataStore spec: name: OpenTelemetry Collector type: otlp - isdefault: true + default: true diff --git a/examples/tracetest-new-relic-otel-demo/tracetest/tracetest-provision.yaml b/examples/tracetest-new-relic-otel-demo/tracetest/tracetest-provision.yaml index 539c50ea46..c8c09cd111 100644 --- a/examples/tracetest-new-relic-otel-demo/tracetest/tracetest-provision.yaml +++ b/examples/tracetest-new-relic-otel-demo/tracetest/tracetest-provision.yaml @@ -3,7 +3,7 @@ type: DataStore spec: name: New Relic type: newrelic - isdefault: true + default: true --- type: Demo diff --git a/examples/tracetest-new-relic/tracetest/tracetest-provision.yaml b/examples/tracetest-new-relic/tracetest/tracetest-provision.yaml index 741bed9f63..0d902699f9 100644 --- a/examples/tracetest-new-relic/tracetest/tracetest-provision.yaml +++ b/examples/tracetest-new-relic/tracetest/tracetest-provision.yaml @@ -3,7 +3,7 @@ type: DataStore spec: name: New Relic type: newrelic - isdefault: true + default: true --- type: Demo diff --git a/examples/tracetest-provisioning-env/tracetest-provision.yaml b/examples/tracetest-provisioning-env/tracetest-provision.yaml index 9f50f1ef33..bcf9a527e0 100644 --- a/examples/tracetest-provisioning-env/tracetest-provision.yaml +++ b/examples/tracetest-provisioning-env/tracetest-provision.yaml @@ -13,7 +13,7 @@ type: DataStore spec: name: Jaeger type: jaeger - isdefault: true + default: true jaeger: endpoint: jaeger:16685 tls: diff --git a/k8s/provisioning.yaml b/k8s/provisioning.yaml index b4315b36fd..f27b91e890 100644 --- a/k8s/provisioning.yaml +++ b/k8s/provisioning.yaml @@ -1,13 +1,4 @@ --- -type: DataStore -spec: - name: Jaeger - type: jaeger - jaeger: - endpoint: jaeger-query.tracetest:16685 - tls: - insecure: true ---- type: Config spec: 
analyticsEnabled: false @@ -40,3 +31,13 @@ spec: productCatalogEndpoint: http://otel-productcatalogservice.otel-demo:3550 cartEndpoint: http://otel-cartservice.otel-demo:7070 checkoutEndpoint: http://otel-checkoutservice.otel-demo:5050 +--- +type: DataStore +spec: + name: Jaeger + type: jaeger + default: true + jaeger: + endpoint: jaeger-query.tracetest:16685 + tls: + insecure: true diff --git a/local-config/tracetest.provision.yaml b/local-config/tracetest.provision.yaml index 0984669a25..cbc454fb29 100644 --- a/local-config/tracetest.provision.yaml +++ b/local-config/tracetest.provision.yaml @@ -3,7 +3,7 @@ type: DataStore spec: name: OpenTelemetry Collector type: otlp - isdefault: true + default: true --- type: Demo spec: diff --git a/server/app/app.go b/server/app/app.go index a43a4c9f19..e6fdb110aa 100644 --- a/server/app/app.go +++ b/server/app/app.go @@ -29,6 +29,7 @@ import ( "github.com/kubeshop/tracetest/server/subscription" "github.com/kubeshop/tracetest/server/testdb" "github.com/kubeshop/tracetest/server/tracedb" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" "github.com/kubeshop/tracetest/server/traces" "github.com/kubeshop/tracetest/server/tracing" "go.opentelemetry.io/otel/trace" @@ -187,12 +188,14 @@ func (app *App) Start(opts ...appOption) error { triggerRegistry := getTriggerRegistry(tracer, applicationTracer) pollingProfileRepo := pollingprofile.NewRepository(db) + dataStoreRepo := datastoreresource.NewRepository(db) eventEmitter := executor.NewEventEmitter(testDB, subscriptionManager) - registerOtlpServer(app, testDB, eventEmitter) + registerOtlpServer(app, testDB, eventEmitter, dataStoreRepo) rf := newRunnerFacades( pollingProfileRepo, + dataStoreRepo, testDB, applicationTracer, tracer, @@ -242,13 +245,7 @@ func (app *App) Start(opts ...appOption) error { demoRepo := demoresource.NewRepository(db) registerDemosResource(demoRepo, apiRouter, db, provisioner) - dataStoreManager := resourcemanager.New[testdb.DataStoreResource]( - testdb.DataStoreResourceName, - testdb.DataStoreResourceNamePlural, - testdb.NewDataStoreResourceProvisioner(testDB), - resourcemanager.WithOperations(resourcemanager.OperationNoop), - ) - provisioner.AddResourceProvisioner(dataStoreManager) + registerDataStoreResource(dataStoreRepo, apiRouter, db, provisioner) registerSPAHandler(router, app.cfg, configFromDB.IsAnalyticsEnabled(), serverID) @@ -286,8 +283,8 @@ func registerSPAHandler(router *mux.Router, cfg httpServerConfig, analyticsEnabl ) } -func registerOtlpServer(app *App, testDB model.Repository, eventEmitter executor.EventEmitter) { - ingester := otlp.NewIngester(testDB, eventEmitter) +func registerOtlpServer(app *App, testDB model.Repository, eventEmitter executor.EventEmitter, dsRepo *datastoreresource.Repository) { + ingester := otlp.NewIngester(testDB, eventEmitter, dsRepo) grpcOtlpServer := otlp.NewGrpcServer(":4317", ingester) httpOtlpServer := otlp.NewHttpServer(":4318", ingester) go grpcOtlpServer.Start() @@ -335,6 +332,17 @@ func registerDemosResource(repository *demoresource.Repository, router *mux.Rout provisioner.AddResourceProvisioner(manager) } +func registerDataStoreResource(repository *datastoreresource.Repository, router *mux.Router, db *sql.DB, provisioner *provisioning.Provisioner) { + manager := resourcemanager.New[datastoreresource.DataStore]( + datastoreresource.ResourceName, + datastoreresource.ResourceNamePlural, + repository, + resourcemanager.WithOperations(datastoreresource.Operations...), + ) + manager.RegisterRoutes(router) + 
provisioner.AddResourceProvisioner(manager) +} + func getTriggerRegistry(tracer, appTracer trace.Tracer) *trigger.Registry { triggerReg := trigger.NewRegsitry(tracer, appTracer) triggerReg.Add(trigger.HTTP()) diff --git a/server/app/facade.go b/server/app/facade.go index 2589b3a8ae..16c4326eeb 100644 --- a/server/app/facade.go +++ b/server/app/facade.go @@ -10,6 +10,7 @@ import ( "github.com/kubeshop/tracetest/server/pkg/id" "github.com/kubeshop/tracetest/server/subscription" "github.com/kubeshop/tracetest/server/tracedb" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" "go.opentelemetry.io/otel/trace" ) @@ -47,6 +48,7 @@ func (rf runnerFacade) RunAssertions(ctx context.Context, request executor.Asser func newRunnerFacades( ppRepo *pollingprofile.Repository, + dsRepo *datastoreresource.Repository, testDB model.Repository, appTracer trace.Tracer, tracer trace.Tracer, @@ -72,7 +74,7 @@ func newRunnerFacades( tracer, execTestUpdater, tracedb.Factory(testDB), - testDB, + dsRepo, eventEmitter, ) @@ -93,7 +95,7 @@ func newRunnerFacades( tracer, subscriptionManager, tracedb.Factory(testDB), - testDB, + dsRepo, eventEmitter, ) diff --git a/server/executor/poller_executor.go b/server/executor/poller_executor.go index a68ecd28e8..7fee6406c2 100644 --- a/server/executor/poller_executor.go +++ b/server/executor/poller_executor.go @@ -7,18 +7,20 @@ import ( "github.com/kubeshop/tracetest/server/model" "github.com/kubeshop/tracetest/server/model/events" + "github.com/kubeshop/tracetest/server/resourcemanager" "github.com/kubeshop/tracetest/server/tracedb" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/trace" ) -type traceDBFactoryFn func(ds model.DataStore) (tracedb.TraceDB, error) +type traceDBFactoryFn func(ds datastoreresource.DataStore) (tracedb.TraceDB, error) type DefaultPollerExecutor struct { ppGetter PollingProfileGetter updater RunUpdater newTraceDBFn traceDBFactoryFn - dsRepo model.DataStoreRepository + dsRepo resourcemanager.Current[datastoreresource.DataStore] eventEmitter EventEmitter } @@ -64,7 +66,7 @@ func NewPollerExecutor( tracer trace.Tracer, updater RunUpdater, newTraceDBFn traceDBFactoryFn, - dsRepo model.DataStoreRepository, + dsRepo resourcemanager.Current[datastoreresource.DataStore], eventEmitter EventEmitter, ) PollerExecutor { @@ -83,7 +85,7 @@ func NewPollerExecutor( } func (pe DefaultPollerExecutor) traceDB(ctx context.Context) (tracedb.TraceDB, error) { - ds, err := pe.dsRepo.DefaultDataStore(ctx) + ds, err := pe.dsRepo.Current(ctx) if err != nil { return nil, fmt.Errorf("cannot get default datastore: %w", err) } diff --git a/server/executor/poller_executor_test.go b/server/executor/poller_executor_test.go index 030916e45c..801432a7d4 100644 --- a/server/executor/poller_executor_test.go +++ b/server/executor/poller_executor_test.go @@ -14,6 +14,7 @@ import ( "github.com/kubeshop/tracetest/server/testdb" "github.com/kubeshop/tracetest/server/tracedb" "github.com/kubeshop/tracetest/server/tracedb/connection" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" "github.com/kubeshop/tracetest/server/tracing" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" @@ -507,7 +508,8 @@ func (dpc defaultProfileGetter) GetDefault(context.Context) pollingprofile.Polli func getPollerExecutorWithMocks(t *testing.T, retryDelay, maxWaitTimeForTrace time.Duration, tracePerIteration []model.Trace) executor.PollerExecutor { updater := getRunUpdaterMock(t) 
tracer := getTracerMock(t) - testDB := getDataStoreRepositoryMock(t) + testDB := getRunRepositoryMock(t) + dataStoreRepo := getDataStoreRepositoryMock(t) traceDBFactory := getTraceDBMockFactory(t, tracePerIteration, &traceDBState{}) eventEmitter := getEventEmitterMock(t, testDB) @@ -516,7 +518,7 @@ func getPollerExecutorWithMocks(t *testing.T, retryDelay, maxWaitTimeForTrace ti tracer, updater, traceDBFactory, - testDB, + dataStoreRepo, eventEmitter, ) } @@ -532,18 +534,27 @@ func getRunUpdaterMock(t *testing.T) executor.RunUpdater { return runUpdaterMock{} } -// DataStoreRepository -func getDataStoreRepositoryMock(t *testing.T) model.Repository { +// RunRepository +func getRunRepositoryMock(t *testing.T) model.Repository { t.Helper() testDB := testdb.MockRepository{} - - testDB.Mock.On("DefaultDataStore", mock.Anything).Return(model.DataStore{Type: model.DataStoreTypeOTLP}, nil) testDB.Mock.On("CreateTestRunEvent", mock.Anything).Return(noError) return &testDB } +// DataStoreRepository +type dataStoreRepositoryMock struct{} + +func (m *dataStoreRepositoryMock) Current(ctx context.Context) (datastoreresource.DataStore, error) { + return datastoreresource.DataStore{Type: datastoreresource.DataStoreTypeOTLP}, nil +} + +func getDataStoreRepositoryMock(t *testing.T) *dataStoreRepositoryMock { + return &dataStoreRepositoryMock{} +} + // EventEmitter func getEventEmitterMock(t *testing.T, db model.Repository) executor.EventEmitter { t.Helper() @@ -598,10 +609,10 @@ type traceDBState struct { currentIteration int } -func getTraceDBMockFactory(t *testing.T, tracePerIteration []model.Trace, state *traceDBState) func(model.DataStore) (tracedb.TraceDB, error) { +func getTraceDBMockFactory(t *testing.T, tracePerIteration []model.Trace, state *traceDBState) func(datastoreresource.DataStore) (tracedb.TraceDB, error) { t.Helper() - return func(ds model.DataStore) (tracedb.TraceDB, error) { + return func(ds datastoreresource.DataStore) (tracedb.TraceDB, error) { return &traceDBMock{ tracePerIteration: tracePerIteration, state: state, diff --git a/server/executor/runner.go b/server/executor/runner.go index f47a208034..92dfd52a50 100644 --- a/server/executor/runner.go +++ b/server/executor/runner.go @@ -13,8 +13,10 @@ import ( "github.com/kubeshop/tracetest/server/expression" "github.com/kubeshop/tracetest/server/model" "github.com/kubeshop/tracetest/server/model/events" + "github.com/kubeshop/tracetest/server/resourcemanager" "github.com/kubeshop/tracetest/server/subscription" "github.com/kubeshop/tracetest/server/tracedb" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" "go.opentelemetry.io/otel" "go.opentelemetry.io/otel/propagation" "go.opentelemetry.io/otel/trace" @@ -42,7 +44,7 @@ func NewPersistentRunner( tracer trace.Tracer, subscriptionManager *subscription.Manager, newTraceDBFn traceDBFactoryFn, - dsRepo model.DataStoreRepository, + dsRepo resourcemanager.Current[datastoreresource.DataStore], eventEmitter EventEmitter, ) PersistentRunner { return persistentRunner{ @@ -68,7 +70,7 @@ type persistentRunner struct { tracer trace.Tracer subscriptionManager *subscription.Manager newTraceDBFn traceDBFactoryFn - dsRepo model.DataStoreRepository + dsRepo resourcemanager.Current[datastoreresource.DataStore] eventEmitter EventEmitter executeQueue chan execReq @@ -161,7 +163,7 @@ func (r persistentRunner) Run(ctx context.Context, test model.Test, metadata mod } func (r persistentRunner) traceDB(ctx context.Context) (tracedb.TraceDB, error) { - ds, err := r.dsRepo.DefaultDataStore(ctx) + ds, 
err := r.dsRepo.Current(ctx) if err != nil { return nil, fmt.Errorf("cannot get default datastore: %w", err) } diff --git a/server/executor/runner_test.go b/server/executor/runner_test.go index 46aa3ed3cf..705483ade1 100644 --- a/server/executor/runner_test.go +++ b/server/executor/runner_test.go @@ -141,16 +141,26 @@ func runnerSetup(t *testing.T) runnerFixture { mtp.t = t tracer, _ := tracing.NewTracer(context.Background(), config.Must(config.New())) - testDB := testdb.MockRepository{} - testDB.Mock.On("DefaultDataStore", mock.Anything).Return(model.DataStore{Type: model.DataStoreTypeOTLP}, nil) + testDB := testdb.MockRepository{} testDB.Mock.On("CreateTestRunEvent", mock.Anything).Return(noError) eventEmitter := executor.NewEventEmitter(&testDB, subscription.NewManager()) + persistentRunner := executor.NewPersistentRunner( + reg, + db, + executor.NewDBUpdater(db), + mtp, + tracer, + subscription.NewManager(), + tracedb.Factory(&testDB), + getDataStoreRepositoryMock(t), + eventEmitter) + mtp.Test(t) return runnerFixture{ - runner: executor.NewPersistentRunner(reg, db, executor.NewDBUpdater(db), mtp, tracer, subscription.NewManager(), tracedb.Factory(&testDB), &testDB, eventEmitter), + runner: persistentRunner, mockExecutor: me, mockDB: db, mockTracePoller: mtp, diff --git a/server/http/controller.go b/server/http/controller.go index beeaedf032..241608850f 100644 --- a/server/http/controller.go +++ b/server/http/controller.go @@ -22,6 +22,7 @@ import ( "github.com/kubeshop/tracetest/server/pkg/id" "github.com/kubeshop/tracetest/server/testdb" "github.com/kubeshop/tracetest/server/tracedb" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" "go.opentelemetry.io/otel/trace" ) @@ -31,7 +32,7 @@ type controller struct { tracer trace.Tracer testDB model.Repository runner runner - newTraceDBFn func(ds model.DataStore) (tracedb.TraceDB, error) + newTraceDBFn func(ds datastoreresource.DataStore) (tracedb.TraceDB, error) mappers mappings.Mappings triggerRegistry *trigger.Registry } @@ -45,7 +46,7 @@ type runner interface { func NewController( testDB model.Repository, - newTraceDBFn func(ds model.DataStore) (tracedb.TraceDB, error), + newTraceDBFn func(ds datastoreresource.DataStore) (tracedb.TraceDB, error), runner runner, mappers mappings.Mappings, triggerRegistry *trigger.Registry, @@ -1137,106 +1138,7 @@ func takeResources(transactions []openapi.Transaction, tests []openapi.Test, tak return items[skip:upperLimit] } -// DataStores - -func (c *controller) CreateDataStore(ctx context.Context, in openapi.DataStore) (openapi.ImplResponse, error) { - dataStore := c.mappers.In.DataStore(in) - - if dataStore.ID != "" { - exists, err := c.testDB.DataStoreIDExists(ctx, dataStore.ID) - if err != nil { - return handleDBError(err), err - } - - if exists { - err := fmt.Errorf(`cannot create data store with ID "%s: %w`, dataStore.ID, errTestExists) - r := map[string]string{ - "error": err.Error(), - } - return openapi.Response(http.StatusBadRequest, r), err - } - } - - dataStore, err := c.testDB.CreateDataStore(ctx, dataStore) - if err != nil { - return openapi.Response(http.StatusInternalServerError, err.Error()), err - } - - return openapi.Response(200, c.mappers.Out.DataStore(dataStore)), nil -} - -func (c *controller) DeleteDataStore(ctx context.Context, dataStoreId string) (openapi.ImplResponse, error) { - dataStore, err := c.testDB.GetDataStore(ctx, dataStoreId) - if err != nil { - return handleDBError(err), err - } - - err = c.testDB.DeleteDataStore(ctx, dataStore) - if err != nil { - 
return handleDBError(err), err - } - - return openapi.Response(204, nil), nil -} - -func (c *controller) GetDataStore(ctx context.Context, dataStoreId string) (openapi.ImplResponse, error) { - dataStore, err := c.testDB.GetDataStore(ctx, dataStoreId) - if err != nil { - return handleDBError(err), err - } - - return openapi.Response(200, c.mappers.Out.DataStore(dataStore)), nil -} - -func (c *controller) GetDataStores(ctx context.Context, take, skip int32, query string, sortBy string, sortDirection string) (openapi.ImplResponse, error) { - if take == 0 { - take = 20 - } - - dataStores, err := c.testDB.GetDataStores(ctx, take, skip, query, sortBy, sortDirection) - if err != nil { - return handleDBError(err), err - } - - return openapi.Response(200, paginated[openapi.DataStore]{ - items: c.mappers.Out.DataStores(dataStores.Items), - count: dataStores.TotalCount, - }), nil -} - -func (c *controller) UpdateDataStore(ctx context.Context, dataStoreId string, in openapi.DataStore) (openapi.ImplResponse, error) { - updated := c.mappers.In.DataStore(in) - - dataStore, err := c.testDB.GetDataStore(ctx, dataStoreId) - if err != nil { - return handleDBError(err), err - } - - updated.ID = dataStore.ID - - _, err = c.testDB.UpdateDataStore(ctx, updated) - if err != nil { - return handleDBError(err), err - } - - return openapi.Response(204, nil), nil -} - -func (c *controller) GetDataStoreDefinitionFile(ctx context.Context, dataStoreID string) (openapi.ImplResponse, error) { - dataStore, err := c.testDB.GetDataStore(ctx, dataStoreID) - if err != nil { - return handleDBError(err), err - } - - enc, err := yaml.Encode(yamlconvert.DataStore(dataStore)) - if err != nil { - return openapi.Response(http.StatusUnprocessableEntity, err.Error()), err - } - - return openapi.Response(200, enc), nil -} - -// TestConnection implements openapi.ApiApiServicer +// TestConnection implements openapi.ApiApiService func (c *controller) TestConnection(ctx context.Context, dataStore openapi.DataStore) (openapi.ImplResponse, error) { ds := c.mappers.In.DataStore(dataStore) diff --git a/server/http/custom_routes.go b/server/http/custom_routes.go index 84ba038cdd..18eb0763b7 100644 --- a/server/http/custom_routes.go +++ b/server/http/custom_routes.go @@ -38,7 +38,6 @@ func (c *customController) Routes() openapi.Routes { routes[c.getRouteIndex("GetTestVersionDefinitionFile")].HandlerFunc = c.GetTestVersionDefinitionFile routes[c.getRouteIndex("GetTransactionVersionDefinitionFile")].HandlerFunc = c.GetTransactionVersionDefinitionFile routes[c.getRouteIndex("GetEnvironmentDefinitionFile")].HandlerFunc = c.GetEnvironmentDefinitionFile - routes[c.getRouteIndex("GetDataStoreDefinitionFile")].HandlerFunc = c.GetDataStoreDefinitionFile routes[c.getRouteIndex("GetTestRuns")].HandlerFunc = c.GetTestRuns @@ -46,7 +45,6 @@ func (c *customController) Routes() openapi.Routes { routes[c.getRouteIndex("GetEnvironments")].HandlerFunc = paginatedEndpoint[openapi.Environment](c.service.GetEnvironments, c.errorHandler) routes[c.getRouteIndex("GetTransactions")].HandlerFunc = paginatedEndpoint[openapi.Transaction](c.service.GetTransactions, c.errorHandler) routes[c.getRouteIndex("GetResources")].HandlerFunc = paginatedEndpoint[openapi.Resource](c.service.GetResources, c.errorHandler) - routes[c.getRouteIndex("GetDataStores")].HandlerFunc = paginatedEndpoint[openapi.DataStore](c.service.GetDataStores, c.errorHandler) for index, route := range routes { routeName := fmt.Sprintf("%s %s", route.Method, route.Pattern) @@ -168,20 +166,6 @@ func (c 
*customController) GetEnvironmentDefinitionFile(w http.ResponseWriter, r w.Write(result.Body.([]byte)) } -func (c *customController) GetDataStoreDefinitionFile(w http.ResponseWriter, r *http.Request) { - params := mux.Vars(r) - dataStoreIdParam := params["dataStoreId"] - - result, err := c.service.GetDataStoreDefinitionFile(r.Context(), dataStoreIdParam) - // If an error occurred, encode the error with the status code - if err != nil { - c.errorHandler(w, r, err, &result) - return - } - w.Header().Set("Content-Type", "application/yaml; charset=UTF-8") - w.Write(result.Body.([]byte)) -} - func paginatedEndpoint[T any]( f func(c context.Context, take, skip int32, query string, sortBy string, sortDirection string) (openapi.ImplResponse, error), errorHandler openapi.ErrorHandler, diff --git a/server/http/mappings/data_store.go b/server/http/mappings/data_store.go deleted file mode 100644 index f924110f6e..0000000000 --- a/server/http/mappings/data_store.go +++ /dev/null @@ -1,127 +0,0 @@ -package mappings - -import ( - "fmt" - - "github.com/fluidtruck/deepcopy" - "github.com/kubeshop/tracetest/server/model" - "github.com/kubeshop/tracetest/server/openapi" -) - -var dataStoreTypesMapping = map[model.DataStoreType]openapi.SupportedDataStores{ - model.DataStoreTypeJaeger: openapi.JAEGER, - model.DataStoreTypeTempo: openapi.TEMPO, - model.DataStoreTypeOpenSearch: openapi.OPEN_SEARCH, - model.DataStoreTypeSignalFX: openapi.SIGNAL_FX, - model.DataStoreTypeOTLP: openapi.OTLP, - model.DataStoreTypeNewRelic: openapi.NEW_RELIC, - model.DataStoreTypeLighStep: openapi.LIGHTSTEP, - model.DataStoreTypeElasticAPM: openapi.ELASTIC_APM, - model.DataStoreTypeDataDog: openapi.DATADOG, - model.DataStoreTypeAwsXRay: openapi.AWSXRAY, -} - -func (m OpenAPI) DataStoreType(in model.DataStoreType) openapi.SupportedDataStores { - dsd, exists := dataStoreTypesMapping[in] - if !exists { - // this should only happen during development, - // so it's more an alert for devs than actual error handling - panic(fmt.Errorf("trying to convert an undefined model.DataStoreType '%s'", in)) - } - return dsd -} - -func (m OpenAPI) DataStore(in model.DataStore) openapi.DataStore { - dataStore := openapi.DataStore{ - Id: in.ID, - Name: in.Name, - Type: m.DataStoreType(in.Type), - IsDefault: in.IsDefault, - Jaeger: openapi.GrpcClientSettings{}, - Tempo: openapi.BaseClient{}, - OpenSearch: openapi.ElasticSearch{}, - ElasticApm: openapi.ElasticSearch{}, - SignalFx: openapi.SignalFx{}, - Awsxray: openapi.AwsXRay{}, - CreatedAt: in.CreatedAt, - } - - if in.Values.Jaeger != nil { - deepcopy.DeepCopy(in.Values.Jaeger, &dataStore.Jaeger) - deepcopy.DeepCopy(in.Values.Jaeger.TLSSetting, &dataStore.Jaeger.Tls) - deepcopy.DeepCopy(in.Values.Jaeger.TLSSetting.TLSSetting, &dataStore.Jaeger.Tls.Settings) - } - - if in.Values.Tempo != nil { - deepcopy.DeepCopy(in.Values.Tempo, &dataStore.Tempo) - deepcopy.DeepCopy(in.Values.Tempo.Grpc.TLSSetting, &dataStore.Tempo.Grpc.Tls) - deepcopy.DeepCopy(in.Values.Tempo.Grpc.TLSSetting.TLSSetting, &dataStore.Tempo.Grpc.Tls.Settings) - deepcopy.DeepCopy(in.Values.Tempo.Http.TLSSetting, &dataStore.Tempo.Http.Tls) - deepcopy.DeepCopy(in.Values.Tempo.Http.TLSSetting, &dataStore.Tempo.Http.Tls.Settings) - } - - if in.Values.OpenSearch != nil { - deepcopy.DeepCopy(in.Values.OpenSearch, &dataStore.OpenSearch) - } - if in.Values.ElasticApm != nil { - deepcopy.DeepCopy(in.Values.ElasticApm, &dataStore.ElasticApm) - } - if in.Values.SignalFx != nil { - deepcopy.DeepCopy(in.Values.SignalFx, &dataStore.SignalFx) - } - - if 
in.Values.AwsXRay != nil { - deepcopy.DeepCopy(in.Values.AwsXRay, &dataStore.Awsxray) - } - - return dataStore -} - -func (m OpenAPI) DataStores(in []model.DataStore) []openapi.DataStore { - dataStores := make([]openapi.DataStore, len(in)) - for i, t := range in { - dataStores[i] = m.DataStore(t) - } - - return dataStores -} - -func (m Model) DataStoreType(in openapi.SupportedDataStores) model.DataStoreType { - for k, v := range dataStoreTypesMapping { - if v == in { - return k - } - } - - // this should only happen during development, - // so it's more an alert for devs than actual error handling - panic(fmt.Errorf("trying to convert an undefined model.DataStoreType '%s'", in)) - -} - -func (m Model) DataStore(in openapi.DataStore) model.DataStore { - dataStore := model.DataStore{ - ID: in.Id, - Name: in.Name, - Type: m.DataStoreType(in.Type), - IsDefault: in.IsDefault, - CreatedAt: in.CreatedAt, - } - - deepcopy.DeepCopy(in.Jaeger, &dataStore.Values.Jaeger) - deepcopy.DeepCopy(in.Jaeger.Tls, &dataStore.Values.Jaeger.TLSSetting) - deepcopy.DeepCopy(in.Jaeger.Tls.Settings, &dataStore.Values.Jaeger.TLSSetting.TLSSetting) - - deepcopy.DeepCopy(in.Tempo, &dataStore.Values.Tempo) - deepcopy.DeepCopy(in.Tempo.Grpc.Tls, &dataStore.Values.Tempo.Grpc.TLSSetting) - deepcopy.DeepCopy(in.Tempo.Grpc.Tls.Settings, &dataStore.Values.Tempo.Grpc.TLSSetting.TLSSetting) - deepcopy.DeepCopy(in.Tempo.Http.Tls, &dataStore.Values.Tempo.Http.TLSSetting) - deepcopy.DeepCopy(in.Tempo.Http.Tls.Settings, &dataStore.Values.Tempo.Grpc.TLSSetting.TLSSetting) - - deepcopy.DeepCopy(in.OpenSearch, &dataStore.Values.OpenSearch) - deepcopy.DeepCopy(in.ElasticApm, &dataStore.Values.ElasticApm) - deepcopy.DeepCopy(in.SignalFx, &dataStore.Values.SignalFx) - deepcopy.DeepCopy(in.Awsxray, &dataStore.Values.AwsXRay) - - return dataStore -} diff --git a/server/http/mappings/datastore.go b/server/http/mappings/datastore.go index f4f5770dbf..e162d859e9 100644 --- a/server/http/mappings/datastore.go +++ b/server/http/mappings/datastore.go @@ -1,8 +1,13 @@ package mappings import ( + "fmt" + "time" + + "github.com/fluidtruck/deepcopy" "github.com/kubeshop/tracetest/server/model" "github.com/kubeshop/tracetest/server/openapi" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" ) func (m *OpenAPI) ConnectionTestResult(in model.ConnectionResult) openapi.ConnectionResult { @@ -40,3 +45,102 @@ func (m *OpenAPI) ConnectionTestStep(in model.ConnectionTestStep) openapi.Connec Error: errMessage, } } + +var dataStoreTypesMapping = map[datastoreresource.DataStoreType]openapi.SupportedDataStores{ + datastoreresource.DataStoreTypeJaeger: openapi.JAEGER, + datastoreresource.DataStoreTypeTempo: openapi.TEMPO, + datastoreresource.DataStoreTypeOpenSearch: openapi.OPEN_SEARCH, + datastoreresource.DataStoreTypeSignalFX: openapi.SIGNAL_FX, + datastoreresource.DataStoreTypeOTLP: openapi.OTLP, + datastoreresource.DataStoreTypeNewRelic: openapi.NEW_RELIC, + datastoreresource.DataStoreTypeLighStep: openapi.LIGHTSTEP, + datastoreresource.DataStoreTypeElasticAPM: openapi.ELASTIC_APM, + datastoreresource.DataStoreTypeDataDog: openapi.DATADOG, + datastoreresource.DataStoreTypeAwsXRay: openapi.AWSXRAY, +} + +func (m OpenAPI) DataStoreType(in datastoreresource.DataStoreType) openapi.SupportedDataStores { + dsd, exists := dataStoreTypesMapping[in] + if !exists { + // this should only happen during development, + // so it's more an alert for devs than actual error handling + panic(fmt.Errorf("trying to convert an undefined 
datastoreresource.DataStoreType '%s'", in)) + } + return dsd +} + +func (m Model) DataStoreType(in openapi.SupportedDataStores) datastoreresource.DataStoreType { + for k, v := range dataStoreTypesMapping { + if v == in { + return k + } + } + + // this should only happen during development, + // so it's more an alert for devs than actual error handling + panic(fmt.Errorf("trying to convert an undefined datastoreresource.DataStoreType '%s'", in)) + +} + +func (m Model) DataStore(in openapi.DataStore) datastoreresource.DataStore { + dataStore := datastoreresource.DataStore{ + ID: "current", + Name: in.Name, + Type: m.DataStoreType(in.Type), + Default: in.Default, + CreatedAt: in.CreatedAt.Format(time.RFC3339Nano), + Values: datastoreresource.DataStoreValues{}, + } + + // Jaeger + if dataStore.Type == datastoreresource.DataStoreTypeJaeger { + dataStore.Values.Jaeger = &datastoreresource.GRPCClientSettings{ + TLS: &datastoreresource.TLS{}, + } + + deepcopy.DeepCopy(in.Jaeger, &dataStore.Values.Jaeger) + deepcopy.DeepCopy(in.Jaeger.Tls, &dataStore.Values.Jaeger.TLS) + } + + // Tempo + if dataStore.Type == datastoreresource.DataStoreTypeTempo { + dataStore.Values.Tempo = &datastoreresource.MultiChannelClientConfig{ + Grpc: &datastoreresource.GRPCClientSettings{ + TLS: &datastoreresource.TLS{}, + }, + Http: &datastoreresource.HttpClientConfig{ + TLS: &datastoreresource.TLS{}, + }, + } + + deepcopy.DeepCopy(in.Tempo, &dataStore.Values.Tempo) + deepcopy.DeepCopy(in.Tempo.Grpc.Tls, &dataStore.Values.Tempo.Grpc.TLS) + deepcopy.DeepCopy(in.Tempo.Http.Tls, &dataStore.Values.Tempo.Http.TLS) + } + + // AWS XRay + if dataStore.Type == datastoreresource.DataStoreTypeAwsXRay { + dataStore.Values.AwsXRay = &datastoreresource.AWSXRayConfig{} + deepcopy.DeepCopy(in.Awsxray, &dataStore.Values.AwsXRay) + } + + // OpenSearch + if dataStore.Type == datastoreresource.DataStoreTypeOpenSearch { + dataStore.Values.OpenSearch = &datastoreresource.ElasticSearchConfig{} + deepcopy.DeepCopy(in.OpenSearch, &dataStore.Values.OpenSearch) + } + + // ElasticAPM + if dataStore.Type == datastoreresource.DataStoreTypeElasticAPM { + dataStore.Values.OpenSearch = &datastoreresource.ElasticSearchConfig{} + deepcopy.DeepCopy(in.OpenSearch, &dataStore.Values.ElasticApm) + } + + // SignalFX + if dataStore.Type == datastoreresource.DataStoreTypeSignalFX { + dataStore.Values.SignalFx = &datastoreresource.SignalFXConfig{} + deepcopy.DeepCopy(in.SignalFx, &dataStore.Values.SignalFx) + } + + return dataStore +} diff --git a/server/model/data_stores.go b/server/model/data_stores.go deleted file mode 100644 index 82d3902b9f..0000000000 --- a/server/model/data_stores.go +++ /dev/null @@ -1,123 +0,0 @@ -package model - -import ( - "fmt" - "time" - - "go.opentelemetry.io/collector/config/configgrpc" - "go.opentelemetry.io/collector/config/configtls" - "golang.org/x/exp/slices" -) - -type ( - DataStore struct { - ID string - Name string - Type DataStoreType - IsDefault bool - Values DataStoreValues - CreatedAt time.Time - } - - DataStoreValues struct { - Jaeger *configgrpc.GRPCClientSettings - Tempo *BaseClientConfig - OpenSearch *ElasticSearchDataStoreConfig - ElasticApm *ElasticSearchDataStoreConfig - SignalFx *SignalFXDataStoreConfig - AwsXRay *AWSXRayDataStoreConfig - } - - BaseClientConfig struct { - Type string - Grpc configgrpc.GRPCClientSettings - Http HttpClientConfig - } - - HttpClientConfig struct { - Url string - Headers map[string]string - TLSSetting configtls.TLSClientSetting `mapstructure:"tls"` - } - - OTELCollectorConfig struct { 
- Endpoint string - } - - ElasticSearchDataStoreConfig struct { - Addresses []string - Username string - Password string - Index string - Certificate string - InsecureSkipVerify bool - } - - SignalFXDataStoreConfig struct { - Realm string - Token string - } - - AWSXRayDataStoreConfig struct { - Region string - AccessKeyID string - SecretAccessKey string - SessionToken string - UseDefaultAuth bool - } -) - -func (ds DataStore) IsZero() bool { - return ds.Type == "" -} - -type DataStoreType string - -const ( - DataStoreTypeJaeger DataStoreType = "jaeger" - DataStoreTypeTempo DataStoreType = "tempo" - DataStoreTypeOpenSearch DataStoreType = "opensearch" - DataStoreTypeSignalFX DataStoreType = "signalfx" - DataStoreTypeOTLP DataStoreType = "otlp" - DataStoreTypeNewRelic DataStoreType = "newrelic" - DataStoreTypeLighStep DataStoreType = "lightstep" - DataStoreTypeElasticAPM DataStoreType = "elasticapm" - DataStoreTypeDataDog DataStoreType = "datadog" - DataStoreTypeAwsXRay DataStoreType = "awsxray" -) - -var validTypes = []DataStoreType{ - DataStoreTypeJaeger, - DataStoreTypeTempo, - DataStoreTypeOpenSearch, - DataStoreTypeSignalFX, - DataStoreTypeOTLP, - DataStoreTypeNewRelic, - DataStoreTypeLighStep, - DataStoreTypeElasticAPM, - DataStoreTypeDataDog, - DataStoreTypeAwsXRay, -} - -var otlpBasedDataStores = []DataStoreType{ - DataStoreTypeOTLP, - DataStoreTypeNewRelic, - DataStoreTypeLighStep, - DataStoreTypeDataDog, -} - -func (ds DataStore) HasID() bool { - return ds.ID != "" -} - -func (ds DataStore) Validate() error { - if !slices.Contains(validTypes, ds.Type) { - return fmt.Errorf("unsupported data store") - } - - return nil -} - -func (ds DataStore) IsOTLPBasedProvider() bool { - return slices.Contains(otlpBasedDataStores, ds.Type) -} diff --git a/server/model/repository.go b/server/model/repository.go index 97247bf9a7..53b0c661d3 100644 --- a/server/model/repository.go +++ b/server/model/repository.go @@ -60,16 +60,6 @@ type TransactionRunRepository interface { GetLatestRunByTransactionVersion(context.Context, id.ID, int) (TransactionRun, error) } -type DataStoreRepository interface { - CreateDataStore(context.Context, DataStore) (DataStore, error) - UpdateDataStore(context.Context, DataStore) (DataStore, error) - DeleteDataStore(context.Context, DataStore) error - DefaultDataStore(_ context.Context) (DataStore, error) - GetDataStore(_ context.Context, id string) (DataStore, error) - GetDataStores(_ context.Context, take, skip int32, query, sortBy, sortDirection string) (List[DataStore], error) - DataStoreIDExists(context.Context, string) (bool, error) -} - type TestRunEventRepository interface { CreateTestRunEvent(context.Context, TestRunEvent) error GetTestRunEvents(context.Context, id.ID, int) ([]TestRunEvent, error) @@ -83,8 +73,6 @@ type Repository interface { TransactionRepository TransactionRunRepository - DataStoreRepository - TestRunEventRepository ServerID() (id string, isNew bool, _ error) diff --git a/server/model/yaml/datastore.go b/server/model/yaml/datastore.go deleted file mode 100644 index b6fbd1ebd4..0000000000 --- a/server/model/yaml/datastore.go +++ /dev/null @@ -1,72 +0,0 @@ -package yaml - -type DataStore struct { - Id string `mapstructure:"id"` - Name string `mapstructure:"name"` - Type string `mapstructure:"type"` - IsDefault bool `mapstructure:"isDefault"` - Jaeger GRPCClientSettings `mapstructure:"jaeger"` - Tempo BaseClientConfig `mapstructure:"tempo"` - OpenSearch OpenSearch `mapstructure:"openSearch"` - SignalFx SignalFX `mapstructure:"signalFx"` - AwsXRay 
AwsXRay `mapstructure:"awsxray"` -} - -type GRPCClientSettings struct { - Endpoint string `mapstructure:"endpoint"` - ReadBufferSize float32 `mapstructure:"readBufferSize"` - WriteBufferSize float32 `mapstructure:"writeBufferSize"` - WaitForReady bool `mapstructure:"waitForReady"` - Headers map[string]string `mapstructure:"headers"` - BalancerName string `mapstructure:"balancerName"` - Compression string `mapstructure:"compression"` - Tls TLS `mapstructure:"tls"` - Auth HTTPAuthentication `mapstructure:"auth"` -} - -type BaseClientConfig struct { - Type string `mapstructure:"type"` - Grpc GRPCClientSettings `mapstructure:"grpc"` - Http HttpClientConfig `mapstructure:"http"` -} - -type HttpClientConfig struct { - Url string `yaml:",omitempty" mapstructure:"url"` - Headers map[string]string `yaml:",omitempty" mapstructure:"headers"` - Tls TLS `yaml:",omitempty" mapstructure:"tls"` -} - -type TLS struct { - Insecure bool `mapstructure:"insecure"` - InsecureSkipVerify bool `mapstructure:"insecureSkipVerify"` - ServerName string `mapstructure:"serverName"` - Settings TLSSetting `mapstructure:"settings"` -} - -type TLSSetting struct { - CAFile string `mapstructure:"cAFile"` - CertFile string `mapstructure:"certFile"` - KeyFile string `mapstructure:"keyFile"` - MinVersion string `mapstructure:"minVersion"` - MaxVersion string `mapstructure:"maxVersion"` -} - -type SignalFX struct { - Realm string `mapstructure:"realm"` - Token string `mapstructure:"token"` -} - -type AwsXRay struct { - Region string `mapstructure:"region"` - AccessKeyId string `mapstructure:"accessKeyId"` - SecretAccessKey string `mapstructure:"secretAccessKey"` - SessionToken string `mapstructure:"sessionToken"` - UseDefaultAuth bool `mapstructure:"useDefaultAuth"` -} - -type OpenSearch struct { - Addresses []string `mapstructure:"addresses"` - Username string `mapstructure:"username"` - Password string `mapstructure:"password"` - Index string `mapstructure:"index"` -} diff --git a/server/model/yaml/file.go b/server/model/yaml/file.go index 864416a8d1..b7f25b3f78 100644 --- a/server/model/yaml/file.go +++ b/server/model/yaml/file.go @@ -20,6 +20,7 @@ const ( FileTypeEnvironment FileType = "Environment" FileTypeDataStore FileType = "DataStore" FileTypeConfig FileType = "Config" + FileTypeDemo FileType = "Demo" FileTypePollingProfile FileType = "PollingProfile" ) diff --git a/server/model/yaml/yamlconvert/datastore.go b/server/model/yaml/yamlconvert/datastore.go deleted file mode 100644 index 81107e701d..0000000000 --- a/server/model/yaml/yamlconvert/datastore.go +++ /dev/null @@ -1,36 +0,0 @@ -package yamlconvert - -import ( - "github.com/fluidtruck/deepcopy" - "github.com/kubeshop/tracetest/server/model" - "github.com/kubeshop/tracetest/server/model/yaml" -) - -func DataStore(in model.DataStore) yaml.File { - out := yaml.DataStore{} - deepcopy.DeepCopy(in, &out) - if in.Values.Jaeger != nil { - deepcopy.DeepCopy(in.Values.Jaeger, &out.Jaeger) - deepcopy.DeepCopy(in.Values.Jaeger.TLSSetting, &out.Jaeger.Tls) - } - if in.Values.Tempo != nil { - deepcopy.DeepCopy(in.Values.Tempo, &out.Tempo) - deepcopy.DeepCopy(in.Values.Tempo.Grpc.TLSSetting, &out.Tempo.Grpc.Tls) - deepcopy.DeepCopy(in.Values.Tempo.Http.TLSSetting, &out.Tempo.Http.Tls) - } - - if in.Values.OpenSearch != nil { - deepcopy.DeepCopy(in.Values.OpenSearch, &out.OpenSearch) - } - if in.Values.SignalFx != nil { - deepcopy.DeepCopy(in.Values.SignalFx, &out.SignalFx) - } - if in.Values.AwsXRay != nil { - deepcopy.DeepCopy(in.Values.AwsXRay, &out.AwsXRay) - } - - return 
yaml.File{ - Type: yaml.FileTypeDataStore, - Spec: out, - } -} diff --git a/server/openapi/api.go b/server/openapi/api.go index 0558b44de9..6cd2ac588b 100644 --- a/server/openapi/api.go +++ b/server/openapi/api.go @@ -18,11 +18,9 @@ import ( // The ApiApiRouter implementation should parse necessary information from the http request, // pass the data to a ApiApiServicer to perform the required actions, then write the service results to the http response. type ApiApiRouter interface { - CreateDataStore(http.ResponseWriter, *http.Request) CreateEnvironment(http.ResponseWriter, *http.Request) CreateTest(http.ResponseWriter, *http.Request) CreateTransaction(http.ResponseWriter, *http.Request) - DeleteDataStore(http.ResponseWriter, *http.Request) DeleteEnvironment(http.ResponseWriter, *http.Request) DeleteTest(http.ResponseWriter, *http.Request) DeleteTestRun(http.ResponseWriter, *http.Request) @@ -32,9 +30,6 @@ type ApiApiRouter interface { ExecuteDefinition(http.ResponseWriter, *http.Request) ExportTestRun(http.ResponseWriter, *http.Request) ExpressionResolve(http.ResponseWriter, *http.Request) - GetDataStore(http.ResponseWriter, *http.Request) - GetDataStoreDefinitionFile(http.ResponseWriter, *http.Request) - GetDataStores(http.ResponseWriter, *http.Request) GetEnvironment(http.ResponseWriter, *http.Request) GetEnvironmentDefinitionFile(http.ResponseWriter, *http.Request) GetEnvironments(http.ResponseWriter, *http.Request) @@ -61,7 +56,6 @@ type ApiApiRouter interface { RunTransaction(http.ResponseWriter, *http.Request) StopTestRun(http.ResponseWriter, *http.Request) TestConnection(http.ResponseWriter, *http.Request) - UpdateDataStore(http.ResponseWriter, *http.Request) UpdateEnvironment(http.ResponseWriter, *http.Request) UpdateTest(http.ResponseWriter, *http.Request) UpdateTransaction(http.ResponseWriter, *http.Request) @@ -73,12 +67,15 @@ type ApiApiRouter interface { // pass the data to a ResourceApiApiServicer to perform the required actions, then write the service results to the http response. type ResourceApiApiRouter interface { CreateDemo(http.ResponseWriter, *http.Request) + DeleteDataStore(http.ResponseWriter, *http.Request) DeleteDemo(http.ResponseWriter, *http.Request) GetConfiguration(http.ResponseWriter, *http.Request) + GetDataStore(http.ResponseWriter, *http.Request) GetDemo(http.ResponseWriter, *http.Request) GetPollingProfile(http.ResponseWriter, *http.Request) ListDemos(http.ResponseWriter, *http.Request) UpdateConfiguration(http.ResponseWriter, *http.Request) + UpdateDataStore(http.ResponseWriter, *http.Request) UpdateDemo(http.ResponseWriter, *http.Request) UpdatePollingProfile(http.ResponseWriter, *http.Request) } @@ -88,11 +85,9 @@ type ResourceApiApiRouter interface { // while the service implementation can be ignored with the .openapi-generator-ignore file // and updated with the logic required for the API. 
type ApiApiServicer interface { - CreateDataStore(context.Context, DataStore) (ImplResponse, error) CreateEnvironment(context.Context, Environment) (ImplResponse, error) CreateTest(context.Context, Test) (ImplResponse, error) CreateTransaction(context.Context, Transaction) (ImplResponse, error) - DeleteDataStore(context.Context, string) (ImplResponse, error) DeleteEnvironment(context.Context, string) (ImplResponse, error) DeleteTest(context.Context, string) (ImplResponse, error) DeleteTestRun(context.Context, string, int32) (ImplResponse, error) @@ -102,9 +97,6 @@ type ApiApiServicer interface { ExecuteDefinition(context.Context, TextDefinition) (ImplResponse, error) ExportTestRun(context.Context, string, int32) (ImplResponse, error) ExpressionResolve(context.Context, ResolveRequestInfo) (ImplResponse, error) - GetDataStore(context.Context, string) (ImplResponse, error) - GetDataStoreDefinitionFile(context.Context, string) (ImplResponse, error) - GetDataStores(context.Context, int32, int32, string, string, string) (ImplResponse, error) GetEnvironment(context.Context, string) (ImplResponse, error) GetEnvironmentDefinitionFile(context.Context, string) (ImplResponse, error) GetEnvironments(context.Context, int32, int32, string, string, string) (ImplResponse, error) @@ -131,7 +123,6 @@ type ApiApiServicer interface { RunTransaction(context.Context, string, RunInformation) (ImplResponse, error) StopTestRun(context.Context, string, int32) (ImplResponse, error) TestConnection(context.Context, DataStore) (ImplResponse, error) - UpdateDataStore(context.Context, string, DataStore) (ImplResponse, error) UpdateEnvironment(context.Context, string, Environment) (ImplResponse, error) UpdateTest(context.Context, string, Test) (ImplResponse, error) UpdateTransaction(context.Context, string, Transaction) (ImplResponse, error) @@ -144,12 +135,15 @@ type ApiApiServicer interface { // and updated with the logic required for the API. 
type ResourceApiApiServicer interface { CreateDemo(context.Context, Demo) (ImplResponse, error) + DeleteDataStore(context.Context, string) (ImplResponse, error) DeleteDemo(context.Context, string) (ImplResponse, error) GetConfiguration(context.Context, string) (ImplResponse, error) + GetDataStore(context.Context, string) (ImplResponse, error) GetDemo(context.Context, string) (ImplResponse, error) GetPollingProfile(context.Context, string) (ImplResponse, error) ListDemos(context.Context, int32, int32, string, string) (ImplResponse, error) UpdateConfiguration(context.Context, string, ConfigurationResource) (ImplResponse, error) + UpdateDataStore(context.Context, string, DataStore) (ImplResponse, error) UpdateDemo(context.Context, string, Demo) (ImplResponse, error) UpdatePollingProfile(context.Context, string, PollingProfile) (ImplResponse, error) } diff --git a/server/openapi/api_api.go b/server/openapi/api_api.go index ebf912d126..dff14a1553 100644 --- a/server/openapi/api_api.go +++ b/server/openapi/api_api.go @@ -50,12 +50,6 @@ func NewApiApiController(s ApiApiServicer, opts ...ApiApiOption) Router { // Routes returns all the api routes for the ApiApiController func (c *ApiApiController) Routes() Routes { return Routes{ - { - "CreateDataStore", - strings.ToUpper("Post"), - "/api/datastores", - c.CreateDataStore, - }, { "CreateEnvironment", strings.ToUpper("Post"), @@ -74,12 +68,6 @@ func (c *ApiApiController) Routes() Routes { "/api/transactions", c.CreateTransaction, }, - { - "DeleteDataStore", - strings.ToUpper("Delete"), - "/api/datastores/{dataStoreId}", - c.DeleteDataStore, - }, { "DeleteEnvironment", strings.ToUpper("Delete"), @@ -134,24 +122,6 @@ func (c *ApiApiController) Routes() Routes { "/api/expressions/resolve", c.ExpressionResolve, }, - { - "GetDataStore", - strings.ToUpper("Get"), - "/api/datastores/{dataStoreId}", - c.GetDataStore, - }, - { - "GetDataStoreDefinitionFile", - strings.ToUpper("Get"), - "/api/datastores/{dataStoreId}/definition.yaml", - c.GetDataStoreDefinitionFile, - }, - { - "GetDataStores", - strings.ToUpper("Get"), - "/api/datastores", - c.GetDataStores, - }, { "GetEnvironment", strings.ToUpper("Get"), @@ -308,12 +278,6 @@ func (c *ApiApiController) Routes() Routes { "/api/config/connection", c.TestConnection, }, - { - "UpdateDataStore", - strings.ToUpper("Put"), - "/api/datastores/{dataStoreId}", - c.UpdateDataStore, - }, { "UpdateEnvironment", strings.ToUpper("Put"), @@ -341,30 +305,6 @@ func (c *ApiApiController) Routes() Routes { } } -// CreateDataStore - Create a new Data Store -func (c *ApiApiController) CreateDataStore(w http.ResponseWriter, r *http.Request) { - dataStoreParam := DataStore{} - d := json.NewDecoder(r.Body) - d.DisallowUnknownFields() - if err := d.Decode(&dataStoreParam); err != nil { - c.errorHandler(w, r, &ParsingError{Err: err}, nil) - return - } - if err := AssertDataStoreRequired(dataStoreParam); err != nil { - c.errorHandler(w, r, err, nil) - return - } - result, err := c.service.CreateDataStore(r.Context(), dataStoreParam) - // If an error occurred, encode the error with the status code - if err != nil { - c.errorHandler(w, r, err, &result) - return - } - // If no error, encode the body and the result code - EncodeJSONResponse(result.Body, &result.Code, w) - -} - // CreateEnvironment - Create new environment func (c *ApiApiController) CreateEnvironment(w http.ResponseWriter, r *http.Request) { environmentParam := Environment{} @@ -437,22 +377,6 @@ func (c *ApiApiController) CreateTransaction(w http.ResponseWriter, r 
*http.Requ } -// DeleteDataStore - Delete a Data Store -func (c *ApiApiController) DeleteDataStore(w http.ResponseWriter, r *http.Request) { - params := mux.Vars(r) - dataStoreIdParam := params["dataStoreId"] - - result, err := c.service.DeleteDataStore(r.Context(), dataStoreIdParam) - // If an error occurred, encode the error with the status code - if err != nil { - c.errorHandler(w, r, err, &result) - return - } - // If no error, encode the body and the result code - EncodeJSONResponse(result.Body, &result.Code, w) - -} - // DeleteEnvironment - delete a environment func (c *ApiApiController) DeleteEnvironment(w http.ResponseWriter, r *http.Request) { params := mux.Vars(r) @@ -648,65 +572,6 @@ func (c *ApiApiController) ExpressionResolve(w http.ResponseWriter, r *http.Requ } -// GetDataStore - Get a Data Store -func (c *ApiApiController) GetDataStore(w http.ResponseWriter, r *http.Request) { - params := mux.Vars(r) - dataStoreIdParam := params["dataStoreId"] - - result, err := c.service.GetDataStore(r.Context(), dataStoreIdParam) - // If an error occurred, encode the error with the status code - if err != nil { - c.errorHandler(w, r, err, &result) - return - } - // If no error, encode the body and the result code - EncodeJSONResponse(result.Body, &result.Code, w) - -} - -// GetDataStoreDefinitionFile - Get the data store definition as an YAML file -func (c *ApiApiController) GetDataStoreDefinitionFile(w http.ResponseWriter, r *http.Request) { - params := mux.Vars(r) - dataStoreIdParam := params["dataStoreId"] - - result, err := c.service.GetDataStoreDefinitionFile(r.Context(), dataStoreIdParam) - // If an error occurred, encode the error with the status code - if err != nil { - c.errorHandler(w, r, err, &result) - return - } - // If no error, encode the body and the result code - EncodeJSONResponse(result.Body, &result.Code, w) - -} - -// GetDataStores - Get all Data Stores -func (c *ApiApiController) GetDataStores(w http.ResponseWriter, r *http.Request) { - query := r.URL.Query() - takeParam, err := parseInt32Parameter(query.Get("take"), false) - if err != nil { - c.errorHandler(w, r, &ParsingError{Err: err}, nil) - return - } - skipParam, err := parseInt32Parameter(query.Get("skip"), false) - if err != nil { - c.errorHandler(w, r, &ParsingError{Err: err}, nil) - return - } - queryParam := query.Get("query") - sortByParam := query.Get("sortBy") - sortDirectionParam := query.Get("sortDirection") - result, err := c.service.GetDataStores(r.Context(), takeParam, skipParam, queryParam, sortByParam, sortDirectionParam) - // If an error occurred, encode the error with the status code - if err != nil { - c.errorHandler(w, r, err, &result) - return - } - // If no error, encode the body and the result code - EncodeJSONResponse(result.Body, &result.Code, w) - -} - // GetEnvironment - get environment func (c *ApiApiController) GetEnvironment(w http.ResponseWriter, r *http.Request) { params := mux.Vars(r) @@ -1295,33 +1160,6 @@ func (c *ApiApiController) TestConnection(w http.ResponseWriter, r *http.Request } -// UpdateDataStore - Update a Data Store -func (c *ApiApiController) UpdateDataStore(w http.ResponseWriter, r *http.Request) { - params := mux.Vars(r) - dataStoreIdParam := params["dataStoreId"] - - dataStoreParam := DataStore{} - d := json.NewDecoder(r.Body) - d.DisallowUnknownFields() - if err := d.Decode(&dataStoreParam); err != nil { - c.errorHandler(w, r, &ParsingError{Err: err}, nil) - return - } - if err := AssertDataStoreRequired(dataStoreParam); err != nil { - c.errorHandler(w, r, err, 
nil) - return - } - result, err := c.service.UpdateDataStore(r.Context(), dataStoreIdParam, dataStoreParam) - // If an error occurred, encode the error with the status code - if err != nil { - c.errorHandler(w, r, err, &result) - return - } - // If no error, encode the body and the result code - EncodeJSONResponse(result.Body, &result.Code, w) - -} - // UpdateEnvironment - update environment func (c *ApiApiController) UpdateEnvironment(w http.ResponseWriter, r *http.Request) { params := mux.Vars(r) diff --git a/server/openapi/api_resource_api.go b/server/openapi/api_resource_api.go index d6e36170e4..b78db1cb53 100644 --- a/server/openapi/api_resource_api.go +++ b/server/openapi/api_resource_api.go @@ -56,6 +56,12 @@ func (c *ResourceApiApiController) Routes() Routes { "/api/demos", c.CreateDemo, }, + { + "DeleteDataStore", + strings.ToUpper("Delete"), + "/api/datastores/{dataStoreId}", + c.DeleteDataStore, + }, { "DeleteDemo", strings.ToUpper("Delete"), @@ -68,6 +74,12 @@ func (c *ResourceApiApiController) Routes() Routes { "/api/configs/{configId}", c.GetConfiguration, }, + { + "GetDataStore", + strings.ToUpper("Get"), + "/api/datastores/{dataStoreId}", + c.GetDataStore, + }, { "GetDemo", strings.ToUpper("Get"), @@ -92,6 +104,12 @@ func (c *ResourceApiApiController) Routes() Routes { "/api/configs/{configId}", c.UpdateConfiguration, }, + { + "UpdateDataStore", + strings.ToUpper("Put"), + "/api/datastores/{dataStoreId}", + c.UpdateDataStore, + }, { "UpdateDemo", strings.ToUpper("Put"), @@ -131,6 +149,22 @@ func (c *ResourceApiApiController) CreateDemo(w http.ResponseWriter, r *http.Req } +// DeleteDataStore - Delete a Data Store +func (c *ResourceApiApiController) DeleteDataStore(w http.ResponseWriter, r *http.Request) { + params := mux.Vars(r) + dataStoreIdParam := params["dataStoreId"] + + result, err := c.service.DeleteDataStore(r.Context(), dataStoreIdParam) + // If an error occurred, encode the error with the status code + if err != nil { + c.errorHandler(w, r, err, &result) + return + } + // If no error, encode the body and the result code + EncodeJSONResponse(result.Body, &result.Code, w) + +} + // DeleteDemo - Delete a Demonstration setting func (c *ResourceApiApiController) DeleteDemo(w http.ResponseWriter, r *http.Request) { params := mux.Vars(r) @@ -163,6 +197,22 @@ func (c *ResourceApiApiController) GetConfiguration(w http.ResponseWriter, r *ht } +// GetDataStore - Get a Data Store +func (c *ResourceApiApiController) GetDataStore(w http.ResponseWriter, r *http.Request) { + params := mux.Vars(r) + dataStoreIdParam := params["dataStoreId"] + + result, err := c.service.GetDataStore(r.Context(), dataStoreIdParam) + // If an error occurred, encode the error with the status code + if err != nil { + c.errorHandler(w, r, err, &result) + return + } + // If no error, encode the body and the result code + EncodeJSONResponse(result.Body, &result.Code, w) + +} + // GetDemo - Get Demonstration setting func (c *ResourceApiApiController) GetDemo(w http.ResponseWriter, r *http.Request) { params := mux.Vars(r) @@ -248,6 +298,33 @@ func (c *ResourceApiApiController) UpdateConfiguration(w http.ResponseWriter, r } +// UpdateDataStore - Update a Data Store +func (c *ResourceApiApiController) UpdateDataStore(w http.ResponseWriter, r *http.Request) { + params := mux.Vars(r) + dataStoreIdParam := params["dataStoreId"] + + dataStoreParam := DataStore{} + d := json.NewDecoder(r.Body) + d.DisallowUnknownFields() + if err := d.Decode(&dataStoreParam); err != nil { + c.errorHandler(w, r, &ParsingError{Err: 
err}, nil) + return + } + if err := AssertDataStoreRequired(dataStoreParam); err != nil { + c.errorHandler(w, r, err, nil) + return + } + result, err := c.service.UpdateDataStore(r.Context(), dataStoreIdParam, dataStoreParam) + // If an error occurred, encode the error with the status code + if err != nil { + c.errorHandler(w, r, err, &result) + return + } + // If no error, encode the body and the result code + EncodeJSONResponse(result.Body, &result.Code, w) + +} + // UpdateDemo - Update a Demonstration setting func (c *ResourceApiApiController) UpdateDemo(w http.ResponseWriter, r *http.Request) { params := mux.Vars(r) diff --git a/server/openapi/api_resource_api_service.go b/server/openapi/api_resource_api_service.go index cf2c68b807..71efef5948 100644 --- a/server/openapi/api_resource_api_service.go +++ b/server/openapi/api_resource_api_service.go @@ -40,6 +40,20 @@ func (s *ResourceApiApiService) CreateDemo(ctx context.Context, demo Demo) (Impl return Response(http.StatusNotImplemented, nil), errors.New("CreateDemo method not implemented") } +// DeleteDataStore - Delete a Data Store +func (s *ResourceApiApiService) DeleteDataStore(ctx context.Context, dataStoreId string) (ImplResponse, error) { + // TODO - update DeleteDataStore with the required logic for this service method. + // Add api_resource_api_service.go to the .openapi-generator-ignore to avoid overwriting this service implementation when updating open api generation. + + //TODO: Uncomment the next line to return response Response(204, {}) or use other options such as http.Ok ... + //return Response(204, nil),nil + + //TODO: Uncomment the next line to return response Response(500, {}) or use other options such as http.Ok ... + //return Response(500, nil),nil + + return Response(http.StatusNotImplemented, nil), errors.New("DeleteDataStore method not implemented") +} + // DeleteDemo - Delete a Demonstration setting func (s *ResourceApiApiService) DeleteDemo(ctx context.Context, demoId string) (ImplResponse, error) { // TODO - update DeleteDemo with the required logic for this service method. @@ -74,6 +88,23 @@ func (s *ResourceApiApiService) GetConfiguration(ctx context.Context, configId s return Response(http.StatusNotImplemented, nil), errors.New("GetConfiguration method not implemented") } +// GetDataStore - Get a Data Store +func (s *ResourceApiApiService) GetDataStore(ctx context.Context, dataStoreId string) (ImplResponse, error) { + // TODO - update GetDataStore with the required logic for this service method. + // Add api_resource_api_service.go to the .openapi-generator-ignore to avoid overwriting this service implementation when updating open api generation. + + //TODO: Uncomment the next line to return response Response(200, DataStore{}) or use other options such as http.Ok ... + //return Response(200, DataStore{}), nil + + //TODO: Uncomment the next line to return response Response(404, {}) or use other options such as http.Ok ... + //return Response(404, nil),nil + + //TODO: Uncomment the next line to return response Response(500, {}) or use other options such as http.Ok ... + //return Response(500, nil),nil + + return Response(http.StatusNotImplemented, nil), errors.New("GetDataStore method not implemented") +} + // GetDemo - Get Demonstration setting func (s *ResourceApiApiService) GetDemo(ctx context.Context, demoId string) (ImplResponse, error) { // TODO - update GetDemo with the required logic for this service method. 
@@ -139,6 +170,23 @@ func (s *ResourceApiApiService) UpdateConfiguration(ctx context.Context, configI return Response(http.StatusNotImplemented, nil), errors.New("UpdateConfiguration method not implemented") } +// UpdateDataStore - Update a Data Store +func (s *ResourceApiApiService) UpdateDataStore(ctx context.Context, dataStoreId string, dataStore DataStore) (ImplResponse, error) { + // TODO - update UpdateDataStore with the required logic for this service method. + // Add api_resource_api_service.go to the .openapi-generator-ignore to avoid overwriting this service implementation when updating open api generation. + + //TODO: Uncomment the next line to return response Response(204, {}) or use other options such as http.Ok ... + //return Response(204, nil),nil + + //TODO: Uncomment the next line to return response Response(400, {}) or use other options such as http.Ok ... + //return Response(400, nil),nil + + //TODO: Uncomment the next line to return response Response(500, {}) or use other options such as http.Ok ... + //return Response(500, nil),nil + + return Response(http.StatusNotImplemented, nil), errors.New("UpdateDataStore method not implemented") +} + // UpdateDemo - Update a Demonstration setting func (s *ResourceApiApiService) UpdateDemo(ctx context.Context, demoId string, demo Demo) (ImplResponse, error) { // TODO - update UpdateDemo with the required logic for this service method. diff --git a/server/openapi/model_data_store.go b/server/openapi/model_data_store.go index 04890cd21d..c908e95556 100644 --- a/server/openapi/model_data_store.go +++ b/server/openapi/model_data_store.go @@ -20,7 +20,7 @@ type DataStore struct { Type SupportedDataStores `json:"type"` - IsDefault bool `json:"isDefault,omitempty"` + Default bool `json:"default,omitempty"` Jaeger GrpcClientSettings `json:"jaeger,omitempty"` diff --git a/server/otlp/ingester.go b/server/otlp/ingester.go index d99d3109ea..0ca47204f4 100644 --- a/server/otlp/ingester.go +++ b/server/otlp/ingester.go @@ -8,6 +8,7 @@ import ( "github.com/kubeshop/tracetest/server/executor" "github.com/kubeshop/tracetest/server/model" "github.com/kubeshop/tracetest/server/model/events" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" "github.com/kubeshop/tracetest/server/traces" "go.opentelemetry.io/otel/trace" pb "go.opentelemetry.io/proto/otlp/collector/trace/v1" @@ -17,17 +18,19 @@ import ( type ingester struct { db model.Repository eventEmitter executor.EventEmitter + dsRepo *datastoreresource.Repository } -func NewIngester(db model.Repository, eventEmitter executor.EventEmitter) ingester { +func NewIngester(db model.Repository, eventEmitter executor.EventEmitter, dsRepo *datastoreresource.Repository) ingester { return ingester{ db: db, eventEmitter: eventEmitter, + dsRepo: dsRepo, } } func (i ingester) Ingest(ctx context.Context, request *pb.ExportTraceServiceRequest, requestType string) (*pb.ExportTraceServiceResponse, error) { - ds, err := i.db.DefaultDataStore(ctx) + ds, err := i.dsRepo.Current(ctx) if err != nil || !ds.IsOTLPBasedProvider() { fmt.Println("OTLP server is not enabled. 
Ignoring request") diff --git a/server/provisioning/provisioning_test.go b/server/provisioning/provisioning_test.go index fd95c4e406..384ca1b9d4 100644 --- a/server/provisioning/provisioning_test.go +++ b/server/provisioning/provisioning_test.go @@ -10,15 +10,12 @@ import ( "github.com/kubeshop/tracetest/server/config/configresource" "github.com/kubeshop/tracetest/server/config/demoresource" "github.com/kubeshop/tracetest/server/executor/pollingprofile" - "github.com/kubeshop/tracetest/server/model" "github.com/kubeshop/tracetest/server/provisioning" "github.com/kubeshop/tracetest/server/resourcemanager" - "github.com/kubeshop/tracetest/server/testdb" "github.com/kubeshop/tracetest/server/testmock" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "go.opentelemetry.io/collector/config/configgrpc" - "go.opentelemetry.io/collector/config/configtls" ) func TestFromFile(t *testing.T) { @@ -30,7 +27,8 @@ func TestFromFile(t *testing.T) { assert.ErrorIs(t, err, provisioning.ErrFileNotExists) }) - db := testmock.GetRawTestingDatabase() + db := testmock.CreateMigratedDatabase() + defer db.Close() for _, c := range cases { t.Run(c.name, func(t *testing.T) { @@ -50,7 +48,8 @@ func TestFromFile(t *testing.T) { } func TestFromEnv(t *testing.T) { - db := testmock.GetRawTestingDatabase() + db := testmock.CreateMigratedDatabase() + defer db.Close() t.Run("Empty", func(t *testing.T) { provisioner := provisioning.New() @@ -89,11 +88,10 @@ func TestFromEnv(t *testing.T) { }) }) } - } type expectations struct { - dataStore *model.DataStore + dataStore *datastoreresource.DataStore config *configresource.Config pollingprofile *pollingprofile.PollingProfile demos []demoresource.Demo @@ -104,12 +102,12 @@ type provisioningFixture struct { configs *configresource.Repository pollingProfiles *pollingprofile.Repository demos *demoresource.Repository - dataStores model.DataStoreRepository + dataStores *datastoreresource.Repository } func (f provisioningFixture) assert(t *testing.T, expected expectations) { if expected.dataStore != nil { - actual, err := f.dataStores.DefaultDataStore(context.TODO()) + actual, err := f.dataStores.Current(context.TODO()) require.NoError(t, err) // ignore ID for assertion @@ -163,15 +161,11 @@ func (f provisioningFixture) assert(t *testing.T, expected expectations) { } func setup(db *sql.DB) provisioningFixture { - testDB, err := testdb.Postgres(testdb.WithDB(db)) - if err != nil { - panic(err) - } f := provisioningFixture{ configs: configresource.NewRepository(db), pollingProfiles: pollingprofile.NewRepository(db), demos: demoresource.NewRepository(db), - dataStores: testDB, + dataStores: datastoreresource.NewRepository(db), } configManager := resourcemanager.New[configresource.Config]( @@ -195,11 +189,11 @@ func setup(db *sql.DB) provisioningFixture { resourcemanager.WithOperations(demoresource.Operations...), ) - dataStoreManager := resourcemanager.New[testdb.DataStoreResource]( - testdb.DataStoreResourceName, - testdb.DataStoreResourceNamePlural, - testdb.NewDataStoreResourceProvisioner(f.dataStores), - resourcemanager.WithOperations(resourcemanager.OperationNoop), + dataStoreManager := resourcemanager.New[datastoreresource.DataStore]( + datastoreresource.ResourceName, + datastoreresource.ResourceNamePlural, + f.dataStores, + resourcemanager.WithOperations(datastoreresource.Operations...), ) f.provisioner = provisioning.New(provisioning.WithResourceProvisioners( @@ -221,14 +215,14 @@ var 
cases = []struct { name: "AllSettings", file: "./testdata/all_settings.yaml", expectations: expectations{ - dataStore: &model.DataStore{ - Name: "Jaeger", - IsDefault: true, - Type: model.DataStoreTypeJaeger, - Values: model.DataStoreValues{ - Jaeger: &configgrpc.GRPCClientSettings{ - Endpoint: "jaeger-query:16685", - TLSSetting: configtls.TLSClientSetting{Insecure: true}, + dataStore: &datastoreresource.DataStore{ + Name: "Jaeger", + Default: true, + Type: datastoreresource.DataStoreTypeJaeger, + Values: datastoreresource.DataStoreValues{ + Jaeger: &datastoreresource.GRPCClientSettings{ + Endpoint: "jaeger-query:16685", + TLS: &datastoreresource.TLS{Insecure: true}, }, }, }, @@ -269,14 +263,14 @@ var cases = []struct { name: "JaegerGRPC", file: "./testdata/jaeger_grpc.yaml", expectations: expectations{ - dataStore: &model.DataStore{ - Name: "Jaeger", - IsDefault: true, - Type: model.DataStoreTypeJaeger, - Values: model.DataStoreValues{ - Jaeger: &configgrpc.GRPCClientSettings{ - Endpoint: "jaeger-query:16685", - TLSSetting: configtls.TLSClientSetting{Insecure: true}, + dataStore: &datastoreresource.DataStore{ + Name: "Jaeger", + Default: true, + Type: datastoreresource.DataStoreTypeJaeger, + Values: datastoreresource.DataStoreValues{ + Jaeger: &datastoreresource.GRPCClientSettings{ + Endpoint: "jaeger-query:16685", + TLS: &datastoreresource.TLS{Insecure: true}, }, }, }, @@ -286,15 +280,15 @@ var cases = []struct { name: "TempoGRPC", file: "./testdata/tempo_grpc.yaml", expectations: expectations{ - dataStore: &model.DataStore{ - Name: "Tempo (gRPC)", - IsDefault: true, - Type: model.DataStoreTypeTempo, - Values: model.DataStoreValues{ - Tempo: &model.BaseClientConfig{ - Grpc: configgrpc.GRPCClientSettings{ - Endpoint: "tempo:9095", - TLSSetting: configtls.TLSClientSetting{Insecure: true}, + dataStore: &datastoreresource.DataStore{ + Name: "Tempo (gRPC)", + Default: true, + Type: datastoreresource.DataStoreTypeTempo, + Values: datastoreresource.DataStoreValues{ + Tempo: &datastoreresource.MultiChannelClientConfig{ + Grpc: &datastoreresource.GRPCClientSettings{ + Endpoint: "tempo:9095", + TLS: &datastoreresource.TLS{Insecure: true}, }, }, }, @@ -305,15 +299,15 @@ var cases = []struct { name: "TempoHTTP", file: "./testdata/tempo_http.yaml", expectations: expectations{ - dataStore: &model.DataStore{ - Name: "Tempo (HTTP)", - IsDefault: true, - Type: model.DataStoreTypeTempo, - Values: model.DataStoreValues{ - Tempo: &model.BaseClientConfig{ - Http: model.HttpClientConfig{ - Url: "tempo:80", - TLSSetting: configtls.TLSClientSetting{Insecure: true}, + dataStore: &datastoreresource.DataStore{ + Name: "Tempo (HTTP)", + Default: true, + Type: datastoreresource.DataStoreTypeTempo, + Values: datastoreresource.DataStoreValues{ + Tempo: &datastoreresource.MultiChannelClientConfig{ + Http: &datastoreresource.HttpClientConfig{ + Url: "tempo:80", + TLS: &datastoreresource.TLS{Insecure: true}, }, }, }, @@ -324,12 +318,12 @@ var cases = []struct { name: "OpenSearch", file: "./testdata/opensearch.yaml", expectations: expectations{ - dataStore: &model.DataStore{ - Name: "OpenSearch", - IsDefault: true, - Type: model.DataStoreTypeOpenSearch, - Values: model.DataStoreValues{ - OpenSearch: &model.ElasticSearchDataStoreConfig{ + dataStore: &datastoreresource.DataStore{ + Name: "OpenSearch", + Default: true, + Type: datastoreresource.DataStoreTypeOpenSearch, + Values: datastoreresource.DataStoreValues{ + OpenSearch: &datastoreresource.ElasticSearchConfig{ Addresses: []string{"http://opensearch:9200"}, Index: 
"traces", }, @@ -341,12 +335,12 @@ var cases = []struct { name: "SignalFX", file: "./testdata/signalfx.yaml", expectations: expectations{ - dataStore: &model.DataStore{ - Name: "SignalFX", - IsDefault: true, - Type: model.DataStoreTypeSignalFX, - Values: model.DataStoreValues{ - SignalFx: &model.SignalFXDataStoreConfig{ + dataStore: &datastoreresource.DataStore{ + Name: "SignalFX", + Default: true, + Type: datastoreresource.DataStoreTypeSignalFX, + Values: datastoreresource.DataStoreValues{ + SignalFx: &datastoreresource.SignalFXConfig{ Token: "thetoken", Realm: "us1", }, @@ -355,15 +349,15 @@ var cases = []struct { }, }, { - name: "ElasitcAPM", + name: "ElasticAPM", file: "./testdata/elastic_apm.yaml", expectations: expectations{ - dataStore: &model.DataStore{ - Name: "elastic APM", - IsDefault: true, - Type: model.DataStoreTypeElasticAPM, - Values: model.DataStoreValues{ - ElasticApm: &model.ElasticSearchDataStoreConfig{ + dataStore: &datastoreresource.DataStore{ + Name: "elastic APM", + Default: true, + Type: datastoreresource.DataStoreTypeElasticAPM, + Values: datastoreresource.DataStoreValues{ + ElasticApm: &datastoreresource.ElasticSearchConfig{ Addresses: []string{"https://es01:9200"}, Username: "elastic", Password: "changeme", diff --git a/server/resourcemanager/operations.go b/server/resourcemanager/operations.go index 44b660182b..9a30a27a97 100644 --- a/server/resourcemanager/operations.go +++ b/server/resourcemanager/operations.go @@ -63,6 +63,10 @@ type Provision[T ResourceSpec] interface { IDSetter[T] } +type Current[T ResourceSpec] interface { + Current(context.Context) (T, error) +} + type resourceHandler[T ResourceSpec] struct { SetID func(T, id.ID) T List func(_ context.Context, take, skip int, query, sortBy, sortDirection string) ([]T, error) diff --git a/server/resourcemanager/testutil/operations_delete.go b/server/resourcemanager/testutil/operations_delete.go index e4c0c3ff58..7698800b29 100644 --- a/server/resourcemanager/testutil/operations_delete.go +++ b/server/resourcemanager/testutil/operations_delete.go @@ -9,6 +9,7 @@ import ( rm "github.com/kubeshop/tracetest/server/resourcemanager" "github.com/stretchr/testify/require" + "golang.org/x/exp/slices" ) func buildDeleteRequest(rt ResourceTypeTest, ct contentTypeConverter, testServer *httptest.Server, t *testing.T) *http.Request { @@ -31,6 +32,10 @@ var deleteSuccessOperation = buildSingleStepOperation(singleStepOperationTester{ name: OperationDeleteSuccess, neededForOperation: rm.OperationDelete, postAssert: func(t *testing.T, ct contentTypeConverter, rt ResourceTypeTest, testServer *httptest.Server) { + if slices.Contains(rt.operationsWithoutPostAssert, OperationDeleteSuccess) { + return + } + req := buildGetRequest(rt, ct, testServer, t) resp := doRequest(t, req, ct.contentType, testServer) require.Equal(t, 404, resp.StatusCode) diff --git a/server/resourcemanager/testutil/test_resource.go b/server/resourcemanager/testutil/test_resource.go index 1ad7ce3ad0..aaafaa369b 100644 --- a/server/resourcemanager/testutil/test_resource.go +++ b/server/resourcemanager/testutil/test_resource.go @@ -24,13 +24,15 @@ type ResourceTypeTest struct { SampleJSONUpdated string // private fields - sortFields []string - customJSONComparer func(t require.TestingT, operation Operation, firstValue, secondValue string) + sortFields []string + customJSONComparer func(t require.TestingT, operation Operation, firstValue, secondValue string) + operationsWithoutPostAssert []Operation } type config struct { - operations operationTesters - 
customJSONComparer func(t require.TestingT, operation Operation, firstValue, secondValue string) + operations operationTesters + operationsWithoutPostAssert []Operation + customJSONComparer func(t require.TestingT, operation Operation, firstValue, secondValue string) } type testOption func(*config) @@ -41,6 +43,12 @@ func ExcludeOperations(ops ...Operation) testOption { } } +func IgnorePostAssertForOperations(ops ...Operation) testOption { + return func(c *config) { + c.operationsWithoutPostAssert = ops + } +} + func JSONComparer(comparer func(t require.TestingT, operation Operation, firstValue, secondValue string)) testOption { return func(c *config) { c.customJSONComparer = comparer @@ -68,6 +76,9 @@ func TestResourceType(t *testing.T, rt ResourceTypeTest, opts ...testOption) { } rt.customJSONComparer = cfg.customJSONComparer + // consider operationsWithoutPostAssert option + rt.operationsWithoutPostAssert = cfg.operationsWithoutPostAssert + TestResourceTypeOperations(t, rt, cfg.operations) } diff --git a/server/testdb/data_stores.go b/server/testdb/data_stores.go deleted file mode 100644 index 17e077db01..0000000000 --- a/server/testdb/data_stores.go +++ /dev/null @@ -1,410 +0,0 @@ -package testdb - -import ( - "context" - "database/sql" - "encoding/json" - "fmt" - "strings" - "time" - - "github.com/fluidtruck/deepcopy" - "github.com/kubeshop/tracetest/server/model" - "github.com/kubeshop/tracetest/server/pkg/id" -) - -var _ model.DataStoreRepository = &postgresDB{} - -const ( - insertIntoDataStoresQuery = ` - INSERT INTO data_stores ( - "id", - "name", - "type", - "is_default", - "values", - "created_at" - ) VALUES ($1, $2, $3, $4, $5, $6)` - - updateIntoDataStoresQuery = ` - UPDATE data_stores SET - "id" = $2, - "name" = $3, - "type" = $4, - "is_default" = $5, - "values" = $6, - "created_at" = $7 - WHERE id = $1 - ` - - updateAllDefaultDataStoresQuery = ` - UPDATE data_stores SET "is_default" = false - ` - - getFromDataStoresQuery = ` - SELECT - d.id, - d.name, - d.type, - d.is_default, - d.values, - d.created_at - FROM data_stores d -` - - deleteFromDataStoresQuery = "DELETE FROM data_stores WHERE id = $1" - - idExistsFromDataStoresQuery = ` - SELECT COUNT(*) > 0 as exists FROM data_stores WHERE id = $1 - ` - - countFromDataStoresQuery = ` - SELECT COUNT(*) FROM data_stores d - ` -) - -func (td *postgresDB) CreateDataStore(ctx context.Context, dataStore model.DataStore) (model.DataStore, error) { - dataStore.ID = IDGen.ID().String() - dataStore.CreatedAt = time.Now() - - return td.insertIntoDataStores(ctx, dataStore) -} - -func (td *postgresDB) UpdateDataStore(ctx context.Context, dataStore model.DataStore) (model.DataStore, error) { - oldDataStore, err := td.GetDataStore(ctx, dataStore.ID) - if err != nil { - return model.DataStore{}, fmt.Errorf("could not get the data store while updating: %w", err) - } - - // keep the same creation date to keep sort order - dataStore.CreatedAt = oldDataStore.CreatedAt - dataStore.ID = oldDataStore.ID - - return td.updateIntoDataStores(ctx, dataStore, oldDataStore.ID) -} - -func (td *postgresDB) DeleteDataStore(ctx context.Context, dataStore model.DataStore) error { - tx, err := td.db.BeginTx(ctx, nil) - if err != nil { - return fmt.Errorf("sql BeginTx: %w", err) - } - - _, err = tx.ExecContext(ctx, deleteFromDataStoresQuery, dataStore.ID) - - if err != nil { - tx.Rollback() - return fmt.Errorf("sql error: %w", err) - } - - err = tx.Commit() - if err != nil { - return fmt.Errorf("sql Commit: %w", err) - } - - return nil -} - -func (td *postgresDB) 
DefaultDataStore(ctx context.Context) (model.DataStore, error) { - stmt, err := td.db.Prepare(getFromDataStoresQuery + " WHERE d.is_default = true ORDER BY created_at DESC LIMIT 1") - - if err != nil { - return model.DataStore{}, fmt.Errorf("prepare: %w", err) - } - defer stmt.Close() - - dataStore, err := td.readDataStoreRow(ctx, stmt.QueryRowContext(ctx)) - // if no default is found, assume nothing is configured, return empty DS without error - if err != nil && err != ErrNotFound { - return model.DataStore{}, err - } - - return dataStore, nil -} - -func (td *postgresDB) GetDataStore(ctx context.Context, id string) (model.DataStore, error) { - stmt, err := td.db.Prepare(getFromDataStoresQuery + " WHERE d.id = $1") - - if err != nil { - return model.DataStore{}, fmt.Errorf("prepare: %w", err) - } - defer stmt.Close() - - dataStore, err := td.readDataStoreRow(ctx, stmt.QueryRowContext(ctx, id)) - if err != nil { - return model.DataStore{}, err - } - - return dataStore, nil -} - -func (td *postgresDB) GetDataStores(ctx context.Context, take, skip int32, query, sortBy, sortDirection string) (model.List[model.DataStore], error) { - hasSearchQuery := query != "" - cleanSearchQuery := "%" + strings.ReplaceAll(query, " ", "%") + "%" - params := []any{take, skip} - - sql := getFromDataStoresQuery - - const condition = "WHERE (d.name ilike $3) " - if hasSearchQuery { - params = append(params, cleanSearchQuery) - sql += condition - } - - sortingFields := map[string]string{ - "created": "d.created_at", - "name": "d.name", - } - - sql = sortQuery(sql, sortBy, sortDirection, sortingFields) - sql += ` LIMIT $1 OFFSET $2 ` - - stmt, err := td.db.Prepare(sql) - if err != nil { - return model.List[model.DataStore]{}, err - } - defer stmt.Close() - - rows, err := stmt.QueryContext(ctx, params...) - if err != nil { - return model.List[model.DataStore]{}, err - } - - dataStores := []model.DataStore{} - - for rows.Next() { - dataStore, err := td.readDataStoreRow(ctx, rows) - if err != nil { - return model.List[model.DataStore]{}, err - } - - dataStores = append(dataStores, dataStore) - } - - count, err := td.countDataStores(ctx, condition, cleanSearchQuery) - if err != nil { - return model.List[model.DataStore]{}, err - } - - return model.List[model.DataStore]{ - Items: dataStores, - TotalCount: count, - }, nil -} - -func (td *postgresDB) DataStoreIDExists(ctx context.Context, id string) (bool, error) { - exists := false - - row := td.db.QueryRowContext(ctx, idExistsFromDataStoresQuery, id) - - err := row.Scan(&exists) - - return exists, err -} - -func (td *postgresDB) readDataStoreRow(ctx context.Context, row scanner) (model.DataStore, error) { - dataStore := model.DataStore{} - - var ( - jsonValues []byte - ) - err := row.Scan( - &dataStore.ID, - &dataStore.Name, - &dataStore.Type, - &dataStore.IsDefault, - &jsonValues, - &dataStore.CreatedAt, - ) - - switch err { - case sql.ErrNoRows: - return model.DataStore{}, ErrNotFound - case nil: - err = json.Unmarshal(jsonValues, &dataStore.Values) - if err != nil { - return model.DataStore{}, fmt.Errorf("cannot parse data store: %w", err) - } - - return dataStore, nil - default: - return model.DataStore{}, err - } -} - -func (td *postgresDB) countDataStores(ctx context.Context, condition, cleanSearchQuery string) (int, error) { - var ( - count int - params []any - ) - - sql := countFromDataStoresQuery - if cleanSearchQuery != "" { - params = []any{cleanSearchQuery} - sql += strings.ReplaceAll(condition, "$3", "$1") - } - - err := td.db. 
- QueryRowContext(ctx, sql, params...). - Scan(&count) - - if err != nil { - return 0, err - } - return count, nil -} - -func (td *postgresDB) insertIntoDataStores(ctx context.Context, dataStore model.DataStore) (model.DataStore, error) { - tx, err := td.db.BeginTx(ctx, nil) - if err != nil { - return model.DataStore{}, fmt.Errorf("sql BeginTx: %w", err) - } - - if dataStore.IsDefault { - _, err = tx.ExecContext(ctx, updateAllDefaultDataStoresQuery) - - if err != nil { - tx.Rollback() - return model.DataStore{}, fmt.Errorf("sql exec: %w", err) - } - } - - jsonValues, err := json.Marshal(dataStore.Values) - if err != nil { - return model.DataStore{}, fmt.Errorf("encoding error: %w", err) - } - - _, err = tx.ExecContext(ctx, insertIntoDataStoresQuery, - dataStore.ID, - dataStore.Name, - dataStore.Type, - dataStore.IsDefault, - jsonValues, - dataStore.CreatedAt) - - if err != nil { - tx.Rollback() - return model.DataStore{}, fmt.Errorf("sql exec: %w", err) - } - - err = tx.Commit() - if err != nil { - return model.DataStore{}, fmt.Errorf("commit: %w", err) - } - - return dataStore, nil -} - -func (td *postgresDB) updateIntoDataStores(ctx context.Context, dataStore model.DataStore, oldId string) (model.DataStore, error) { - tx, err := td.db.BeginTx(ctx, nil) - if err != nil { - return model.DataStore{}, fmt.Errorf("sql BeginTx: %w", err) - } - - if dataStore.IsDefault { - _, err = tx.ExecContext(ctx, updateAllDefaultDataStoresQuery) - - if err != nil { - tx.Rollback() - return model.DataStore{}, fmt.Errorf("sql exec: %w", err) - } - } - - jsonValues, err := json.Marshal(dataStore.Values) - if err != nil { - return model.DataStore{}, fmt.Errorf("encoding error: %w", err) - } - - _, err = tx.ExecContext(ctx, updateIntoDataStoresQuery, - oldId, - dataStore.ID, - dataStore.Name, - dataStore.Type, - dataStore.IsDefault, - jsonValues, - dataStore.CreatedAt) - - if err != nil { - tx.Rollback() - return model.DataStore{}, fmt.Errorf("sql exec: %w", err) - } - - err = tx.Commit() - if err != nil { - return model.DataStore{}, fmt.Errorf("commit: %w", err) - } - - return dataStore, nil -} - -type DataStoreResource struct { - ID id.ID - Name string - Type model.DataStoreType - IsDefault bool - - model.DataStoreValues `mapstructure:",squash"` -} - -func (dsr DataStoreResource) toModel() model.DataStore { - actual := model.DataStore{} - - deepcopy.DeepCopy(dsr, &actual) - if dsr.Jaeger != nil { - deepcopy.DeepCopy(dsr.Jaeger, &actual.Values.Jaeger) - deepcopy.DeepCopy(dsr.Jaeger.TLSSetting, &actual.Values.Jaeger.TLSSetting) - } - if dsr.Tempo != nil { - deepcopy.DeepCopy(dsr.Tempo, &actual.Values.Tempo) - deepcopy.DeepCopy(dsr.Tempo.Grpc.TLSSetting, &actual.Values.Tempo.Grpc.TLSSetting) - deepcopy.DeepCopy(dsr.Tempo.Http.TLSSetting, &actual.Values.Tempo.Http.TLSSetting) - } - if dsr.OpenSearch != nil { - deepcopy.DeepCopy(dsr.OpenSearch, &actual.Values.OpenSearch) - } - if dsr.SignalFx != nil { - deepcopy.DeepCopy(dsr.SignalFx, &actual.Values.SignalFx) - } - - if dsr.ElasticApm != nil { - deepcopy.DeepCopy(dsr.ElasticApm, &actual.Values.ElasticApm) - } - if dsr.AwsXRay != nil { - deepcopy.DeepCopy(dsr.AwsXRay, &actual.Values.AwsXRay) - } - - return actual -} - -func (dsr DataStoreResource) HasID() bool { - return dsr.toModel().HasID() -} - -func (dsr DataStoreResource) Validate() error { - return dsr.toModel().Validate() -} - -const ( - DataStoreResourceName = "DataStore" - DataStoreResourceNamePlural = "DataStores" -) - -// at the moment only used for provisioning -type DataStoreResourceProvisioner struct 
{ - repo model.DataStoreRepository -} - -func NewDataStoreResourceProvisioner(repo model.DataStoreRepository) DataStoreResourceProvisioner { - return DataStoreResourceProvisioner{repo} -} - -func (dsp DataStoreResourceProvisioner) SetID(dsr DataStoreResource, id id.ID) DataStoreResource { - dsr.ID = id - return dsr -} - -func (dsp DataStoreResourceProvisioner) Provision(ctx context.Context, ds DataStoreResource) error { - ds.IsDefault = true - - _, err := dsp.repo.CreateDataStore(ctx, ds.toModel()) - return err -} diff --git a/server/testdb/data_stores_test.go b/server/testdb/data_stores_test.go deleted file mode 100644 index fb34369f2c..0000000000 --- a/server/testdb/data_stores_test.go +++ /dev/null @@ -1,208 +0,0 @@ -package testdb_test - -import ( - "context" - "database/sql" - "testing" - - "github.com/gorilla/mux" - "github.com/kubeshop/tracetest/server/model" - "github.com/kubeshop/tracetest/server/resourcemanager" - rmtests "github.com/kubeshop/tracetest/server/resourcemanager/testutil" - "github.com/kubeshop/tracetest/server/testdb" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "go.opentelemetry.io/collector/config/configgrpc" -) - -func TestCreateDataStore(t *testing.T) { - db, clean := getDB() - defer clean() - - dataStore := model.DataStore{ - Name: "datastore", - Type: "jaeger", - IsDefault: true, - Values: model.DataStoreValues{ - Jaeger: &configgrpc.GRPCClientSettings{}, - Tempo: &model.BaseClientConfig{}, - SignalFx: &model.SignalFXDataStoreConfig{}, - OpenSearch: &model.ElasticSearchDataStoreConfig{}, - ElasticApm: &model.ElasticSearchDataStoreConfig{}, - AwsXRay: &model.AWSXRayDataStoreConfig{}, - }, - } - - created, err := db.CreateDataStore(context.TODO(), dataStore) - require.NoError(t, err) - - actual, err := db.GetDataStore(context.TODO(), created.ID) - require.NoError(t, err) - assert.Equal(t, dataStore.Name, actual.Name) - assert.Equal(t, dataStore.Type, actual.Type) - assert.Equal(t, dataStore.IsDefault, actual.IsDefault) - assert.Equal(t, dataStore.Values, actual.Values) - assert.False(t, actual.CreatedAt.IsZero()) -} - -func TestCreateMultipleDataStores(t *testing.T) { - db, clean := getDB() - defer clean() - - createDataStore(t, db, "datastore1") - createDataStore(t, db, "datastore2") - createDataStore(t, db, "datastore3") - - actual, err := db.GetDataStores(context.TODO(), 20, 0, "", "", "") - require.NoError(t, err) - - assert.Len(t, actual.Items, 3) - assert.Equal(t, actual.TotalCount, 3) - - // test one default data store - assert.Equal(t, actual.TotalCount, 3) - assert.Equal(t, "datastore3", actual.Items[0].Name) - assert.True(t, actual.Items[0].IsDefault) - assert.Equal(t, "datastore2", actual.Items[1].Name) - assert.False(t, actual.Items[1].IsDefault) - assert.Equal(t, "datastore1", actual.Items[2].Name) - assert.False(t, actual.Items[2].IsDefault) -} - -func TestDeleteDataStore(t *testing.T) { - db, clean := getDB() - defer clean() - - dataStore := createDataStore(t, db, "datastore1") - - err := db.DeleteDataStore(context.TODO(), dataStore) - require.NoError(t, err) - - actual, err := db.GetDataStore(context.TODO(), dataStore.ID) - assert.ErrorIs(t, err, testdb.ErrNotFound) - assert.Empty(t, actual) -} - -func TestUpdateDataStore(t *testing.T) { - db, clean := getDB() - defer clean() - - dataStore := createDataStore(t, db, "datastore1") - dataStore.Name = "1 v2" - dataStore.Type = "openSearch" - - _, err := db.UpdateDataStore(context.TODO(), dataStore) - require.NoError(t, err) - - latestDataStore, err := 
db.GetDataStore(context.TODO(), dataStore.ID) - assert.NoError(t, err) - assert.Equal(t, "1 v2", latestDataStore.Name) - assert.Equal(t, "openSearch", string(latestDataStore.Type)) -} - -func TestGetDataStores(t *testing.T) { - db, clean := getDB() - defer clean() - - createDataStore(t, db, "datastore1") - createDataStore(t, db, "datastore2") - createDataStore(t, db, "datastore3") - - t.Run("Order", func(t *testing.T) { - actual, err := db.GetDataStores(context.TODO(), 20, 0, "", "", "") - require.NoError(t, err) - - assert.Len(t, actual.Items, 3) - assert.Equal(t, actual.TotalCount, 3) - - // test order - assert.Equal(t, actual.TotalCount, 3) - assert.Equal(t, "datastore3", actual.Items[0].Name) - assert.Equal(t, "datastore2", actual.Items[1].Name) - assert.Equal(t, "datastore1", actual.Items[2].Name) - }) - - t.Run("Pagination", func(t *testing.T) { - actual, err := db.GetDataStores(context.TODO(), 20, 10, "", "", "") - require.NoError(t, err) - - assert.Equal(t, actual.TotalCount, 3) - assert.Len(t, actual.Items, 0) - }) - - t.Run("SortByCreated", func(t *testing.T) { - actual, err := db.GetDataStores(context.TODO(), 20, 0, "", "created", "") - require.NoError(t, err) - - // test order - assert.Equal(t, "datastore3", actual.Items[0].Name) - assert.Equal(t, "datastore2", actual.Items[1].Name) - assert.Equal(t, "datastore1", actual.Items[2].Name) - }) - - t.Run("SortByNameAsc", func(t *testing.T) { - actual, err := db.GetDataStores(context.TODO(), 20, 0, "", "name", "asc") - require.NoError(t, err) - - // test order - assert.Equal(t, "datastore1", actual.Items[0].Name) - assert.Equal(t, "datastore2", actual.Items[1].Name) - assert.Equal(t, "datastore3", actual.Items[2].Name) - }) - - t.Run("SortByNameDesc", func(t *testing.T) { - actual, err := db.GetDataStores(context.TODO(), 20, 0, "", "name", "desc") - require.NoError(t, err) - - // test order - assert.Equal(t, "datastore3", actual.Items[0].Name) - assert.Equal(t, "datastore2", actual.Items[1].Name) - assert.Equal(t, "datastore1", actual.Items[2].Name) - }) - - t.Run("SearchByName", func(t *testing.T) { - createDataStore(t, db, "VerySpecificName") - - actual, err := db.GetDataStores(context.TODO(), 10, 0, "specif", "", "") - require.NoError(t, err) - assert.Len(t, actual.Items, 1) - assert.Equal(t, actual.TotalCount, 1) - - assert.Equal(t, "VerySpecificName", actual.Items[0].Name) - }) - -} - -func TestDataStoreProvisioner(t *testing.T) { - rmtests.TestResourceType(t, rmtests.ResourceTypeTest{ - ResourceTypeSingular: testdb.DataStoreResourceName, - ResourceTypePlural: testdb.DataStoreResourceName, - RegisterManagerFn: func(router *mux.Router, db *sql.DB) resourcemanager.Manager { - dsRepo, err := testdb.Postgres(testdb.WithDB(db)) - require.NoError(t, err) - - manager := resourcemanager.New[testdb.DataStoreResource]( - testdb.DataStoreResourceName, - testdb.DataStoreResourceNamePlural, - testdb.NewDataStoreResourceProvisioner(dsRepo), - // this resource exists only for provisioning at the moment - resourcemanager.WithOperations(resourcemanager.OperationNoop), - ) - manager.RegisterRoutes(router) - - return manager - }, - SampleJSON: `{ - "type": "DataStore", - "spec": { - "id": "signalfx", - "name": "SignalFX", - "type": "signalfx", - "signalfx": { - "token": "thetoken", - "realm": "us1" - } - } - }`, - }) -} diff --git a/server/testdb/mock.go b/server/testdb/mock.go index 59db5c2136..5c287c76a6 100644 --- a/server/testdb/mock.go +++ b/server/testdb/mock.go @@ -240,49 +240,6 @@ func (m *MockRepository) UpdateTransactionRun(ctx 
context.Context, run model.Tra return args.Error(0) } -// data stores - -func (m *MockRepository) CreateDataStore(_ context.Context, dataStore model.DataStore) (model.DataStore, error) { - args := m.Called(dataStore) - return args.Get(0).(model.DataStore), args.Error(1) -} - -func (m *MockRepository) UpdateDataStore(_ context.Context, dataStore model.DataStore) (model.DataStore, error) { - args := m.Called(dataStore) - return args.Get(0).(model.DataStore), args.Error(1) -} - -func (m *MockRepository) DeleteDataStore(_ context.Context, dataStore model.DataStore) error { - args := m.Called(dataStore) - return args.Error(0) -} - -func (m *MockRepository) DataStoreIDExists(_ context.Context, id string) (bool, error) { - args := m.Called(id) - return args.Bool(0), args.Error(1) -} - -func (m *MockRepository) GetDataStore(_ context.Context, id string) (model.DataStore, error) { - args := m.Called(id) - return args.Get(0).(model.DataStore), args.Error(1) -} - -func (m *MockRepository) DefaultDataStore(_ context.Context) (model.DataStore, error) { - args := m.Called() - return args.Get(0).(model.DataStore), args.Error(1) -} - -func (m *MockRepository) GetDataStores(_ context.Context, take, skip int32, query, sortBy, sortDirection string) (model.List[model.DataStore], error) { - args := m.Called(take, skip, query, sortBy, sortDirection) - dataStores := args.Get(0).([]model.DataStore) - - list := model.List[model.DataStore]{ - Items: dataStores, - TotalCount: len(dataStores), - } - return list, args.Error(1) -} - func (m *MockRepository) CreateTestRunEvent(_ context.Context, event model.TestRunEvent) error { args := m.Called(event) return args.Error(0) diff --git a/server/testdb/postgres_test.go b/server/testdb/postgres_test.go index cfab8c1247..7a12cf4288 100644 --- a/server/testdb/postgres_test.go +++ b/server/testdb/postgres_test.go @@ -106,20 +106,3 @@ func createEnvironment(t *testing.T, db model.Repository, name string) model.Env return updated } - -func createDataStore(t *testing.T, db model.Repository, name string) model.DataStore { - t.Helper() - dataStore := model.DataStore{ - Name: name, - Type: "jaeger", - IsDefault: true, - Values: model.DataStoreValues{}, - } - - created, err := db.CreateDataStore(context.TODO(), dataStore) - if err != nil { - panic(err) - } - - return created -} diff --git a/server/tracedb/awsxray.go b/server/tracedb/awsxray.go index 90c00846b5..53c3f7d849 100644 --- a/server/tracedb/awsxray.go +++ b/server/tracedb/awsxray.go @@ -19,6 +19,7 @@ import ( "github.com/aws/aws-sdk-go/service/xray" "github.com/kubeshop/tracetest/server/model" "github.com/kubeshop/tracetest/server/tracedb/connection" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" conventions "go.opentelemetry.io/collector/semconv/v1.6.1" "go.opentelemetry.io/otel/trace" ) @@ -33,7 +34,7 @@ type awsxrayDB struct { useDefaultAuth bool } -func NewAwsXRayDB(cfg *model.AWSXRayDataStoreConfig) (TraceDB, error) { +func NewAwsXRayDB(cfg *datastoreresource.AWSXRayConfig) (TraceDB, error) { sessionCredentials := credentials.NewStaticCredentials(cfg.AccessKeyID, cfg.SecretAccessKey, cfg.SessionToken) return &awsxrayDB{ diff --git a/server/tracedb/datasource/datasource.go b/server/tracedb/datasource/datasource.go index e4cf032250..8b3fec519d 100644 --- a/server/tracedb/datasource/datasource.go +++ b/server/tracedb/datasource/datasource.go @@ -4,6 +4,7 @@ import ( "context" "github.com/kubeshop/tracetest/server/model" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" 
"google.golang.org/grpc" ) @@ -44,15 +45,15 @@ func (db *noopDataSource) TestConnection(ctx context.Context) model.ConnectionTe return model.ConnectionTestStep{} } -func New(name string, cfg *model.BaseClientConfig, callbacks Callbacks) DataSource { +func New(name string, cfg *datastoreresource.MultiChannelClientConfig, callbacks Callbacks) DataSource { sourceType := SupportedDataSource(cfg.Type) switch sourceType { default: case GRPC: - return NewGrpcClient(name, &cfg.Grpc, callbacks.GRPC) + return NewGrpcClient(name, cfg.Grpc, callbacks.GRPC) case HTTP: - return NewHttpClient(name, &cfg.Http, callbacks.HTTP) + return NewHttpClient(name, cfg.Http, callbacks.HTTP) } return &noopDataSource{} diff --git a/server/tracedb/datasource/grpc.go b/server/tracedb/datasource/grpc.go index a2ca625779..9834a043ad 100644 --- a/server/tracedb/datasource/grpc.go +++ b/server/tracedb/datasource/grpc.go @@ -6,9 +6,12 @@ import ( "github.com/kubeshop/tracetest/server/model" "github.com/kubeshop/tracetest/server/tracedb/connection" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" "github.com/pkg/errors" "go.opentelemetry.io/collector/component/componenttest" + "go.opentelemetry.io/collector/config/configcompression" "go.opentelemetry.io/collector/config/configgrpc" + "go.opentelemetry.io/collector/config/configtls" "google.golang.org/grpc" ) @@ -19,10 +22,51 @@ type GrpcClient struct { callback GrpcCallback } -func NewGrpcClient(name string, config *configgrpc.GRPCClientSettings, callback GrpcCallback) DataSource { +func convertDomainConfigToOpenTelemetryConfig(config *datastoreresource.GRPCClientSettings) *configgrpc.GRPCClientSettings { + // manual map domain fields to OTel fields + + otelConfig := &configgrpc.GRPCClientSettings{ + Endpoint: config.Endpoint, + ReadBufferSize: config.ReadBufferSize, + WriteBufferSize: config.WriteBufferSize, + WaitForReady: config.WaitForReady, + Headers: config.Headers, + BalancerName: config.BalancerName, + + Compression: configcompression.CompressionType(config.Compression), + } + + if config.TLS == nil { + return otelConfig + } + + otelConfig.TLSSetting = configtls.TLSClientSetting{ + Insecure: config.TLS.Insecure, + InsecureSkipVerify: config.TLS.InsecureSkipVerify, + ServerName: config.TLS.ServerName, + } + + if config.TLS.Settings == nil { + return otelConfig + } + + otelConfig.TLSSetting.TLSSetting = configtls.TLSSetting{ + CAFile: config.TLS.Settings.CAFile, + CertFile: config.TLS.Settings.CertFile, + KeyFile: config.TLS.Settings.KeyFile, + MinVersion: config.TLS.Settings.MinVersion, + MaxVersion: config.TLS.Settings.MaxVersion, + } + + return otelConfig +} + +func NewGrpcClient(name string, config *datastoreresource.GRPCClientSettings, callback GrpcCallback) DataSource { + otelConfig := convertDomainConfigToOpenTelemetryConfig(config) + return &GrpcClient{ name: name, - config: config, + config: otelConfig, callback: callback, } } diff --git a/server/tracedb/datasource/http.go b/server/tracedb/datasource/http.go index 72ecdc3164..09dd2074b4 100644 --- a/server/tracedb/datasource/http.go +++ b/server/tracedb/datasource/http.go @@ -14,6 +14,7 @@ import ( "github.com/kubeshop/tracetest/server/model" "github.com/kubeshop/tracetest/server/tracedb/connection" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" ) type HttpClient struct { @@ -23,11 +24,11 @@ type HttpClient struct { callback HttpCallback } -func NewHttpClient(name string, config *model.HttpClientConfig, callback HttpCallback) DataSource { +func NewHttpClient(name string, 
config *datastoreresource.HttpClientConfig, callback HttpCallback) DataSource { endpoint, _ := url.Parse(config.Url) client := &http.Client{ Transport: &http.Transport{ - TLSClientConfig: getTlsConfig(config.TLSSetting.CAFile, config.TLSSetting.Insecure), + TLSClientConfig: getTlsConfig(config.TLS.Settings.CAFile, config.TLS.Insecure), }, } diff --git a/server/tracedb/datastoreresource/main_test.go b/server/tracedb/datastoreresource/main_test.go new file mode 100644 index 0000000000..789a1eabdc --- /dev/null +++ b/server/tracedb/datastoreresource/main_test.go @@ -0,0 +1,18 @@ +package datastoreresource_test + +import ( + "os" + "testing" + + "github.com/kubeshop/tracetest/server/testmock" +) + +func TestMain(m *testing.M) { + testmock.StartTestEnvironment() + + exitVal := m.Run() + + testmock.StopTestEnvironment() + + os.Exit(exitVal) +} diff --git a/server/tracedb/datastoreresource/repository.go b/server/tracedb/datastoreresource/repository.go new file mode 100644 index 0000000000..ba842ea57e --- /dev/null +++ b/server/tracedb/datastoreresource/repository.go @@ -0,0 +1,215 @@ +package datastoreresource + +import ( + "context" + "database/sql" + "encoding/json" + "errors" + "fmt" + "time" + + "github.com/kubeshop/tracetest/server/pkg/id" + "github.com/kubeshop/tracetest/server/resourcemanager" +) + +var Operations = []resourcemanager.Operation{ + resourcemanager.OperationGet, + resourcemanager.OperationUpdate, + resourcemanager.OperationDelete, +} + +func NewRepository(db *sql.DB) *Repository { + return &Repository{db} +} + +type Repository struct { + db *sql.DB +} + +func (r *Repository) SetID(dataStore DataStore, id id.ID) DataStore { + dataStore.ID = id + return dataStore +} + +const dataStoreSingleID id.ID = "current" + +const insertQuery = ` +INSERT INTO data_stores ( + "id", + "name", + "type", + "is_default", + "values", + "created_at" +) VALUES ($1, $2, $3, $4, $5, $6)` + +const deleteQuery = `DELETE FROM data_stores WHERE "id" = $1` + +func newCreateAtDateString() string { + return time.Now().UTC().Format(time.RFC3339Nano) +} + +func (r *Repository) getCreatedAt(ctx context.Context, dataStore DataStore) (string, error) { + if dataStore.CreatedAt != "" { + // client passed date, keeping it + return dataStore.CreatedAt, nil + } + + // get datastore on the database or the default one + oldDataStore, err := r.Get(ctx, dataStore.ID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + // record not found, return a new date + return newCreateAtDateString(), nil + } + + return "", err + } + + // record found, return old date + return oldDataStore.CreatedAt, nil +} + +func (r *Repository) Update(ctx context.Context, dataStore DataStore) (DataStore, error) { + // enforce ID and default + dataStore.ID = dataStoreSingleID + dataStore.Default = true + + // reuse the created_at field for auditing purposes, + // unless the client explicitly send it + createdAt, err := r.getCreatedAt(ctx, dataStore) + if err != nil { + return DataStore{}, err + } + + dataStore.CreatedAt = createdAt + + // since we allow only one datastore, delete the table and keep one record + tx, err := r.db.BeginTx(ctx, nil) + if err != nil { + return DataStore{}, err + } + defer tx.Rollback() + + _, err = tx.ExecContext(ctx, deleteQuery, dataStoreSingleID) + if err != nil { + return DataStore{}, fmt.Errorf("datastore repository sql exec delete: %w", err) + } + + valuesJSON, err := json.Marshal(dataStore.Values) + if err != nil { + return DataStore{}, fmt.Errorf("could not marshal values field configuration: %w", err) + } 
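
The repository being introduced in this file pins everything to a single record: `Update` always rewrites the row whose ID is `current` (deleting any previous row inside the same transaction), and `Current` reads that row back. The snippet below is a minimal usage sketch, illustrative only and not part of this patch; it assumes a `*sql.DB` connected to an already-migrated Tracetest database and uses only the repository methods and domain types added in this change.

package datastoreexample

import (
	"context"
	"database/sql"
	"fmt"

	"github.com/kubeshop/tracetest/server/tracedb/datastoreresource"
)

// showCurrentDataStore writes a Jaeger data store and reads it back.
// Illustrative sketch: `db` must point at a migrated Tracetest database.
func showCurrentDataStore(ctx context.Context, db *sql.DB) error {
	repo := datastoreresource.NewRepository(db)

	// Whatever ID the caller sends, Update overwrites the single "current" record.
	_, err := repo.Update(ctx, datastoreresource.DataStore{
		Name: "Jaeger",
		Type: datastoreresource.DataStoreTypeJaeger,
		Values: datastoreresource.DataStoreValues{
			Jaeger: &datastoreresource.GRPCClientSettings{
				Endpoint: "jaeger-query:16685",
				TLS:      &datastoreresource.TLS{Insecure: true},
			},
		},
	})
	if err != nil {
		return err
	}

	// Current is a convenience wrapper around Get(ctx, "current").
	current, err := repo.Current(ctx)
	if err != nil {
		return err
	}

	fmt.Println(current.ID, current.Name) // prints: current Jaeger
	return nil
}

The delete-then-insert inside one transaction is what keeps the table at exactly one row no matter how many updates arrive.
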
+ + _, err = tx.ExecContext(ctx, insertQuery, + dataStore.ID, + dataStore.Name, + dataStore.Type, + dataStore.Default, + valuesJSON, + dataStore.CreatedAt, + ) + if err != nil { + return DataStore{}, fmt.Errorf("datastore repository sql exec create: %w", err) + } + + err = tx.Commit() + if err != nil { + return DataStore{}, fmt.Errorf("datastore repository commit: %w", err) + } + + return dataStore, nil +} + +func (r *Repository) Delete(ctx context.Context, id id.ID) error { + tx, err := r.db.BeginTx(ctx, nil) + if err != nil { + return err + } + defer tx.Rollback() + + _, err = tx.ExecContext(ctx, deleteQuery, dataStoreSingleID) + if err != nil { + return fmt.Errorf("datastore repository sql exec delete: %w", err) + } + + err = tx.Commit() + if err != nil { + return fmt.Errorf("commit: %w", err) + } + + return nil +} + +const getQuery = ` +SELECT + "id", + "name", + "type", + "is_default", + "values", + "created_at" +FROM data_stores +WHERE "id" = $1` + +func (r *Repository) Current(ctx context.Context) (DataStore, error) { + dataStore, err := r.Get(ctx, "current") + if err != nil { + return DataStore{}, fmt.Errorf("datastore repository get current: %w", err) + } + + return dataStore, nil +} + +func (r *Repository) Get(ctx context.Context, id id.ID) (DataStore, error) { + row := r.db.QueryRowContext(ctx, getQuery, id) + + dataStore, err := r.readRow(row) + if err != nil && errors.Is(err, sql.ErrNoRows) { + return DataStore{ + CreatedAt: newCreateAtDateString(), + }, nil // Assumes an empty datastore + } + if err != nil { + return DataStore{}, fmt.Errorf("datastore repository get sql query: %w", err) + } + + return dataStore, nil +} + +type scanner interface { + Scan(dest ...interface{}) error +} + +func (r *Repository) readRow(rowScanner scanner) (DataStore, error) { + var valuesJSON []byte + + dataStore := DataStore{} + + err := rowScanner.Scan( + &dataStore.ID, + &dataStore.Name, + &dataStore.Type, + &dataStore.Default, + &valuesJSON, + &dataStore.CreatedAt, + ) + + if err != nil { + return DataStore{}, err + } + + if string(valuesJSON) != "null" { + err = json.Unmarshal(valuesJSON, &dataStore.Values) + if err != nil { + return DataStore{}, fmt.Errorf("unable to parse data store values: %w", err) + } + } + + return dataStore, nil +} + +func (r *Repository) Provision(ctx context.Context, dataStore DataStore) error { + _, err := r.Update(ctx, dataStore) + return err +} diff --git a/server/tracedb/datastoreresource/repository_test.go b/server/tracedb/datastoreresource/repository_test.go new file mode 100644 index 0000000000..19c0ae75c1 --- /dev/null +++ b/server/tracedb/datastoreresource/repository_test.go @@ -0,0 +1,474 @@ +package datastoreresource_test + +import ( + "context" + "database/sql" + "testing" + + "github.com/gorilla/mux" + "github.com/kubeshop/tracetest/server/pkg/id" + "github.com/kubeshop/tracetest/server/resourcemanager" + rmtests "github.com/kubeshop/tracetest/server/resourcemanager/testutil" + datastore "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" + "github.com/stretchr/testify/require" +) + +var ( + excludedOperations = rmtests.ExcludeOperations( + rmtests.OperationUpdateNotFound, + rmtests.OperationGetNotFound, + rmtests.OperationDeleteNotFound, + ) + operationsWithoutPostAssert = rmtests.IgnorePostAssertForOperations(rmtests.OperationDeleteSuccess) + jsonComparer = rmtests.JSONComparer(compareJSONDataStores) +) + +func compareJSONDataStores(t require.TestingT, operation rmtests.Operation, firstValue, secondValue string) { + if operation == 
rmtests.OperationUpdateSuccess { + require.JSONEq(t, firstValue, secondValue) + return + } + + expected := rmtests.RemoveFieldFromJSONResource("createdAt", firstValue) + obtained := rmtests.RemoveFieldFromJSONResource("createdAt", secondValue) + + require.JSONEq(t, expected, obtained) +} + +func registerManagerFn(router *mux.Router, db *sql.DB) resourcemanager.Manager { + dataStoreRepository := datastore.NewRepository(db) + + manager := resourcemanager.New[datastore.DataStore]( + datastore.ResourceName, + datastore.ResourceNamePlural, + dataStoreRepository, + resourcemanager.WithOperations(datastore.Operations...), + resourcemanager.WithIDGen(id.GenerateID), + ) + manager.RegisterRoutes(router) + + return manager +} + +func getScenarioPreparation(sample datastore.DataStore) func(t *testing.T, op rmtests.Operation, manager resourcemanager.Manager) { + return func(t *testing.T, op rmtests.Operation, manager resourcemanager.Manager) { + repository := manager.Handler().(*datastore.Repository) + + if op == rmtests.OperationGetSuccess { + // on get scenario we need to have one data store registered + repository.Update(context.TODO(), sample) + } + } +} + +func TestDataStoreResource_AWSXRay(t *testing.T) { + sample := datastore.DataStore{ + ID: "current", + Name: "default", + Type: datastore.DataStoreTypeAwsXRay, + Default: true, + CreatedAt: "2023-03-09T17:53:10.256383Z", + Values: datastore.DataStoreValues{ + AwsXRay: &datastore.AWSXRayConfig{ + Region: "some-region", + AccessKeyID: "some-access-key", + SecretAccessKey: "some-secret-access-key", + SessionToken: "some-session-token", + UseDefaultAuth: true, + }, + }, + } + + testSpec := rmtests.ResourceTypeTest{ + ResourceTypeSingular: datastore.ResourceName, + ResourceTypePlural: datastore.ResourceNamePlural, + RegisterManagerFn: registerManagerFn, + Prepare: getScenarioPreparation(sample), + SampleJSON: `{ + "type": "DataStore", + "spec": { + "id": "current", + "name": "default", + "type": "awsxray", + "default": true, + "createdAt": "2023-03-09T17:53:10.256383Z", + "awsxray": { + "region": "some-region", + "accessKeyID": "some-access-key", + "secretAccessKey": "some-secret-access-key", + "sessionToken": "some-session-token", + "useDefaultAuth": true + } + } + }`, + SampleJSONUpdated: `{ + "type": "DataStore", + "spec": { + "id": "current", + "name": "another data store", + "type": "awsxray", + "default": true, + "createdAt": "2023-03-09T17:53:10.256383Z", + "awsxray": { + "region": "some-region-updated", + "accessKeyID": "some-access-key-updated", + "secretAccessKey": "some-access-key-updated", + "sessionToken": "some-session-token-updated", + "useDefaultAuth": true + } + } + }`, + } + + rmtests.TestResourceType(t, testSpec, excludedOperations, jsonComparer, operationsWithoutPostAssert) +} + +func TestDataStoreResource_ElasticAPM(t *testing.T) { + sample := datastore.DataStore{ + ID: "current", + Name: "default", + Type: datastore.DataStoreTypeElasticAPM, + Default: true, + CreatedAt: "2023-03-09T17:53:10.256383Z", + Values: datastore.DataStoreValues{ + ElasticApm: &datastore.ElasticSearchConfig{ + Addresses: []string{"1.2.3.4"}, + Username: "some-user", + Password: "some-password", + Index: "an-index", + Certificate: "certificate.cert", + InsecureSkipVerify: true, + }, + }, + } + + testSpec := rmtests.ResourceTypeTest{ + ResourceTypeSingular: datastore.ResourceName, + ResourceTypePlural: datastore.ResourceNamePlural, + RegisterManagerFn: registerManagerFn, + Prepare: getScenarioPreparation(sample), + SampleJSON: `{ + "type": "DataStore", + 
"spec": { + "id": "current", + "name": "default", + "type": "elasticapm", + "default": true, + "createdAt": "2023-03-09T17:53:10.256383Z", + "elasticApm": { + "addresses": ["1.2.3.4"], + "username": "some-user", + "password": "some-password", + "index": "an-index", + "certificate": "certificate.cert", + "insecureSkipVerify": true + } + } + }`, + SampleJSONUpdated: `{ + "type": "DataStore", + "spec": { + "id": "current", + "name": "another data store", + "type": "elasticapm", + "default": true, + "createdAt": "2023-03-09T17:53:10.256383Z", + "elasticApm": { + "addresses": ["4.3.2.1"], + "username": "some-user-updated", + "password": "some-password-updated", + "index": "an-index-updated", + "certificate": "certificate.cert-updated", + "insecureSkipVerify": true + } + } + }`, + } + + rmtests.TestResourceType(t, testSpec, excludedOperations, jsonComparer, operationsWithoutPostAssert) +} + +func TestDataStoreResource_Jaeger(t *testing.T) { + sample := datastore.DataStore{ + ID: "current", + Name: "default", + Type: datastore.DataStoreTypeJaeger, + Default: true, + CreatedAt: "2023-03-09T17:53:10.256383Z", + Values: datastore.DataStoreValues{ + Jaeger: &datastore.GRPCClientSettings{ + Endpoint: "some-endpoint", + TLS: &datastore.TLS{ + Insecure: true, + }, + }, + }, + } + + testSpec := rmtests.ResourceTypeTest{ + ResourceTypeSingular: datastore.ResourceName, + ResourceTypePlural: datastore.ResourceNamePlural, + RegisterManagerFn: registerManagerFn, + Prepare: getScenarioPreparation(sample), + SampleJSON: `{ + "type": "DataStore", + "spec": { + "id": "current", + "name": "default", + "type": "jaeger", + "default": true, + "createdAt": "2023-03-09T17:53:10.256383Z", + "jaeger": { + "endpoint": "some-endpoint", + "tls": { + "insecure": true + } + } + } + }`, + SampleJSONUpdated: `{ + "type": "DataStore", + "spec": { + "id": "current", + "name": "another data store", + "type": "jaeger", + "default": true, + "createdAt": "2023-03-09T17:53:10.256383Z", + "jaeger": { + "endpoint": "some-endpoint", + "tls": { + "insecure": true + } + } + } + }`, + } + + rmtests.TestResourceType(t, testSpec, excludedOperations, jsonComparer, operationsWithoutPostAssert) +} + +func TestDataStoreResource_OTLP(t *testing.T) { + sample := datastore.DataStore{ + ID: "current", + Name: "default", + Type: datastore.DataStoreTypeOTLP, + Default: true, + CreatedAt: "2023-03-09T17:53:10.256383Z", + Values: datastore.DataStoreValues{}, + } + + testSpec := rmtests.ResourceTypeTest{ + ResourceTypeSingular: datastore.ResourceName, + ResourceTypePlural: datastore.ResourceNamePlural, + RegisterManagerFn: registerManagerFn, + Prepare: getScenarioPreparation(sample), + SampleJSON: `{ + "type": "DataStore", + "spec": { + "id": "current", + "name": "default", + "type": "otlp", + "default": true, + "createdAt": "2023-03-09T17:53:10.256383Z" + } + }`, + SampleJSONUpdated: `{ + "type": "DataStore", + "spec": { + "id": "current", + "name": "another data store", + "type": "otlp", + "default": true, + "createdAt": "2023-03-09T17:53:10.256383Z" + } + }`, + } + + rmtests.TestResourceType(t, testSpec, excludedOperations, jsonComparer, operationsWithoutPostAssert) +} + +func TestDataStoreResource_OpenSearch(t *testing.T) { + sample := datastore.DataStore{ + ID: "current", + Name: "default", + Type: datastore.DataStoreTypeOpenSearch, + Default: true, + CreatedAt: "2023-03-09T17:53:10.256383Z", + Values: datastore.DataStoreValues{ + OpenSearch: &datastore.ElasticSearchConfig{ + Addresses: []string{"1.2.3.4"}, + Username: "some-user", + Password: 
"some-password", + Index: "an-index", + Certificate: "certificate.cert", + InsecureSkipVerify: true, + }, + }, + } + + testSpec := rmtests.ResourceTypeTest{ + ResourceTypeSingular: datastore.ResourceName, + ResourceTypePlural: datastore.ResourceNamePlural, + RegisterManagerFn: registerManagerFn, + Prepare: getScenarioPreparation(sample), + SampleJSON: `{ + "type": "DataStore", + "spec": { + "id": "current", + "name": "default", + "type": "opensearch", + "default": true, + "createdAt": "2023-03-09T17:53:10.256383Z", + "openSearch": { + "addresses": ["1.2.3.4"], + "username": "some-user", + "password": "some-password", + "index": "an-index", + "certificate": "certificate.cert", + "insecureSkipVerify": true + } + } + }`, + SampleJSONUpdated: `{ + "type": "DataStore", + "spec": { + "id": "current", + "name": "another data store", + "type": "opensearch", + "default": true, + "createdAt": "2023-03-09T17:53:10.256383Z", + "openSearch": { + "addresses": ["4.3.2.1"], + "username": "some-user-updated", + "password": "some-password-updated", + "index": "an-index-updated", + "certificate": "certificate.cert-updated", + "insecureSkipVerify": true + } + } + }`, + } + + rmtests.TestResourceType(t, testSpec, excludedOperations, jsonComparer, operationsWithoutPostAssert) +} + +func TestDataStoreResource_SignalFX(t *testing.T) { + sample := datastore.DataStore{ + ID: "current", + Name: "default", + Type: datastore.DataStoreTypeSignalFX, + Default: true, + CreatedAt: "2023-03-09T17:53:10.256383Z", + Values: datastore.DataStoreValues{ + SignalFx: &datastore.SignalFXConfig{ + Realm: "some-realm", + Token: "some-token", + }, + }, + } + + testSpec := rmtests.ResourceTypeTest{ + ResourceTypeSingular: datastore.ResourceName, + ResourceTypePlural: datastore.ResourceNamePlural, + RegisterManagerFn: registerManagerFn, + Prepare: getScenarioPreparation(sample), + SampleJSON: `{ + "type": "DataStore", + "spec": { + "id": "current", + "name": "default", + "type": "signalfx", + "default": true, + "createdAt": "2023-03-09T17:53:10.256383Z", + "signalFx": { + "realm": "some-realm", + "token": "some-token" + } + } + }`, + SampleJSONUpdated: `{ + "type": "DataStore", + "spec": { + "id": "current", + "name": "another data store", + "type": "signalfx", + "default": true, + "createdAt": "2023-03-09T17:53:10.256383Z", + "signalFx": { + "realm": "some-realm-updated", + "token": "some-token-updated" + } + } + }`, + } + + rmtests.TestResourceType(t, testSpec, excludedOperations, jsonComparer, operationsWithoutPostAssert) +} + +func TestDataStoreResource_Tempo(t *testing.T) { + sample := datastore.DataStore{ + ID: "current", + Name: "default", + Type: datastore.DataStoreTypeTempo, + Default: true, + CreatedAt: "2023-03-09T17:53:10.256383Z", + Values: datastore.DataStoreValues{ + Tempo: &datastore.MultiChannelClientConfig{ + Type: datastore.MultiChannelClientTypeGRPC, + Grpc: &datastore.GRPCClientSettings{ + Endpoint: "some-endpoint", + TLS: &datastore.TLS{ + Insecure: true, + }, + }, + }, + }, + } + + testSpec := rmtests.ResourceTypeTest{ + ResourceTypeSingular: datastore.ResourceName, + ResourceTypePlural: datastore.ResourceNamePlural, + RegisterManagerFn: registerManagerFn, + Prepare: getScenarioPreparation(sample), + SampleJSON: `{ + "type": "DataStore", + "spec": { + "id": "current", + "name": "default", + "type": "tempo", + "default": true, + "createdAt": "2023-03-09T17:53:10.256383Z", + "tempo": { + "type": "grpc", + "grpc": { + "endpoint": "some-endpoint", + "tls": { + "insecure": true + } + } + } + } + }`, + 
SampleJSONUpdated: `{ + "type": "DataStore", + "spec": { + "id": "current", + "name": "another data store", + "type": "tempo", + "default": true, + "createdAt": "2023-03-09T17:53:10.256383Z", + "tempo": { + "type": "http", + "http": { + "url": "some-url", + "headers": { + "authorization": "something" + } + } + } + } + }`, + } + + rmtests.TestResourceType(t, testSpec, excludedOperations, jsonComparer, operationsWithoutPostAssert) +} diff --git a/server/tracedb/datastoreresource/resource_types.go b/server/tracedb/datastoreresource/resource_types.go new file mode 100644 index 0000000000..b594c6b302 --- /dev/null +++ b/server/tracedb/datastoreresource/resource_types.go @@ -0,0 +1,197 @@ +package datastoreresource + +import ( + "fmt" + "time" + + "github.com/kubeshop/tracetest/server/pkg/id" + "golang.org/x/exp/slices" +) + +const ResourceName = "DataStore" +const ResourceNamePlural = "DataStores" + +type DataStoreType string + +type DataStore struct { + ID id.ID `mapstructure:"id"` + Name string `mapstructure:"name"` + Type DataStoreType `mapstructure:"type"` + Default bool `mapstructure:"default"` + Values DataStoreValues `mapstructure:"values,squash"` + CreatedAt string `mapstructure:"createdAt"` +} + +type DataStoreValues struct { + AwsXRay *AWSXRayConfig `mapstructure:"awsxray,omitempty"` + ElasticApm *ElasticSearchConfig `mapstructure:"elasticApm,omitempty"` + Jaeger *GRPCClientSettings `mapstructure:"jaeger,omitempty"` + OpenSearch *ElasticSearchConfig `mapstructure:"openSearch,omitempty"` + SignalFx *SignalFXConfig `mapstructure:"signalFx,omitempty"` + Tempo *MultiChannelClientConfig `mapstructure:"tempo,omitempty"` +} + +type AWSXRayConfig struct { + Region string `mapstructure:"region"` + AccessKeyID string `mapstructure:"accessKeyID"` + SecretAccessKey string `mapstructure:"secretAccessKey"` + SessionToken string `mapstructure:"sessionToken"` + UseDefaultAuth bool `mapstructure:"useDefaultAuth"` +} + +type ElasticSearchConfig struct { + Addresses []string `mapstructure:"addresses"` + Username string `mapstructure:"username"` + Password string `mapstructure:"password"` + Index string `mapstructure:"index"` + Certificate string `mapstructure:"certificate"` + InsecureSkipVerify bool `mapstructure:"insecureSkipVerify"` +} + +type GRPCClientSettings struct { + Endpoint string `mapstructure:"endpoint,omitempty"` + ReadBufferSize int `mapstructure:"readBufferSize,omitempty"` + WriteBufferSize int `mapstructure:"writeBufferSize,omitempty"` + WaitForReady bool `mapstructure:"waitForReady,omitempty"` + Headers map[string]string `mapstructure:"headers,omitempty"` + BalancerName string `mapstructure:"balancerName,omitempty"` + Compression GRPCCompression `mapstructure:"compression,omitempty"` + TLS *TLS `mapstructure:"tls,omitempty"` +} + +type GRPCCompression string + +const ( + GRPCCompressionGZip GRPCCompression = "gzip" + GRPCCompressionZlib GRPCCompression = "zlib" + GRPCCompressionDeflate GRPCCompression = "deflate" + GRPCCompressionSnappy GRPCCompression = "snappy" + GRPCCompressionZstd GRPCCompression = "zstd" + GRPCCompressionNone GRPCCompression = "none" +) + +type TLS struct { + Insecure bool `mapstructure:"insecure,omitempty"` + InsecureSkipVerify bool `mapstructure:"insecureSkipVerify,omitempty"` + ServerName string `mapstructure:"serverName,omitempty"` + Settings *TLSSetting `mapstructure:"settings,omitempty"` +} + +type TLSSetting struct { + CAFile string `mapstructure:"cAFile,omitempty"` + CertFile string `mapstructure:"certFile,omitempty"` + KeyFile string 
`mapstructure:"keyFile,omitempty"` + MinVersion string `mapstructure:"minVersion,omitempty"` + MaxVersion string `mapstructure:"maxVersion,omitempty"` +} + +type MultiChannelClientType string + +const ( + MultiChannelClientTypeGRPC MultiChannelClientType = "grpc" + MultiChannelClientTypeHTTP MultiChannelClientType = "http" +) + +type MultiChannelClientConfig struct { + Type MultiChannelClientType `mapstructure:"type"` + Grpc *GRPCClientSettings `mapstructure:"grpc,omitempty"` + Http *HttpClientConfig `mapstructure:"http,omitempty"` +} + +type HttpClientConfig struct { + Url string `mapstructure:"url"` + Headers map[string]string `mapstructure:"headers,omitempty"` + TLS *TLS `mapstructure:"tls,omitempty"` +} + +type SignalFXConfig struct { + Realm string `mapstructure:"realm"` + Token string `mapstructure:"token"` +} + +const ( + DataStoreTypeJaeger DataStoreType = "jaeger" + DataStoreTypeTempo DataStoreType = "tempo" + DataStoreTypeOpenSearch DataStoreType = "opensearch" + DataStoreTypeSignalFX DataStoreType = "signalfx" + DataStoreTypeOTLP DataStoreType = "otlp" + DataStoreTypeNewRelic DataStoreType = "newrelic" + DataStoreTypeLighStep DataStoreType = "lightstep" + DataStoreTypeElasticAPM DataStoreType = "elasticapm" + DataStoreTypeDataDog DataStoreType = "datadog" + DataStoreTypeAwsXRay DataStoreType = "awsxray" +) + +var validTypes = []DataStoreType{ + DataStoreTypeJaeger, + DataStoreTypeTempo, + DataStoreTypeOpenSearch, + DataStoreTypeSignalFX, + DataStoreTypeOTLP, + DataStoreTypeNewRelic, + DataStoreTypeLighStep, + DataStoreTypeElasticAPM, + DataStoreTypeDataDog, + DataStoreTypeAwsXRay, +} + +var otlpBasedDataStores = []DataStoreType{ + DataStoreTypeOTLP, + DataStoreTypeNewRelic, + DataStoreTypeLighStep, + DataStoreTypeDataDog, +} + +func (ds DataStore) Validate() error { + if ds.Type == "" { + return fmt.Errorf("data store should have a type") + } + + if !slices.Contains(validTypes, ds.Type) { + return fmt.Errorf("unsupported data store type %s", ds.Type) + } + + if ds.Name == "" { + return fmt.Errorf("data store should have a name") + } + + if ds.CreatedAt != "" { + if _, err := time.Parse(time.RFC3339Nano, ds.CreatedAt); err != nil { + return fmt.Errorf("data store should have the createdAt field in a valid format") + } + } + + if ds.Type == DataStoreTypeAwsXRay && ds.Values.AwsXRay == nil { + return fmt.Errorf("data store should have AWSXRay config values set up") + } + + if ds.Type == DataStoreTypeElasticAPM && ds.Values.ElasticApm == nil { + return fmt.Errorf("data store should have ElasticApm config values set up") + } + + if ds.Type == DataStoreTypeJaeger && ds.Values.Jaeger == nil { + return fmt.Errorf("data store should have Jaeger config values set up") + } + + if ds.Type == DataStoreTypeOpenSearch && ds.Values.OpenSearch == nil { + return fmt.Errorf("data store should have OpenSearch config values set up") + } + + if ds.Type == DataStoreTypeSignalFX && ds.Values.SignalFx == nil { + return fmt.Errorf("data store should have SignalFx config values set up") + } + + if ds.Type == DataStoreTypeTempo && ds.Values.Tempo == nil { + return fmt.Errorf("data store should have Tempo config values set up") + } + + return nil +} + +func (ds DataStore) HasID() bool { + return ds.ID.String() != "" +} + +func (ds DataStore) IsOTLPBasedProvider() bool { + return slices.Contains(otlpBasedDataStores, ds.Type) +} diff --git a/server/tracedb/elasticsearchdb.go b/server/tracedb/elasticsearchdb.go index 2290d2dc04..5eaa97552e 100644 --- a/server/tracedb/elasticsearchdb.go +++ 
b/server/tracedb/elasticsearchdb.go @@ -16,6 +16,7 @@ import ( "github.com/elastic/go-elasticsearch/v8/esapi" "github.com/kubeshop/tracetest/server/model" "github.com/kubeshop/tracetest/server/tracedb/connection" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" "go.opentelemetry.io/otel/trace" ) @@ -25,7 +26,7 @@ func elasticSearchDefaultPorts() []string { type elasticsearchDB struct { realTraceDB - config *model.ElasticSearchDataStoreConfig + config *datastoreresource.ElasticSearchConfig client *elasticsearch.Client } @@ -98,7 +99,7 @@ func (db *elasticsearchDB) GetTraceByID(ctx context.Context, traceID string) (mo return convertElasticSearchFormatIntoTrace(traceID, searchResponse), nil } -func newElasticSearchDB(cfg *model.ElasticSearchDataStoreConfig) (TraceDB, error) { +func newElasticSearchDB(cfg *datastoreresource.ElasticSearchConfig) (TraceDB, error) { var caCert []byte if cfg.Certificate != "" { caCert = []byte(cfg.Certificate) diff --git a/server/tracedb/jaegerdb.go b/server/tracedb/jaegerdb.go index 5a6ebf84ae..c592b14b38 100644 --- a/server/tracedb/jaegerdb.go +++ b/server/tracedb/jaegerdb.go @@ -11,8 +11,8 @@ import ( "github.com/kubeshop/tracetest/server/pkg/id" "github.com/kubeshop/tracetest/server/tracedb/connection" "github.com/kubeshop/tracetest/server/tracedb/datasource" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" "github.com/kubeshop/tracetest/server/traces" - "go.opentelemetry.io/collector/config/configgrpc" v1 "go.opentelemetry.io/proto/otlp/trace/v1" "google.golang.org/grpc" "google.golang.org/grpc/status" @@ -27,10 +27,10 @@ type jaegerTraceDB struct { dataSource datasource.DataSource } -func newJaegerDB(grpcConfig *configgrpc.GRPCClientSettings) (TraceDB, error) { - baseConfig := &model.BaseClientConfig{ - Type: string(datasource.GRPC), - Grpc: *grpcConfig, +func newJaegerDB(grpcConfig *datastoreresource.GRPCClientSettings) (TraceDB, error) { + baseConfig := &datastoreresource.MultiChannelClientConfig{ + Type: datastoreresource.MultiChannelClientTypeGRPC, + Grpc: grpcConfig, } dataSource := datasource.New("Jaeger", baseConfig, datasource.Callbacks{ diff --git a/server/tracedb/opensearchdb.go b/server/tracedb/opensearchdb.go index 257deba9bd..63fbd8ad5c 100644 --- a/server/tracedb/opensearchdb.go +++ b/server/tracedb/opensearchdb.go @@ -12,6 +12,7 @@ import ( "github.com/kubeshop/tracetest/server/model" "github.com/kubeshop/tracetest/server/tracedb/connection" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" "github.com/opensearch-project/opensearch-go" "github.com/opensearch-project/opensearch-go/opensearchapi" "go.opentelemetry.io/otel/trace" @@ -23,7 +24,7 @@ func opensearchDefaultPorts() []string { type opensearchDB struct { realTraceDB - config *model.ElasticSearchDataStoreConfig + config *datastoreresource.ElasticSearchConfig client *opensearch.Client } @@ -94,7 +95,7 @@ func (db *opensearchDB) GetTraceByID(ctx context.Context, traceID string) (model return convertOpensearchFormatIntoTrace(traceID, searchResponse), nil } -func newOpenSearchDB(cfg *model.ElasticSearchDataStoreConfig) (TraceDB, error) { +func newOpenSearchDB(cfg *datastoreresource.ElasticSearchConfig) (TraceDB, error) { var caCert []byte if cfg.Certificate != "" { caCert = []byte(cfg.Certificate) diff --git a/server/tracedb/signalfxdb.go b/server/tracedb/signalfxdb.go index fade5635e2..93e8a8e8ee 100644 --- a/server/tracedb/signalfxdb.go +++ b/server/tracedb/signalfxdb.go @@ -12,6 +12,7 @@ import ( 
"github.com/kubeshop/tracetest/server/model" "github.com/kubeshop/tracetest/server/tracedb/connection" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" "go.opentelemetry.io/otel/trace" ) @@ -189,7 +190,7 @@ func convertSignalFXSpan(in signalFXSpan) model.Span { } } -func newSignalFXDB(cfg *model.SignalFXDataStoreConfig) (TraceDB, error) { +func newSignalFXDB(cfg *datastoreresource.SignalFXConfig) (TraceDB, error) { return &signalfxDB{ Realm: cfg.Realm, Token: cfg.Token, diff --git a/server/tracedb/tempodb.go b/server/tracedb/tempodb.go index 19d4944f36..2b1d2b2aed 100644 --- a/server/tracedb/tempodb.go +++ b/server/tracedb/tempodb.go @@ -13,6 +13,7 @@ import ( "github.com/kubeshop/tracetest/server/pkg/id" "github.com/kubeshop/tracetest/server/tracedb/connection" "github.com/kubeshop/tracetest/server/tracedb/datasource" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" "github.com/kubeshop/tracetest/server/traces" "github.com/pkg/errors" "go.opentelemetry.io/otel/trace" @@ -30,7 +31,7 @@ type tempoTraceDB struct { dataSource datasource.DataSource } -func newTempoDB(config *model.BaseClientConfig) (TraceDB, error) { +func newTempoDB(config *datastoreresource.MultiChannelClientConfig) (TraceDB, error) { dataSource := datasource.New("Tempo", config, datasource.Callbacks{ HTTP: httpGetTraceByID, GRPC: grpcGetTraceByID, diff --git a/server/tracedb/tracedb.go b/server/tracedb/tracedb.go index 82fb4a9c2e..75d280a6d4 100644 --- a/server/tracedb/tracedb.go +++ b/server/tracedb/tracedb.go @@ -6,6 +6,7 @@ import ( "github.com/kubeshop/tracetest/server/model" "github.com/kubeshop/tracetest/server/pkg/id" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" "go.opentelemetry.io/otel/trace" ) @@ -42,48 +43,39 @@ func (db *noopTraceDB) TestConnection(ctx context.Context) model.ConnectionResul return model.ConnectionResult{} } -func WithFallback(fn func(ds model.DataStore) (TraceDB, error), fallbackDS model.DataStore) func(ds model.DataStore) (TraceDB, error) { - return func(ds model.DataStore) (TraceDB, error) { - if ds.IsZero() { - ds = fallbackDS - } - return fn(ds) - } -} - type traceDBFactory struct { - repo model.Repository + runRepository model.RunRepository } -func Factory(repo model.Repository) func(ds model.DataStore) (TraceDB, error) { +func Factory(runRepository model.RunRepository) func(ds datastoreresource.DataStore) (TraceDB, error) { f := traceDBFactory{ - repo: repo, + runRepository: runRepository, } return f.New } -func (f *traceDBFactory) getTraceDBInstance(ds model.DataStore) (TraceDB, error) { +func (f *traceDBFactory) getTraceDBInstance(ds datastoreresource.DataStore) (TraceDB, error) { var tdb TraceDB var err error if ds.IsOTLPBasedProvider() { - tdb, err = newCollectorDB(f.repo) + tdb, err = newCollectorDB(f.runRepository) return tdb, err } switch ds.Type { - case model.DataStoreTypeJaeger: + case datastoreresource.DataStoreTypeJaeger: tdb, err = newJaegerDB(ds.Values.Jaeger) - case model.DataStoreTypeTempo: + case datastoreresource.DataStoreTypeTempo: tdb, err = newTempoDB(ds.Values.Tempo) - case model.DataStoreTypeElasticAPM: + case datastoreresource.DataStoreTypeElasticAPM: tdb, err = newElasticSearchDB(ds.Values.ElasticApm) - case model.DataStoreTypeOpenSearch: + case datastoreresource.DataStoreTypeOpenSearch: tdb, err = newOpenSearchDB(ds.Values.OpenSearch) - case model.DataStoreTypeSignalFX: + case datastoreresource.DataStoreTypeSignalFX: tdb, err = newSignalFXDB(ds.Values.SignalFx) - case model.DataStoreTypeAwsXRay: + case 
datastoreresource.DataStoreTypeAwsXRay: tdb, err = NewAwsXRayDB(ds.Values.AwsXRay) default: return &noopTraceDB{}, nil @@ -100,7 +92,7 @@ func (f *traceDBFactory) getTraceDBInstance(ds model.DataStore) (TraceDB, error) return tdb, err } -func (f *traceDBFactory) New(ds model.DataStore) (TraceDB, error) { +func (f *traceDBFactory) New(ds datastoreresource.DataStore) (TraceDB, error) { tdb, err := f.getTraceDBInstance(ds) if err != nil { diff --git a/server/tracedb/tracedb_test.go b/server/tracedb/tracedb_test.go index ce040053e1..6383aa7d6d 100644 --- a/server/tracedb/tracedb_test.go +++ b/server/tracedb/tracedb_test.go @@ -4,26 +4,25 @@ import ( "fmt" "testing" - "github.com/kubeshop/tracetest/server/model" "github.com/kubeshop/tracetest/server/tracedb" + "github.com/kubeshop/tracetest/server/tracedb/datastoreresource" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "go.opentelemetry.io/collector/config/configgrpc" ) func TestCreateClient(t *testing.T) { cases := []struct { name string - ds model.DataStore + ds datastoreresource.DataStore expectedType string expectedError error }{ { name: "Jaeger", - ds: model.DataStore{ - Type: model.DataStoreTypeJaeger, - Values: model.DataStoreValues{ - Jaeger: &configgrpc.GRPCClientSettings{ + ds: datastoreresource.DataStore{ + Type: datastoreresource.DataStoreTypeJaeger, + Values: datastoreresource.DataStoreValues{ + Jaeger: &datastoreresource.GRPCClientSettings{ Endpoint: "notexists:123", }, }, @@ -32,89 +31,89 @@ func TestCreateClient(t *testing.T) { }, { name: "Tempo", - ds: model.DataStore{ - Type: model.DataStoreTypeTempo, - Values: model.DataStoreValues{ - Tempo: &model.BaseClientConfig{}, + ds: datastoreresource.DataStore{ + Type: datastoreresource.DataStoreTypeTempo, + Values: datastoreresource.DataStoreValues{ + Tempo: &datastoreresource.MultiChannelClientConfig{}, }, }, expectedType: "*tracedb.tempoTraceDB", }, { name: "ElasticSearch", - ds: model.DataStore{ - Type: model.DataStoreTypeElasticAPM, - Values: model.DataStoreValues{ - ElasticApm: &model.ElasticSearchDataStoreConfig{}, + ds: datastoreresource.DataStore{ + Type: datastoreresource.DataStoreTypeElasticAPM, + Values: datastoreresource.DataStoreValues{ + ElasticApm: &datastoreresource.ElasticSearchConfig{}, }, }, expectedType: "*tracedb.elasticsearchDB", }, { name: "OpenSearch", - ds: model.DataStore{ - Type: model.DataStoreTypeOpenSearch, - Values: model.DataStoreValues{ - OpenSearch: &model.ElasticSearchDataStoreConfig{}, + ds: datastoreresource.DataStore{ + Type: datastoreresource.DataStoreTypeOpenSearch, + Values: datastoreresource.DataStoreValues{ + OpenSearch: &datastoreresource.ElasticSearchConfig{}, }, }, expectedType: "*tracedb.opensearchDB", }, { name: "SignalFX", - ds: model.DataStore{ - Type: model.DataStoreTypeSignalFX, - Values: model.DataStoreValues{ - SignalFx: &model.SignalFXDataStoreConfig{}, + ds: datastoreresource.DataStore{ + Type: datastoreresource.DataStoreTypeSignalFX, + Values: datastoreresource.DataStoreValues{ + SignalFx: &datastoreresource.SignalFXConfig{}, }, }, expectedType: "*tracedb.signalfxDB", }, { name: "AWSXRay", - ds: model.DataStore{ - Type: model.DataStoreTypeAwsXRay, - Values: model.DataStoreValues{ - AwsXRay: &model.AWSXRayDataStoreConfig{}, + ds: datastoreresource.DataStore{ + Type: datastoreresource.DataStoreTypeAwsXRay, + Values: datastoreresource.DataStoreValues{ + AwsXRay: &datastoreresource.AWSXRayConfig{}, }, }, expectedType: "*tracedb.awsxrayDB", }, { name: "OTLP", - ds: model.DataStore{ - Type: 
model.DataStoreTypeOTLP, - Values: model.DataStoreValues{}, + ds: datastoreresource.DataStore{ + Type: datastoreresource.DataStoreTypeOTLP, + Values: datastoreresource.DataStoreValues{}, }, expectedType: "*tracedb.OTLPTraceDB", }, { name: "NewRelic", - ds: model.DataStore{ - Type: model.DataStoreTypeNewRelic, - Values: model.DataStoreValues{}, + ds: datastoreresource.DataStore{ + Type: datastoreresource.DataStoreTypeNewRelic, + Values: datastoreresource.DataStoreValues{}, }, expectedType: "*tracedb.OTLPTraceDB", }, { name: "Lightstep", - ds: model.DataStore{ - Type: model.DataStoreTypeLighStep, - Values: model.DataStoreValues{}, + ds: datastoreresource.DataStore{ + Type: datastoreresource.DataStoreTypeLighStep, + Values: datastoreresource.DataStoreValues{}, }, expectedType: "*tracedb.OTLPTraceDB", }, { name: "DataDog", - ds: model.DataStore{ - Type: model.DataStoreTypeDataDog, - Values: model.DataStoreValues{}, + ds: datastoreresource.DataStore{ + Type: datastoreresource.DataStoreTypeDataDog, + Values: datastoreresource.DataStoreValues{}, }, expectedType: "*tracedb.OTLPTraceDB", }, { name: "EmptyConfig", - ds: model.DataStore{}, + ds: datastoreresource.DataStore{}, expectedType: "*tracedb.noopTraceDB", }, } diff --git a/web/src/components/Settings/DataStoreForm/DataStoreForm.tsx b/web/src/components/Settings/DataStoreForm/DataStoreForm.tsx index 92cc5e9ae7..203be9b9eb 100644 --- a/web/src/components/Settings/DataStoreForm/DataStoreForm.tsx +++ b/web/src/components/Settings/DataStoreForm/DataStoreForm.tsx @@ -2,7 +2,6 @@ import {Button, Form} from 'antd'; import {useCallback, useEffect, useMemo} from 'react'; import DataStoreService from 'services/DataStore.service'; import {TDraftDataStore, TDataStoreForm, SupportedDataStores} from 'types/DataStore.types'; -import DataStore from 'models/DataStore.model'; import {SupportedDataStoresToExplanation, SupportedDataStoresToName} from 'constants/DataStore.constants'; import DataStoreConfig from 'models/DataStoreConfig.model'; import DataStoreDocsBanner from '../DataStoreDocsBanner/DataStoreDocsBanner'; @@ -20,7 +19,7 @@ interface IProps { onTestConnection(): void; isConfigReady: boolean; isTestConnectionLoading: boolean; - onDeleteConfig(dataStore: DataStore): void; + onDeleteConfig(): void; isLoading: boolean; isFormValid: boolean; } @@ -83,13 +82,7 @@ const DataStoreForm = ({ {isConfigReady ? 
( - ) : ( diff --git a/web/src/models/DataStore.model.ts b/web/src/models/DataStore.model.ts index 4a0c159abd..75d8a22967 100644 --- a/web/src/models/DataStore.model.ts +++ b/web/src/models/DataStore.model.ts @@ -1,41 +1,35 @@ import {SupportedDataStores} from 'types/DataStore.types'; import {Model, TDataStoreSchemas} from 'types/Common.types'; -export type TRawDataStore = TDataStoreSchemas['DataStore']; -type DataStore = Model< - TRawDataStore, - { - otlp?: {}; - newRelic?: {}; - lightstep?: {}; - datadog?: {}; - } ->; +export type TRawDataStore = TDataStoreSchemas['DataStoreResource']; +type DataStore = Model['spec'] & {otlp?: {}; newRelic?: {}; lightstep?: {}; datadog?: {}}; const DataStore = ({ - id = '', - name = '', - type = SupportedDataStores.JAEGER, - isDefault = false, - openSearch = {}, - elasticApm = {}, - signalFx = {}, - jaeger = {}, - tempo = {}, - awsxray = {}, - createdAt = '', + spec: { + id = '', + name = '', + type = SupportedDataStores.JAEGER, + default: isDefault = false, + createdAt = '', + openSearch = {}, + elasticApm = {}, + signalFx = {}, + jaeger = {}, + tempo = {}, + awsxray = {}, + } = {id: '', name: '', type: SupportedDataStores.JAEGER}, }: TRawDataStore): DataStore => ({ id, name, type, - isDefault, + default: isDefault, + createdAt, openSearch, - signalFx, elasticApm, + signalFx, jaeger, tempo, awsxray, - createdAt, }); export default DataStore; diff --git a/web/src/models/DataStoreConfig.model.ts b/web/src/models/DataStoreConfig.model.ts index 550854c4c8..dc18d88987 100644 --- a/web/src/models/DataStoreConfig.model.ts +++ b/web/src/models/DataStoreConfig.model.ts @@ -1,4 +1,4 @@ -import {ConfigMode, SupportedDataStores} from 'types/DataStore.types'; +import {ConfigMode} from 'types/DataStore.types'; import DataStore, {TRawDataStore} from './DataStore.model'; type DataStoreConfig = { @@ -6,18 +6,13 @@ type DataStoreConfig = { mode: ConfigMode; }; -const DataStoreConfig = (dataStores: TRawDataStore[] = []): DataStoreConfig => { - const dataStoreList = dataStores.map(rawDataStore => DataStore(rawDataStore)); - const defaultDataStore = dataStoreList.find(({isDefault}) => isDefault); - const mode = (!!defaultDataStore && ConfigMode.READY) || ConfigMode.NO_TRACING_MODE; +const DataStoreConfig = (rawDataStore: TRawDataStore): DataStoreConfig => { + const defaultDataStore = DataStore(rawDataStore); + const isDefaultDataStore = defaultDataStore.default; + const mode = isDefaultDataStore ? ConfigMode.READY : ConfigMode.NO_TRACING_MODE; return { - defaultDataStore: - defaultDataStore ?? 
- DataStore({ - name: 'default', - type: SupportedDataStores.JAEGER, - }), + defaultDataStore, mode, }; }; diff --git a/web/src/models/__mocks__/DataStoreConfig.mock.ts b/web/src/models/__mocks__/DataStoreConfig.mock.ts deleted file mode 100644 index a248254d43..0000000000 --- a/web/src/models/__mocks__/DataStoreConfig.mock.ts +++ /dev/null @@ -1,19 +0,0 @@ -import {IMockFactory} from 'types/Common.types'; -import {TRawDataStore} from '../DataStore.model'; -import DataStoreConfig from '../DataStoreConfig.model'; - -const DataStoreConfigMock: IMockFactory< - DataStoreConfig, - { - dataStores: TRawDataStore[]; - } -> = () => ({ - raw({dataStores = []} = {}) { - return {dataStores}; - }, - model({dataStores = []} = {}) { - return DataStoreConfig(this.raw({dataStores}).dataStores); - }, -}); - -export default DataStoreConfigMock(); diff --git a/web/src/providers/DataStore/DataStore.provider.tsx b/web/src/providers/DataStore/DataStore.provider.tsx index 0cb39bdca7..2e93d4c7b8 100644 --- a/web/src/providers/DataStore/DataStore.provider.tsx +++ b/web/src/providers/DataStore/DataStore.provider.tsx @@ -5,7 +5,6 @@ import {NoTestConnectionDataStoreList, SupportedDataStoresToName} from 'constant import ConnectionResult from 'models/ConnectionResult.model'; import { useTestConnectionMutation, - useCreateDataStoreMutation, useUpdateDataStoreMutation, useDeleteDataStoreMutation, } from 'redux/apis/TraceTest.api'; @@ -20,7 +19,7 @@ interface IContext { isFormValid: boolean; isLoading: boolean; isTestConnectionLoading: boolean; - onDeleteConfig(defaultDataStore: DataStore): void; + onDeleteConfig(): void; onSaveConfig(draft: TDraftDataStore, defaultDataStore: DataStore): void; onTestConnection(draft: TDraftDataStore, defaultDataStore: DataStore): void; onIsFormValid(isValid: boolean): void; @@ -44,7 +43,6 @@ export const useDataStore = () => useContext(Context); const DataStoreProvider = ({children}: IProps) => { const {isFetching} = useSettingsValues(); - const [createDataStore, {isLoading: isLoadingCreate}] = useCreateDataStoreMutation(); const [updateDataStore, {isLoading: isLoadingUpdate}] = useUpdateDataStoreMutation(); const [deleteDataStore] = useDeleteDataStoreMutation(); const [testConnection, {isLoading: isTestConnectionLoading}] = useTestConnectionMutation(); @@ -72,32 +70,25 @@ const DataStoreProvider = ({children}: IProps) => { okText: 'Save', onConfirm: async () => { const dataStore = await DataStoreService.getRequest(draft, defaultDataStore); - if (dataStore.id) { - await updateDataStore({dataStore, dataStoreId: dataStore.id}).unwrap(); - } else { - await createDataStore(dataStore).unwrap(); - } + await updateDataStore({dataStore}).unwrap(); showSuccessNotification(); }, }); }, - [createDataStore, onOpen, showSuccessNotification, updateDataStore] + [onOpen, showSuccessNotification, updateDataStore] ); - const onDeleteConfig = useCallback( - async (defaultDataStore: DataStore) => { - onOpen({ - title: - "Tracetest will remove the trace data store configuration information and enter the 'No-Tracing Mode'. You can still run tests against the responses until you configure a new trace data store.", - heading: 'Save Confirmation', - okText: 'Save', - onConfirm: async () => { - await deleteDataStore({dataStoreId: defaultDataStore.id}).unwrap(); - }, - }); - }, - [deleteDataStore, onOpen] - ); + const onDeleteConfig = useCallback(async () => { + onOpen({ + title: + "Tracetest will remove the trace data store configuration information and enter the 'No-Tracing Mode'. 
You can still run tests against the responses until you configure a new trace data store.", + heading: 'Save Confirmation', + okText: 'Save', + onConfirm: async () => { + await deleteDataStore().unwrap(); + }, + }); + }, [deleteDataStore, onOpen]); const onIsFormValid = useCallback((isValid: boolean) => { setIsFormValid(isValid); @@ -112,7 +103,7 @@ const DataStoreProvider = ({children}: IProps) => { } try { - const result = await testConnection(dataStore!).unwrap(); + const result = await testConnection(dataStore.spec!).unwrap(); showTestConnectionNotification(result, draft.dataStoreType!); } catch (err) { showTestConnectionNotification(err as TConnectionResult, draft.dataStoreType!); @@ -123,7 +114,7 @@ const DataStoreProvider = ({children}: IProps) => { const value = useMemo( () => ({ - isLoading: isLoadingCreate || isLoadingUpdate, + isLoading: isLoadingUpdate, isFormValid, isTestConnectionLoading, onSaveConfig, @@ -132,7 +123,6 @@ const DataStoreProvider = ({children}: IProps) => { onDeleteConfig, }), [ - isLoadingCreate, isLoadingUpdate, isFormValid, isTestConnectionLoading, diff --git a/web/src/providers/SettingsValues/SettingsValues.provider.tsx b/web/src/providers/SettingsValues/SettingsValues.provider.tsx index 1530f08ca1..208413bf01 100644 --- a/web/src/providers/SettingsValues/SettingsValues.provider.tsx +++ b/web/src/providers/SettingsValues/SettingsValues.provider.tsx @@ -5,7 +5,7 @@ import Config from 'models/Config.model'; import DataStoreConfig from 'models/DataStoreConfig.model'; import Demo from 'models/Demo.model'; import Polling from 'models/Polling.model'; -import {useGetDataStoresQuery, useGetConfigQuery, useGetDemoQuery, useGetPollingQuery} from 'redux/apis/TraceTest.api'; +import {useGetDataStoreQuery, useGetConfigQuery, useGetDemoQuery, useGetPollingQuery} from 'redux/apis/TraceTest.api'; import {useAppDispatch, useAppSelector} from 'redux/hooks'; import {setUserPreference} from 'redux/slices/User.slice'; import UserSelectors from 'selectors/User.selectors'; @@ -28,7 +28,7 @@ interface IContext { } const Context = createContext({ - dataStoreConfig: DataStoreConfig([]), + dataStoreConfig: DataStoreConfig({}), skipConfigSetup: noop, skipConfigSetupFromTest: noop, isLoading: false, @@ -50,7 +50,7 @@ export const useSettingsValues = () => useContext(Context); const SettingsValuesProvider = ({children}: IProps) => { // DataStore const dispatch = useAppDispatch(); - const {data: dataStoreConfig = DataStoreConfig([]), isLoading, isError, isFetching} = useGetDataStoresQuery({}); + const {data: dataStoreConfig = DataStoreConfig({}), isLoading, isError, isFetching} = useGetDataStoreQuery({}); const initConfigSetup = useAppSelector(state => UserSelectors.selectUserPreference(state, 'initConfigSetup')); const initConfigSetupFromTest = useAppSelector(state => UserSelectors.selectUserPreference(state, 'initConfigSetupFromTest') diff --git a/web/src/redux/apis/TraceTest.api.ts b/web/src/redux/apis/TraceTest.api.ts index 49939f1d56..168df7c680 100644 --- a/web/src/redux/apis/TraceTest.api.ts +++ b/web/src/redux/apis/TraceTest.api.ts @@ -72,8 +72,7 @@ export const { useGetTransactionVersionByIdQuery, useGetResourceDefinitionQuery, useLazyGetResourceDefinitionQuery, - useGetDataStoresQuery, - useCreateDataStoreMutation, + useGetDataStoreQuery, useUpdateDataStoreMutation, useDeleteDataStoreMutation, useTestConnectionMutation, diff --git a/web/src/redux/apis/endpoints/DataStore.endpoint.ts b/web/src/redux/apis/endpoints/DataStore.endpoint.ts index 4e2f2e78b0..ec96b640a4 100644 --- 
a/web/src/redux/apis/endpoints/DataStore.endpoint.ts +++ b/web/src/redux/apis/endpoints/DataStore.endpoint.ts @@ -1,37 +1,34 @@ import {HTTP_METHOD} from 'constants/Common.constants'; import {TracetestApiTags} from 'constants/Test.constants'; import ConnectionResult from 'models/ConnectionResult.model'; +import {TRawDataStore} from 'models/DataStore.model'; import DataStoreConfig from 'models/DataStoreConfig.model'; import {TConnectionResult, TRawConnectionResult, TTestConnectionRequest} from 'types/DataStore.types'; import {TTestApiEndpointBuilder} from 'types/Test.types'; -import {TRawDataStore} from 'models/DataStore.model'; const DataStoreEndpoint = (builder: TTestApiEndpointBuilder) => ({ - getDataStores: builder.query({ - query: () => '/datastores?take=50', - providesTags: () => [{type: TracetestApiTags.DATA_STORE, id: 'datastore'}], - transformResponse: (rawDataStores: TRawDataStore[]) => DataStoreConfig(rawDataStores), - }), - createDataStore: builder.mutation({ - query: dataStore => ({ - url: '/datastores', - method: HTTP_METHOD.POST, - body: dataStore, + getDataStore: builder.query({ + query: () => ({ + url: '/datastores/current', + method: HTTP_METHOD.GET, + headers: {'content-type': 'application/json'}, }), - invalidatesTags: [{type: TracetestApiTags.DATA_STORE, id: 'datastore'}], + providesTags: () => [{type: TracetestApiTags.DATA_STORE, id: 'datastore'}], + transformResponse: (rawDataStore: TRawDataStore) => DataStoreConfig(rawDataStore), }), - updateDataStore: builder.mutation({ - query: ({dataStore, dataStoreId}) => ({ - url: `/datastores/${dataStoreId}`, + updateDataStore: builder.mutation({ + query: ({dataStore}) => ({ + url: `/datastores/current`, method: HTTP_METHOD.PUT, body: dataStore, }), invalidatesTags: [{type: TracetestApiTags.DATA_STORE, id: 'datastore'}], }), - deleteDataStore: builder.mutation({ - query: ({dataStoreId}) => ({ - url: `/datastores/${dataStoreId}`, + deleteDataStore: builder.mutation({ + query: () => ({ + url: `/datastores/current`, method: HTTP_METHOD.DELETE, + headers: {'content-type': 'application/json'}, }), invalidatesTags: [{type: TracetestApiTags.DATA_STORE, id: 'datastore'}], }), diff --git a/web/src/services/DataStore.service.ts b/web/src/services/DataStore.service.ts index c9f7dffacc..3305a76c14 100644 --- a/web/src/services/DataStore.service.ts +++ b/web/src/services/DataStore.service.ts @@ -33,16 +33,19 @@ const DataStoreService = (): IDataStoreService => ({ const dataStoreValues = await dataStoreServiceMap[dataStoreType].getRequest(draft, dataStoreType); const isUpdate = !!defaultDataStore.id; - const dataStore: TRawDataStore = isUpdate - ? {id: defaultDataStore.id, ...dataStoreValues, isDefault: true} + const dataStore: DataStore = isUpdate + ? 
{id: defaultDataStore.id, ...dataStoreValues, default: true} : { ...dataStoreValues, name: dataStoreType, type: dataStoreType as SupportedDataStores, - isDefault: true, + default: true, }; - return dataStore; + return { + type: 'DataStore', + spec: dataStore, + } as TRawDataStore; }, getInitialValues(dataStoreConfig) { diff --git a/web/src/types/DataStore.types.ts b/web/src/types/DataStore.types.ts index 5718183964..1a38242638 100644 --- a/web/src/types/DataStore.types.ts +++ b/web/src/types/DataStore.types.ts @@ -1,7 +1,7 @@ import {FormInstance} from 'antd'; import {Model, TDataStoreSchemas, TConfigSchemas} from 'types/Common.types'; import ConnectionTestStep from 'models/ConnectionResultStep.model'; -import {TRawDataStore} from 'models/DataStore.model'; +import DataStore from 'models/DataStore.model'; import DataStoreConfig from 'models/DataStoreConfig.model'; import {THeader} from './Test.types'; @@ -39,7 +39,7 @@ export type TRawElasticSearch = TDataStoreSchemas['ElasticSearch']; export type TRawBaseClientSettings = TDataStoreSchemas['BaseClient']; export type TRawHttpClientSettings = TDataStoreSchemas['HTTPClientSettings']; -export type TTestConnectionRequest = TRawDataStore; +export type TTestConnectionRequest = DataStore; export type TRawConnectionResult = TConfigSchemas['ConnectionResult']; export type TConnectionResult = Model< TRawConnectionResult, @@ -76,7 +76,7 @@ export interface IElasticSearch extends TRawElasticSearch { certificateFile?: File; } -type IDataStore = TRawDataStore & { +type IDataStore = DataStore & { jaeger?: IBaseClientSettings; tempo?: IBaseClientSettings; openSearch?: IElasticSearch; @@ -85,7 +85,7 @@ type IDataStore = TRawDataStore & { lightstep?: {}; newRelic?: {}; datadog?: {}; -} +}; export type TDraftDataStore = { dataStore?: IDataStore; @@ -95,7 +95,7 @@ export type TDraftDataStore = { export type TDataStoreForm = FormInstance; export type TDataStoreService = { - getRequest(values: TDraftDataStore, dataStoreType?: SupportedDataStores): Promise; + getRequest(values: TDraftDataStore, dataStoreType?: SupportedDataStores): Promise; validateDraft(draft: TDraftDataStore): Promise; getInitialValues(draft: DataStoreConfig, dataStoreType?: SupportedDataStores): TDraftDataStore; }; diff --git a/web/src/types/Generated.types.ts b/web/src/types/Generated.types.ts index 5e9ad02fe9..a5cd957d65 100644 --- a/web/src/types/Generated.types.ts +++ b/web/src/types/Generated.types.ts @@ -170,23 +170,11 @@ export interface paths { /** Delete a demonstration used on Tracetest as quick start examples. 
*/ delete: operations["deleteDemo"]; }; - "/datastores": { - /** Get all Data Stores */ - get: operations["getDataStores"]; - /** Create a new Data Store */ - post: operations["createDataStore"]; - }; "/datastores/{dataStoreId}": { /** Get a Data Store */ get: operations["getDataStore"]; /** Update a Data Store */ put: operations["updateDataStore"]; - /** Delete a Data Store */ - delete: operations["deleteDataStore"]; - }; - "/datastores/{dataStoreId}/definition.yaml": { - /** Get the data store as an YAML file */ - get: operations["getDataStoreDefinitionFile"]; }; } @@ -988,42 +976,6 @@ export interface operations { 500: unknown; }; }; - /** Get all Data Stores */ - getDataStores: { - parameters: {}; - responses: { - /** successful operation */ - 200: { - headers: { - /** Total records count */ - "X-Total-Count"?: number; - }; - content: { - "application/json": external["dataStores.yaml"]["components"]["schemas"]["DataStore"][]; - }; - }; - /** problem with getting data stores */ - 500: unknown; - }; - }; - /** Create a new Data Store */ - createDataStore: { - responses: { - /** successful operation */ - 200: { - content: { - "application/json": external["dataStores.yaml"]["components"]["schemas"]["DataStore"]; - }; - }; - /** trying to create a data store with an already existing ID */ - 400: unknown; - }; - requestBody: { - content: { - "application/json": external["dataStores.yaml"]["components"]["schemas"]["DataStore"]; - }; - }; - }; /** Get a Data Store */ getDataStore: { parameters: {}; @@ -1031,9 +983,11 @@ export interface operations { /** successful operation */ 200: { content: { - "application/json": external["dataStores.yaml"]["components"]["schemas"]["DataStore"]; + "application/json": external["dataStores.yaml"]["components"]["schemas"]["DataStoreResource"]; }; }; + /** data store not found */ + 404: unknown; /** problem with getting a data store */ 500: unknown; }; @@ -1044,6 +998,8 @@ export interface operations { responses: { /** successful operation */ 204: never; + /** invalid data store, some data was sent in incorrect format. */ + 400: unknown; /** problem with updating data store */ 500: unknown; }; @@ -1053,26 +1009,6 @@ export interface operations { }; }; }; - /** Delete a Data Store */ - deleteDataStore: { - parameters: {}; - responses: { - /** OK */ - 204: never; - }; - }; - /** Get the data store as an YAML file */ - getDataStoreDefinitionFile: { - parameters: {}; - responses: { - /** OK */ - 200: { - content: { - "application/yaml": string; - }; - }; - }; - }; } export interface external { @@ -1198,11 +1134,20 @@ export interface external { paths: {}; components: { schemas: { + /** @description Represents a data store structured into the Resources format. */ + DataStoreResource: { + /** + * @description Represents the type of this resource. It should always be set as 'DataStore'. + * @enum {string} + */ + type?: "DataStore"; + spec?: external["dataStores.yaml"]["components"]["schemas"]["DataStore"]; + }; DataStore: { id?: string; name: string; type: external["dataStores.yaml"]["components"]["schemas"]["SupportedDataStores"]; - isDefault?: boolean; + default?: boolean; jaeger?: external["dataStores.yaml"]["components"]["schemas"]["GRPCClientSettings"]; tempo?: external["dataStores.yaml"]["components"]["schemas"]["BaseClient"]; openSearch?: external["dataStores.yaml"]["components"]["schemas"]["ElasticSearch"];