diff --git a/internal/common/helpers.go b/internal/common/helpers.go index 023d527cd2..218b5e712f 100644 --- a/internal/common/helpers.go +++ b/internal/common/helpers.go @@ -4,7 +4,10 @@ package common -import "strings" +import ( + "fmt" + "strings" +) // TrimStringSlice removes whitespace from the beginning and end of the contents of a []string. func TrimStringSlice(slice []string) { @@ -36,3 +39,21 @@ func StringSlicesUnion(slices ...[]string) (result []string) { } return } + +// ToStringSlice returns the list of strings from an interface variable +func ToStringSlice(val interface{}) ([]string, error) { + vals, ok := val.([]interface{}) + if !ok { + return nil, fmt.Errorf("conversion error") + } + + var s []string + for _, v := range vals { + str, ok := v.(string) + if !ok { + return nil, fmt.Errorf("conversion error") + } + s = append(s, str) + } + return s, nil +} diff --git a/internal/common/mapstr_test.go b/internal/common/mapstr_test.go new file mode 100644 index 0000000000..c61372a296 --- /dev/null +++ b/internal/common/mapstr_test.go @@ -0,0 +1,64 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package common + +import ( + "encoding/json" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestMapStrGetValue(t *testing.T) { + + cases := []struct { + title string + testFile string + fieldKey string + expectedValue interface{} + }{ + { + title: "string value", + testFile: "./testdata/source.json", + fieldKey: "host.architecture", + expectedValue: "x86_64", + }, + { + title: "float64 value", + testFile: "./testdata/source.json", + fieldKey: "metricset.period", + expectedValue: float64(10000), + }, + { + title: "slice value", + testFile: "./testdata/source.json", + fieldKey: "tags", + expectedValue: []interface{}{"apache_tomcat-cache", "forwarded"}, + }, + { + title: "map value", + testFile: "./testdata/source.json", + fieldKey: "data_stream", + expectedValue: map[string]interface{}{"dataset": "apache_tomcat.cache", "namespace": "ep", "type": "metrics"}, + }, + } + + for _, c := range cases { + t.Run(c.title, func(t *testing.T) { + b, err := os.ReadFile("./testdata/source.json") + require.NoError(t, err) + + var given MapStr + err = json.Unmarshal(b, &given) + require.NoError(t, err) + + val, err := given.GetValue(c.fieldKey) + assert.NoError(t, err) + assert.Equal(t, c.expectedValue, val) + }) + } +} diff --git a/internal/common/testdata/source.json b/internal/common/testdata/source.json new file mode 100644 index 0000000000..598d3ef705 --- /dev/null +++ b/internal/common/testdata/source.json @@ -0,0 +1,92 @@ +{ + "@timestamp": "2023-05-30T11:42:07.923Z", + "agent": { + "ephemeral_id": "7dd994f8-d866-4068-9f29-d6c90a28738e", + "id": "5cad1bda-ddc8-4333-8d88-333b6ef311b5", + "name": "docker-fleet-agent", + "type": "metricbeat", + "version": "8.8.0" + }, + "apache_tomcat": { + "cache": { + "application_name": "/", + "hit": { + "count": 15.0 + }, + "lookup": { + "count": 30.0 + }, + "object": { + "size": { + "max": { + "kb": 512.0 + } + } + }, + "size": { + "current": { + "kb": 19.0 + }, + "max": { + 
"kb": 10240.0 + } + }, + "ttl": { + "ms": 5000.0 + } + } + }, + "data_stream": { + "dataset": "apache_tomcat.cache", + "namespace": "ep", + "type": "metrics" + }, + "ecs": { + "version": "8.7.0" + }, + "elastic_agent": { + "id": "5cad1bda-ddc8-4333-8d88-333b6ef311b5", + "snapshot": true, + "version": "8.8.0" + }, + "event": { + "agent_id_status": "verified", + "category": "web", + "dataset": "apache_tomcat.cache", + "duration": 136411023, + "ingested": "2023-05-30T11:42:09Z", + "kind": "metric", + "module": "apache_tomcat", + "type": "info" + }, + "host": { + "architecture": "x86_64", + "containerized": false, + "hostname": "docker-fleet-agent", + "id": "4e0d02de87bd4101b7215d3aaf5f623b", + "ip": "172.23.0.7", + "mac": "02-42-AC-17-00-07", + "name": "docker-fleet-agent", + "os": { + "codename": "focal", + "family": "debian", + "kernel": "5.19.0-42-generic", + "name": "Ubuntu", + "platform": "ubuntu", + "type": "linux", + "version": "20.04.6 LTS (Focal Fossa)" + } + }, + "metricset": { + "name": "collector", + "period": 10000 + }, + "service": { + "address": "http://elastic-package-service-apache_tomcat-1:9090/metrics", + "type": "prometheus" + }, + "tags": [ + "apache_tomcat-cache", + "forwarded" + ] +} \ No newline at end of file diff --git a/internal/fields/validate.go b/internal/fields/validate.go index 76eb564a3c..9b541aa5af 100644 --- a/internal/fields/validate.go +++ b/internal/fields/validate.go @@ -13,10 +13,10 @@ import ( "os" "path/filepath" "regexp" + "sort" "strings" "github.com/Masterminds/semver/v3" - "github.com/pkg/errors" "gopkg.in/yaml.v3" "github.com/elastic/elastic-package/internal/common" @@ -55,6 +55,8 @@ type Validator struct { allowedCIDRs []*net.IPNet enabledImportAllECSSchema bool + + disabledNormalization bool } // ValidatorOption represents an optional flag that can be passed to CreateValidatorForDirectory. 
@@ -124,6 +126,14 @@ func WithEnabledImportAllECSSChema(importSchema bool) ValidatorOption { } } +// WithDisableNormalization configures the validator to disable normalization. +func WithDisableNormalization(disabledNormalization bool) ValidatorOption { + return func(v *Validator) error { + v.disabledNormalization = disabledNormalization + return nil + } +} + type packageRootFinder interface { FindPackageRoot() (string, bool, error) } @@ -153,7 +163,7 @@ func createValidatorForDirectoryAndPackageRoot(fieldsParentDir string, finder pa fieldsDir := filepath.Join(fieldsParentDir, "fields") v.Schema, err = loadFieldsFromDir(fieldsDir) if err != nil { - return nil, errors.Wrapf(err, "can't load fields from directory (path: %s)", fieldsDir) + return nil, fmt.Errorf("can't load fields from directory (path: %s): %w", fieldsDir, err) } if v.disabledDependencyManagement { @@ -162,7 +172,7 @@ func createValidatorForDirectoryAndPackageRoot(fieldsParentDir string, finder pa packageRoot, found, err := finder.FindPackageRoot() if err != nil { - return nil, errors.Wrap(err, "can't find package root") + return nil, fmt.Errorf("can't find package root: %w", err) } // As every command starts with approximating where is the package root, it isn't required to return an error in case the root is missing. // This is also useful for testing purposes, where we don't have a real package, but just "fields" directory. The package root is always absent. 
@@ -174,7 +184,7 @@ func createValidatorForDirectoryAndPackageRoot(fieldsParentDir string, finder pa bm, ok, err := buildmanifest.ReadBuildManifest(packageRoot) if err != nil { - return nil, errors.Wrap(err, "can't read build manifest") + return nil, fmt.Errorf("can't read build manifest: %w", err) } if !ok { v.disabledDependencyManagement = true @@ -183,7 +193,7 @@ func createValidatorForDirectoryAndPackageRoot(fieldsParentDir string, finder pa fdm, err := CreateFieldDependencyManager(bm.Dependencies) if err != nil { - return nil, errors.Wrap(err, "can't create field dependency manager") + return nil, fmt.Errorf("can't create field dependency manager: %w", err) } v.FieldDependencyManager = fdm @@ -217,20 +227,20 @@ func initializeAllowedCIDRsList() (cidrs []*net.IPNet) { func loadFieldsFromDir(fieldsDir string) ([]FieldDefinition, error) { files, err := filepath.Glob(filepath.Join(fieldsDir, "*.yml")) if err != nil { - return nil, errors.Wrapf(err, "reading directory with fields failed (path: %s)", fieldsDir) + return nil, fmt.Errorf("reading directory with fields failed (path: %s): %w", fieldsDir, err) } var fields []FieldDefinition for _, file := range files { body, err := os.ReadFile(file) if err != nil { - return nil, errors.Wrap(err, "reading fields file failed") + return nil, fmt.Errorf("reading fields file failed: %w", err) } var u []FieldDefinition err = yaml.Unmarshal(body, &u) if err != nil { - return nil, errors.Wrap(err, "unmarshalling field body failed") + return nil, fmt.Errorf("unmarshalling field body failed: %w", err) } fields = append(fields, u...) 
} @@ -243,7 +253,7 @@ func (v *Validator) ValidateDocumentBody(body json.RawMessage) multierror.Error err := json.Unmarshal(body, &c) if err != nil { var errs multierror.Error - errs = append(errs, errors.Wrap(err, "unmarshalling document body failed")) + errs = append(errs, fmt.Errorf("unmarshalling document body failed: %w", err)) return errs } @@ -273,9 +283,10 @@ func (v *Validator) validateDocumentValues(body common.MapStr) multierror.Error if err == common.ErrKeyNotFound { continue } - str, ok := value.(string) + + str, ok := valueToString(value, v.disabledNormalization) if !ok || str != v.expectedDataset { - err := errors.Errorf("field %q should have value %q, it has \"%v\"", + err := fmt.Errorf("field %q should have value %q, it has \"%v\"", datasetField, v.expectedDataset, value) errs = append(errs, err) } @@ -284,6 +295,20 @@ func (v *Validator) validateDocumentValues(body common.MapStr) multierror.Error return errs } +func valueToString(value any, disabledNormalization bool) (string, bool) { + if disabledNormalization { + // when synthetics mode is enabled, each field present in the document is an array + // so this check needs to retrieve the first element of the array + vals, err := common.ToStringSlice(value) + if err != nil || len(vals) != 1 { + return "", false + } + return vals[0], true + } + str, ok := value.(string) + return str, ok +} + func (v *Validator) validateMapElement(root string, elem common.MapStr, doc common.MapStr) multierror.Error { var errs multierror.Error for name, val := range elem { @@ -334,7 +359,7 @@ func (v *Validator) validateScalarElement(key string, val interface{}, doc commo if !v.disabledDependencyManagement && definition.External != "" { def, err := v.FieldDependencyManager.ImportField(definition.External, key) if err != nil { - return errors.Wrapf(err, "can't import field (field: %s)", key) + return fmt.Errorf("can't import field (field: %s): %w", key, err) } definition = &def } @@ -345,18 +370,110 @@ func (v 
*Validator) validateScalarElement(key string, val interface{}, doc commo val = fmt.Sprintf("%q", val) } - err := v.validateExpectedNormalization(*definition, val) - if err != nil { - return errors.Wrapf(err, "field %q is not normalized as expected", key) + if !v.disabledNormalization { + err := v.validateExpectedNormalization(*definition, val) + if err != nil { + return fmt.Errorf("field %q is not normalized as expected: %w", key, err) + } } - err = v.parseElementValue(key, *definition, val, doc) + err := v.parseElementValue(key, *definition, val, doc) if err != nil { - return errors.Wrap(err, "parsing field value failed") + return fmt.Errorf("parsing field value failed: %w", err) } return nil } +func (v *Validator) SanitizeSyntheticSourceDocs(docs []common.MapStr) ([]common.MapStr, error) { + var newDocs []common.MapStr + for _, doc := range docs { + for key, contents := range doc { + shouldBeArray := false + definition := FindElementDefinition(key, v.Schema) + if definition != nil { + if !v.disabledDependencyManagement && definition.External != "" { + def, err := v.FieldDependencyManager.ImportField(definition.External, key) + if err != nil { + return nil, fmt.Errorf("can't import field (field: %s): %w", key, err) + } + definition = &def + } + + shouldBeArray = v.shouldValueBeArray(definition) + } + + // if it needs to be normalized, the field is kept as it is + if shouldBeArray { + continue + } + // in case it is not specified any normalization and that field is an array of + // just one element, the field is going to be updated to remove the array and keep + // that element as a value. 
+ vals, ok := contents.([]interface{}) + if !ok { + continue + } + if len(vals) == 1 { + _, err := doc.Put(key, vals[0]) + if err != nil { + return nil, fmt.Errorf("key %s could not be updated: %w", key, err) + } + } + } + expandedDoc, err := createDocExpandingObjects(doc) + if err != nil { + return nil, fmt.Errorf("failure while expanding objects from doc: %w", err) + } + + newDocs = append(newDocs, expandedDoc) + } + return newDocs, nil +} + +func (v *Validator) shouldValueBeArray(definition *FieldDefinition) bool { + // normalization should just be checked if synthetic source is enabled and the + // spec version of this package is >= 2.0.0 + if v.disabledNormalization && !v.specVersion.LessThan(semver2_0_0) { + for _, normalize := range definition.Normalize { + switch normalize { + case "array": + return true + } + } + } + return false +} + +func createDocExpandingObjects(doc common.MapStr) (common.MapStr, error) { + keys := make([]string, 0) + for k := range doc { + keys = append(keys, k) + } + sort.Strings(keys) + + newDoc := make(common.MapStr) + for _, k := range keys { + value, err := doc.GetValue(k) + if err != nil { + return nil, fmt.Errorf("not found key %s: %w", k, err) + } + + _, err = newDoc.Put(k, value) + if err == nil { + continue + } + + // Possible errors found but not limited to those + // - expected map but type is string + // - expected map but type is []interface{} + if strings.HasPrefix(err.Error(), "expected map but type is") { + logger.Debugf("not able to add key %s, is this a multifield?: %s", k, err) + continue + } + return nil, fmt.Errorf("not added key %s with value %s: %w", k, value, err) + } + return newDoc, nil +} func isNumericKeyword(definition FieldDefinition, val interface{}) bool { _, isNumber := val.(float64) return isNumber && (definition.Type == "keyword" || definition.Type == "constant_keyword") @@ -683,7 +800,7 @@ func ensurePatternMatches(key, value, pattern string) error { } valid, err := regexp.MatchString(pattern, 
value) if err != nil { - return errors.Wrap(err, "invalid pattern") + return fmt.Errorf("invalid pattern: %w", err) } if !valid { return fmt.Errorf("field %q's value, %s, does not match the expected pattern: %s", key, value, pattern) diff --git a/internal/testrunner/runners/system/runner.go b/internal/testrunner/runners/system/runner.go index d400d6e816..e54b368a70 100644 --- a/internal/testrunner/runners/system/runner.go +++ b/internal/testrunner/runners/system/runner.go @@ -33,6 +33,8 @@ import ( const ( testRunMaxID = 99999 testRunMinID = 10000 + + allFieldsBody = `{"fields": ["*"]}` ) func init() { @@ -295,11 +297,75 @@ func createTestRunID() string { return fmt.Sprintf("%d", rand.Intn(testRunMaxID-testRunMinID)+testRunMinID) } -func (r *runner) getDocs(dataStream string) ([]common.MapStr, error) { +func (r *runner) isSyntheticsEnabled(dataStream, componentTemplatePackage string) (bool, error) { + logger.Debugf("check whether or not synthetics is enabled (component template %s)...", componentTemplatePackage) + resp, err := r.options.API.Cluster.GetComponentTemplate( + r.options.API.Cluster.GetComponentTemplate.WithName(componentTemplatePackage), + ) + if err != nil { + return false, fmt.Errorf("could not get component template from data stream %s: %w", dataStream, err) + } + defer resp.Body.Close() + + var results struct { + ComponentTemplates []struct { + Name string `json:"name"` + ComponentTemplate struct { + Template struct { + Mappings struct { + Source *struct { + Mode string `json:"mode"` + } `json:"_source,omitempty"` + } `json:"mappings"` + } `json:"template"` + } `json:"component_template"` + } `json:"component_templates"` + } + + if err := json.NewDecoder(resp.Body).Decode(&results); err != nil { + return false, fmt.Errorf("could not decode search results response: %w", err) + } + + if len(results.ComponentTemplates) == 0 { + logger.Debugf("no component template found for data stream %s", dataStream) + return false, nil + } + if 
len(results.ComponentTemplates) != 1 { + return false, fmt.Errorf("unexpected response, not found component template") + } + + template := results.ComponentTemplates[0] + + if template.ComponentTemplate.Template.Mappings.Source == nil { + return false, nil + } + + return template.ComponentTemplate.Template.Mappings.Source.Mode == "synthetic", nil +} + +type hits struct { + Source []common.MapStr `json:"_source"` + Fields []common.MapStr `json:"fields"` +} + +func (h hits) getDocs(syntheticsEnabled bool) []common.MapStr { + if syntheticsEnabled { + return h.Fields + } + return h.Source +} + +func (h hits) size() int { + return len(h.Source) +} + +func (r *runner) getDocs(dataStream string) (*hits, error) { resp, err := r.options.API.Search( r.options.API.Search.WithIndex(dataStream), r.options.API.Search.WithSort("@timestamp:asc"), r.options.API.Search.WithSize(elasticsearchQuerySize), + r.options.API.Search.WithSource("true"), + r.options.API.Search.WithBody(strings.NewReader(allFieldsBody)), ) if err != nil { return nil, errors.Wrap(err, "could not search data stream") @@ -313,6 +379,7 @@ func (r *runner) getDocs(dataStream string) ([]common.MapStr, error) { } Hits []struct { Source common.MapStr `json:"_source"` + Fields common.MapStr `json:"fields"` } } Error *struct { @@ -334,12 +401,13 @@ func (r *runner) getDocs(dataStream string) ([]common.MapStr, error) { logger.Debugf("found %d hits in %s data stream", numHits, dataStream) } - var docs []common.MapStr + var hits hits for _, hit := range results.Hits.Hits { - docs = append(docs, hit.Source) + hits.Source = append(hits.Source, hit.Source) + hits.Fields = append(hits.Fields, hit.Fields) } - return docs, nil + return &hits, nil } func (r *runner) runTest(config *testConfig, ctxt servicedeployer.ServiceContext, serviceOptions servicedeployer.FactoryOptions) ([]testrunner.TestResult, error) { @@ -438,6 +506,12 @@ func (r *runner) runTest(config *testConfig, ctxt servicedeployer.ServiceContext ds.Namespace, ) + 
componentTemplatePackage := fmt.Sprintf( + "%s-%s@package", + ds.Inputs[0].Streams[0].DataStream.Type, + ds.Inputs[0].Streams[0].DataStream.Dataset, + ) + r.wipeDataStreamHandler = func() error { logger.Debugf("deleting data in data stream...") if err := deleteDataStreamDocs(r.options.API, dataStream); err != nil { @@ -455,8 +529,8 @@ func (r *runner) runTest(config *testConfig, ctxt servicedeployer.ServiceContext return true, errors.New("SIGINT: cancel clearing data") } - docs, err := r.getDocs(dataStream) - return len(docs) == 0, err + hits, err := r.getDocs(dataStream) + return hits.size() == 0, err }, 2*time.Minute) if err != nil || !cleared { if err == nil { @@ -509,20 +583,20 @@ func (r *runner) runTest(config *testConfig, ctxt servicedeployer.ServiceContext // (TODO in future) Optionally exercise service to generate load. logger.Debug("checking for expected data in data stream...") - var docs []common.MapStr + var hits *hits passed, err := waitUntilTrue(func() (bool, error) { if signal.SIGINT() { return true, errors.New("SIGINT: cancel waiting for policy assigned") } var err error - docs, err = r.getDocs(dataStream) + hits, err = r.getDocs(dataStream) if config.Assert.HitCount > 0 { - return len(docs) >= config.Assert.HitCount, err + return hits.size() >= config.Assert.HitCount, err } - return len(docs) > 0, err + return hits.size() > 0, err }, waitForDataTimeout) if err != nil { return result.WithError(err) @@ -533,6 +607,14 @@ func (r *runner) runTest(config *testConfig, ctxt servicedeployer.ServiceContext return result.WithError(fmt.Errorf("%s", result.FailureMsg)) } + syntheticEnabled, err := r.isSyntheticsEnabled(dataStream, componentTemplatePackage) + if err != nil { + return result.WithError(fmt.Errorf("failed to check if synthetic source is enabled: %w", err)) + } + logger.Debugf("data stream %s has synthetics enabled: %t", dataStream, syntheticEnabled) + + docs := hits.getDocs(syntheticEnabled) + // Validate fields in docs var expectedDataset string 
if ds := r.options.TestFolder.DataStream; ds != "" { @@ -545,6 +627,7 @@ func (r *runner) runTest(config *testConfig, ctxt servicedeployer.ServiceContext fields.WithNumericKeywordFields(config.NumericKeywordFields), fields.WithExpectedDataset(expectedDataset), fields.WithEnabledImportAllECSSChema(true), + fields.WithDisableNormalization(syntheticEnabled), ) if err != nil { return result.WithError(errors.Wrapf(err, "creating fields validator for data stream failed (path: %s)", serviceOptions.DataStreamRootPath)) @@ -553,6 +636,13 @@ func (r *runner) runTest(config *testConfig, ctxt servicedeployer.ServiceContext return result.WithError(err) } + if syntheticEnabled { + docs, err = fieldsValidator.SanitizeSyntheticSourceDocs(docs) + if err != nil { + return result.WithError(fmt.Errorf("failed to sanitize synthetic source docs: %w", err)) + } + } + // Write sample events file from first doc, if requested if err := r.generateTestResult(docs); err != nil { return result.WithError(err) diff --git a/scripts/test-check-packages.sh b/scripts/test-check-packages.sh index 9eebcec9c9..6c610a0b39 100755 --- a/scripts/test-check-packages.sh +++ b/scripts/test-check-packages.sh @@ -53,6 +53,8 @@ elastic-package stack update -v # Boot up the stack elastic-package stack up -d -v +elastic-package stack status + if [ "${PACKAGE_TEST_TYPE:-other}" == "with-kind" ]; then # Boot up the kind cluster kind create cluster --config $PWD/scripts/kind-config.yaml diff --git a/test/packages/other/synthetic_mode/LICENSE.txt b/test/packages/other/synthetic_mode/LICENSE.txt new file mode 100644 index 0000000000..809108b857 --- /dev/null +++ b/test/packages/other/synthetic_mode/LICENSE.txt @@ -0,0 +1,93 @@ +Elastic License 2.0 + +URL: https://www.elastic.co/licensing/elastic-license + +## Acceptance + +By using the software, you agree to all of the terms and conditions below. 
+ +## Copyright License + +The licensor grants you a non-exclusive, royalty-free, worldwide, +non-sublicensable, non-transferable license to use, copy, distribute, make +available, and prepare derivative works of the software, in each case subject to +the limitations and conditions below. + +## Limitations + +You may not provide the software to third parties as a hosted or managed +service, where the service provides users with access to any substantial set of +the features or functionality of the software. + +You may not move, change, disable, or circumvent the license key functionality +in the software, and you may not remove or obscure any functionality in the +software that is protected by the license key. + +You may not alter, remove, or obscure any licensing, copyright, or other notices +of the licensor in the software. Any use of the licensor’s trademarks is subject +to applicable law. + +## Patents + +The licensor grants you a license, under any patent claims the licensor can +license, or becomes able to license, to make, have made, use, sell, offer for +sale, import and have imported the software, in each case subject to the +limitations and conditions in this license. This license does not cover any +patent claims that you cause to be infringed by modifications or additions to +the software. If you or your company make any written claim that the software +infringes or contributes to infringement of any patent, your patent license for +the software granted under these terms ends immediately. If your company makes +such a claim, your patent license ends immediately for work on behalf of your +company. + +## Notices + +You must ensure that anyone who gets a copy of any part of the software from you +also gets a copy of these terms. + +If you modify the software, you must include in any modified copies of the +software prominent notices stating that you have modified the software. 
+ +## No Other Rights + +These terms do not imply any licenses other than those expressly granted in +these terms. + +## Termination + +If you use the software in violation of these terms, such use is not licensed, +and your licenses will automatically terminate. If the licensor provides you +with a notice of your violation, and you cease all violation of this license no +later than 30 days after you receive that notice, your licenses will be +reinstated retroactively. However, if you violate these terms after such +reinstatement, any additional violation of these terms will cause your licenses +to terminate automatically and permanently. + +## No Liability + +*As far as the law allows, the software comes as is, without any warranty or +condition, and the licensor will not be liable to you for any damages arising +out of these terms or the use or nature of the software, under any kind of +legal claim.* + +## Definitions + +The **licensor** is the entity offering these terms, and the **software** is the +software the licensor makes available under these terms, including any portion +of it. + +**you** refers to the individual or entity agreeing to these terms. + +**your company** is any legal entity, sole proprietorship, or other kind of +organization that you work for, plus all organizations that have control over, +are under the control of, or are under common control with that +organization. **control** means ownership of substantially all the assets of an +entity, or the power to direct its management and policies by vote, contract, or +otherwise. Control can be direct or indirect. + +**your licenses** are all the licenses granted to you for the software under +these terms. + +**use** means anything you do with the software requiring one of your licenses. + +**trademark** means trademarks, service marks, and similar rights. 
diff --git a/test/packages/other/synthetic_mode/_dev/build/build.yml b/test/packages/other/synthetic_mode/_dev/build/build.yml new file mode 100644 index 0000000000..883ced510c --- /dev/null +++ b/test/packages/other/synthetic_mode/_dev/build/build.yml @@ -0,0 +1,4 @@ +dependencies: + ecs: + reference: git@v8.7.0 + import_mappings: true diff --git a/test/packages/other/synthetic_mode/_dev/deploy/docker/docker-compose.yml b/test/packages/other/synthetic_mode/_dev/deploy/docker/docker-compose.yml new file mode 100644 index 0000000000..c8d97af0bd --- /dev/null +++ b/test/packages/other/synthetic_mode/_dev/deploy/docker/docker-compose.yml @@ -0,0 +1,6 @@ +version: '3.2' +services: + synthetic_mode: + image: prom/prometheus:${PROMETHEUS_VERSION:-prometheus_2} + ports: + - 9090:9090 diff --git a/test/packages/other/synthetic_mode/_dev/deploy/variants.yml b/test/packages/other/synthetic_mode/_dev/deploy/variants.yml new file mode 100644 index 0000000000..1058c81e28 --- /dev/null +++ b/test/packages/other/synthetic_mode/_dev/deploy/variants.yml @@ -0,0 +1,4 @@ +variants: + prometheus_2: + PROMETHEUS_VERSION: v2.36.2 +default: prometheus_2 diff --git a/test/packages/other/synthetic_mode/changelog.yml b/test/packages/other/synthetic_mode/changelog.yml new file mode 100644 index 0000000000..bb0320a524 --- /dev/null +++ b/test/packages/other/synthetic_mode/changelog.yml @@ -0,0 +1,6 @@ +# newer versions go on top +- version: "0.0.1" + changes: + - description: Initial draft of the package + type: enhancement + link: https://github.com/elastic/integrations/pull/1 # FIXME Replace with the real PR link diff --git a/test/packages/other/synthetic_mode/data_stream/synthetic/_dev/test/system/test-default-config.yml b/test/packages/other/synthetic_mode/data_stream/synthetic/_dev/test/system/test-default-config.yml new file mode 100644 index 0000000000..10731d5946 --- /dev/null +++ b/test/packages/other/synthetic_mode/data_stream/synthetic/_dev/test/system/test-default-config.yml @@ 
-0,0 +1,5 @@ +vars: ~ +data_stream: + vars: + hosts: + - "{{Hostname}}:9090" diff --git a/test/packages/other/synthetic_mode/data_stream/synthetic/agent/stream/stream.yml.hbs b/test/packages/other/synthetic_mode/data_stream/synthetic/agent/stream/stream.yml.hbs new file mode 100644 index 0000000000..58f93cc7a4 --- /dev/null +++ b/test/packages/other/synthetic_mode/data_stream/synthetic/agent/stream/stream.yml.hbs @@ -0,0 +1,53 @@ +metricsets: ["collector"] +hosts: +{{#each hosts}} + - {{this}} +{{/each}} +metrics_filters.exclude: +{{#each metrics_filters.exclude}} + - {{this}} +{{/each}} +metrics_filters.include: +{{#each metrics_filters.include}} + - {{this}} +{{/each}} +metrics_path: {{metrics_path}} +period: {{period}} +rate_counters: {{rate_counters}} +{{#if bearer_token_file}} +bearer_token_file: {{bearer_token_file}} +{{/if}} +{{#if ssl.certificate_authorities}} +ssl.certificate_authorities: +{{#each ssl.certificate_authorities}} + - {{this}} +{{/each}} +{{/if}} +use_types: {{use_types}} +username: {{username}} +password: {{password}} +{{#if leaderelection }} +{{#if condition }} +condition: ${kubernetes_leaderelection.leader} == true and {{ condition }} +{{ else }} +condition: ${kubernetes_leaderelection.leader} == true +{{/if}} +{{ else }} +{{#if condition }} +condition: {{ condition }} +{{/if}} +{{/if}} +{{#if query}} +{{query}} +{{/if}} +{{#if headers}} +{{headers}} +{{/if}} +{{#if connect_timeout}} +connect_timeout: {{connect_timeout}} +{{/if}} +{{#if timeout}} +timeout: {{timeout}} +{{/if}} +data_stream: + dataset: {{data_stream.dataset}} diff --git a/test/packages/other/synthetic_mode/data_stream/synthetic/elasticsearch/ingest_pipeline/default.yml b/test/packages/other/synthetic_mode/data_stream/synthetic/elasticsearch/ingest_pipeline/default.yml new file mode 100644 index 0000000000..c1d95b20e3 --- /dev/null +++ b/test/packages/other/synthetic_mode/data_stream/synthetic/elasticsearch/ingest_pipeline/default.yml @@ -0,0 +1,17 @@ +--- +processors: + - 
set: + field: ecs.version + value: 8.7.0 + - set: + field: event.kind + value: metric + - set: + field: event.module + value: synthetic_mode + - set: + field: event.type + value: [info] + - set: + field: event.category + value: [web] diff --git a/test/packages/other/synthetic_mode/data_stream/synthetic/fields/base-fields.yml b/test/packages/other/synthetic_mode/data_stream/synthetic/fields/base-fields.yml new file mode 100644 index 0000000000..7c798f4534 --- /dev/null +++ b/test/packages/other/synthetic_mode/data_stream/synthetic/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: '@timestamp' + type: date + description: Event timestamp. diff --git a/test/packages/other/synthetic_mode/data_stream/synthetic/fields/ecs.yml b/test/packages/other/synthetic_mode/data_stream/synthetic/fields/ecs.yml new file mode 100644 index 0000000000..b98c11b26e --- /dev/null +++ b/test/packages/other/synthetic_mode/data_stream/synthetic/fields/ecs.yml @@ -0,0 +1,10 @@ +- external: ecs + name: ecs.version +- external: ecs + name: service.address +- external: ecs + name: service.type +- external: ecs + name: event.category +- external: ecs + name: event.type diff --git a/test/packages/other/synthetic_mode/data_stream/synthetic/fields/fields.yml b/test/packages/other/synthetic_mode/data_stream/synthetic/fields/fields.yml new file mode 100644 index 0000000000..01fd9ee6b7 --- /dev/null +++ b/test/packages/other/synthetic_mode/data_stream/synthetic/fields/fields.yml @@ -0,0 +1,42 @@ +- name: prometheus + type: group + fields: + - name: labels.* + type: object + object_type: keyword + description: | + Prometheus metric labels + - name: metrics.* + type: object + object_type: double + object_type_mapping_type: "*" + description: | 
+ Prometheus metric +- name: prometheus.*.value + type: object + object_type: double + object_type_mapping_type: "*" + description: > + Prometheus gauge metric + +- name: prometheus.*.counter + type: object + object_type: double + object_type_mapping_type: "*" + description: > + Prometheus counter metric + +- name: prometheus.*.rate + type: object + object_type: double + object_type_mapping_type: "*" + description: > + Prometheus rated counter metric + +- name: prometheus.*.histogram + type: object + object_type: histogram + object_type_mapping_type: "*" + description: > + Prometheus histogram metric + diff --git a/test/packages/other/synthetic_mode/data_stream/synthetic/manifest.yml b/test/packages/other/synthetic_mode/data_stream/synthetic/manifest.yml new file mode 100644 index 0000000000..907b04f12d --- /dev/null +++ b/test/packages/other/synthetic_mode/data_stream/synthetic/manifest.yml @@ -0,0 +1,147 @@ +title: Prometheus collector metrics synthetic +type: metrics +streams: + - input: prometheus/metrics + vars: + - name: hosts + type: text + title: Hosts + multi: true + required: true + show_user: true + default: + - localhost:9090 + - name: metrics_path + type: text + title: Metrics Path + multi: false + required: false + show_user: true + default: /metrics + - name: period + type: text + title: Period + multi: false + required: true + show_user: true + default: 10s + - name: use_types + type: bool + title: Use Types + multi: false + required: true + show_user: true + default: true + - name: rate_counters + type: bool + title: Rate Counters + multi: false + required: true + show_user: true + default: true + - name: leaderelection + type: bool + title: Leader Election + description: Enable leaderelection between a set of Elastic Agents running on Kubernetes + multi: false + required: true + show_user: true + default: false + - name: condition + title: Condition + description: Condition to filter when to apply this datastream + type: text + multi: false + 
required: false + show_user: true + - name: bearer_token_file + type: text + title: 'HTTP config options: bearer_token_file' + description: If defined, the contents of the file will be read once at initialization and then the value will be used in an HTTP Authorization header. + multi: false + required: false + show_user: false + - name: ssl.certificate_authorities + type: text + title: SSL Certificate Authorities + multi: true + required: false + show_user: false + - name: metrics_filters.exclude + type: text + title: Metrics Filters Exclude + multi: true + required: false + show_user: false + default: [] + - name: metrics_filters.include + type: text + title: Metrics Filters Include + multi: true + required: false + show_user: false + default: [] + - name: username + type: text + title: 'HTTP config options: Username' + description: The username to use for basic authentication. + multi: false + required: false + show_user: false + default: user + - name: password + type: password + title: 'HTTP config options: Password' + description: The password to use for basic authentication. 
+ multi: false + required: false + show_user: false + default: secret + - name: connect_timeout + type: text + title: 'HTTP config options: connect_timeout' + description: Total time limit for an HTTP connection to be completed (Default 2 seconds) + multi: false + required: false + show_user: false + - name: timeout + type: text + title: 'HTTP config options: timeout' + description: Total time limit for HTTP requests made by the module (Default 10 seconds) + multi: false + required: false + show_user: false + - name: headers + type: yaml + title: "HTTP config options: headers" + description: A list of headers to use with the HTTP request + multi: false + required: false + show_user: false + default: | + # headers: + # Cookie: abcdef=123456 + # My-Custom-Header: my-custom-value + - name: query + type: yaml + title: "HTTP config options: query" + description: An optional value to pass common query params in YAML + multi: false + required: false + show_user: false + default: | + # query: + # key: value + - name: data_stream.dataset + type: text + title: 'Datastream Dataset name' + description: Name of Datastream dataset + multi: false + default: synthetic_mode.synthetic + required: true + show_user: true + title: Prometheus collector metrics + enabled: true + description: Collect Prometheus collector metrics +elasticsearch: + source_mode: synthetic diff --git a/test/packages/other/synthetic_mode/data_stream/synthetic/sample_event.json b/test/packages/other/synthetic_mode/data_stream/synthetic/sample_event.json new file mode 100644 index 0000000000..7826070c8f --- /dev/null +++ b/test/packages/other/synthetic_mode/data_stream/synthetic/sample_event.json @@ -0,0 +1,79 @@ +{ + "@timestamp": "2023-06-07T11:18:14.475Z", + "agent": { + "ephemeral_id": "0265eeb1-26e2-452a-8ec0-e52e125ba9f6", + "id": "f1c9ef05-68a9-4eee-b254-89692ad3eacf", + "name": "docker-fleet-agent", + "type": "metricbeat", + "version": "8.7.1" + }, + "data_stream": { + "dataset": 
"synthetic_mode.synthetic", + "namespace": "ep", + "type": "metrics" + }, + "ecs": { + "version": "8.7.0" + }, + "elastic_agent": { + "id": "f1c9ef05-68a9-4eee-b254-89692ad3eacf", + "snapshot": false, + "version": "8.7.1" + }, + "event": { + "agent_id_status": "verified", + "category": [ + "web" + ], + "dataset": "synthetic_mode.synthetic", + "duration": 4378129, + "ingested": "2023-06-07T11:18:18Z", + "kind": "metric", + "module": "synthetic_mode", + "type": [ + "info" + ] + }, + "host": { + "architecture": "x86_64", + "containerized": false, + "hostname": "docker-fleet-agent", + "id": "fd2c4b0943e444508c12855a04d117c7", + "ip": [ + "192.168.80.7" + ], + "mac": [ + "02-42-C0-A8-50-07" + ], + "name": "docker-fleet-agent", + "os": { + "codename": "focal", + "family": "debian", + "kernel": "5.19.0-42-generic", + "name": "Ubuntu", + "platform": "ubuntu", + "type": "linux", + "version": "20.04.6 LTS (Focal Fossa)" + } + }, + "metricset": { + "name": "collector", + "period": 10000 + }, + "prometheus": { + "labels": { + "event": "delete", + "instance": "elastic-package-service-synthetic_mode-1:9090", + "job": "prometheus", + "role": "node" + }, + "prometheus_sd_kubernetes_events_total": { + "counter": 0, + "rate": 0 + } + }, + "service": { + "address": "http://elastic-package-service-synthetic_mode-1:9090/metrics", + "type": "prometheus" + } +} \ No newline at end of file diff --git a/test/packages/other/synthetic_mode/docs/README.md b/test/packages/other/synthetic_mode/docs/README.md new file mode 100644 index 0000000000..355a85cb5f --- /dev/null +++ b/test/packages/other/synthetic_mode/docs/README.md @@ -0,0 +1,84 @@ + + + +# Synthetic Mode + + + +## Data streams + + + + + + + + + + + +## Requirements + +You need Elasticsearch for storing and searching your data and Kibana for visualizing and managing it. +You can use our hosted Elasticsearch Service on Elastic Cloud, which is recommended, or self-manage the Elastic Stack on your own hardware. 
+ + + +## Setup + + + +For step-by-step instructions on how to set up an integration, see the +[Getting started](https://www.elastic.co/guide/en/welcome-to-elastic/current/getting-started-observability.html) guide. + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/test/packages/other/synthetic_mode/img/sample-logo.svg b/test/packages/other/synthetic_mode/img/sample-logo.svg new file mode 100644 index 0000000000..6268dd88f3 --- /dev/null +++ b/test/packages/other/synthetic_mode/img/sample-logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/test/packages/other/synthetic_mode/img/sample-screenshot.png b/test/packages/other/synthetic_mode/img/sample-screenshot.png new file mode 100644 index 0000000000..d7a56a3ecc Binary files /dev/null and b/test/packages/other/synthetic_mode/img/sample-screenshot.png differ diff --git a/test/packages/other/synthetic_mode/manifest.yml b/test/packages/other/synthetic_mode/manifest.yml new file mode 100644 index 0000000000..a2a4bd50e9 --- /dev/null +++ b/test/packages/other/synthetic_mode/manifest.yml @@ -0,0 +1,33 @@ +format_version: 2.8.0 +name: synthetic_mode +title: "Synthetic Mode" +version: 0.0.1 +source: + license: "Elastic-2.0" +description: "This package contains data streams with synthetic mode enabled" +type: integration +categories: + - custom +conditions: + kibana.version: "^8.6.0" + elastic.subscription: "basic" +screenshots: + - src: /img/sample-screenshot.png + title: Sample screenshot + size: 600x600 + type: image/png +icons: + - src: /img/sample-logo.svg + title: Sample logo + size: 32x32 + type: image/svg+xml +policy_templates: + - name: prometheus + title: Prometheus metrics + description: Collect metrics from Prometheus instances + inputs: + - type: prometheus/metrics + title: Collect Prometheus metrics + description: Collecting metrics from Prometheus exporters, Prometheus query API as well as able to receive metrics via remote write functionality. 
+owner: + github: elastic/integrations