diff --git a/.buildkite/pipeline.trigger.integration.tests.sh b/.buildkite/pipeline.trigger.integration.tests.sh index 5a27da9986..6cc396c901 100755 --- a/.buildkite/pipeline.trigger.integration.tests.sh +++ b/.buildkite/pipeline.trigger.integration.tests.sh @@ -46,6 +46,7 @@ for test in ${CHECK_PACKAGES_TESTS[@]}; do echo " - build/test-results/*.xml" echo " - build/elastic-stack-dump/check-*/logs/*.log" echo " - build/elastic-stack-dump/check-*/logs/fleet-server-internal/**/*" + echo " - build/test-coverage/coverage-*.xml" # these files should not be used to compute the final coverage of elastic-package if [[ $test =~ with-kind$ ]]; then echo " - build/kubectl-dump.txt" fi @@ -63,6 +64,7 @@ for package in $(find . -maxdepth 1 -mindepth 1 -type d) ; do echo " provider: \"gcp\"" echo " artifact_paths:" echo " - build/test-results/*.xml" + echo " - build/test-coverage/coverage-*.xml" # these files should not be used to compute the final coverage of elastic-package done popd > /dev/null @@ -79,6 +81,7 @@ for package in $(find . -maxdepth 1 -mindepth 1 -type d) ; do echo " provider: \"gcp\"" echo " artifact_paths:" echo " - build/test-results/*.xml" + echo " - build/test-coverage/coverage-*.xml" # these files should not be used to compute the final coverage of elastic-package done popd > /dev/null diff --git a/cmd/testrunner.go b/cmd/testrunner.go index 242844257d..f4308d421b 100644 --- a/cmd/testrunner.go +++ b/cmd/testrunner.go @@ -9,6 +9,7 @@ import ( "fmt" "os" "path/filepath" + "slices" "strings" "github.com/spf13/cobra" @@ -71,6 +72,7 @@ func setupTestCommand() *cobraext.Command { cmd.PersistentFlags().StringP(cobraext.ReportFormatFlagName, "", string(formats.ReportFormatHuman), cobraext.ReportFormatFlagDescription) cmd.PersistentFlags().StringP(cobraext.ReportOutputFlagName, "", string(outputs.ReportOutputSTDOUT), cobraext.ReportOutputFlagDescription) cmd.PersistentFlags().BoolP(cobraext.TestCoverageFlagName, "", false, cobraext.TestCoverageFlagDescription) + cmd.PersistentFlags().StringP(cobraext.TestCoverageFormatFlagName, "", "cobertura", fmt.Sprintf(cobraext.TestCoverageFormatFlagDescription, strings.Join(testrunner.CoverageFormatsList(), ","))) cmd.PersistentFlags().DurationP(cobraext.DeferCleanupFlagName, "", 0, cobraext.DeferCleanupFlagDescription) cmd.PersistentFlags().String(cobraext.VariantFlagName, "", cobraext.VariantFlagDescription) cmd.PersistentFlags().StringP(cobraext.ProfileFlagName, "p", "", fmt.Sprintf(cobraext.ProfileFlagDescription, install.ProfileNameEnvVar)) @@ -127,6 +129,15 @@ func testTypeCommandActionFactory(runner testrunner.TestRunner) cobraext.Command return cobraext.FlagParsingError(err, cobraext.TestCoverageFlagName) } + testCoverageFormat, err := cmd.Flags().GetString(cobraext.TestCoverageFormatFlagName) + if err != nil { + return cobraext.FlagParsingError(err, cobraext.TestCoverageFormatFlagName) + } + + if !slices.Contains(testrunner.CoverageFormatsList(), testCoverageFormat) { + return cobraext.FlagParsingError(fmt.Errorf("coverage format not available: %s", testCoverageFormat), cobraext.TestCoverageFormatFlagName) + } + packageRootPath, found, err := packages.FindPackageRoot() if !found { return errors.New("package root not found") @@ -246,6 +257,7 @@ func testTypeCommandActionFactory(runner testrunner.TestRunner) cobraext.Command DeferCleanup: deferCleanup, ServiceVariant: variantFlag, WithCoverage: testCoverage, + CoverageType: testCoverageFormat, }) results = append(results, r...) 
@@ -266,7 +278,7 @@ func testTypeCommandActionFactory(runner testrunner.TestRunner) cobraext.Command } if testCoverage { - err := testrunner.WriteCoverage(packageRootPath, manifest.Name, runner.Type(), results) + err := testrunner.WriteCoverage(packageRootPath, manifest.Name, manifest.Type, runner.Type(), results, testCoverageFormat) if err != nil { return fmt.Errorf("error writing test coverage: %w", err) } diff --git a/internal/cobraext/flags.go b/internal/cobraext/flags.go index 4cd1580e08..2faaa4863d 100644 --- a/internal/cobraext/flags.go +++ b/internal/cobraext/flags.go @@ -190,7 +190,10 @@ const ( StatusExtraInfoFlagDescription = "show additional information (comma-separated values: \"%s\")" TestCoverageFlagName = "test-coverage" - TestCoverageFlagDescription = "generate Cobertura test coverage reports" + TestCoverageFlagDescription = "enable test coverage reports" + + TestCoverageFormatFlagName = "coverage-format" + TestCoverageFormatFlagDescription = "set format for coverage reports: %s" VariantFlagName = "variant" VariantFlagDescription = "service variant" diff --git a/internal/packages/assets.go b/internal/packages/assets.go index 2ed2ec6ad6..4f3a98bc21 100644 --- a/internal/packages/assets.go +++ b/internal/packages/assets.go @@ -153,6 +153,8 @@ func loadElasticsearchAssets(pkgRootPath string) ([]Asset, error) { } } + // TODO add assets for input packages + return assets, nil } diff --git a/internal/testrunner/coberturacoverage.go b/internal/testrunner/coberturacoverage.go new file mode 100644 index 0000000000..38e3a8e651 --- /dev/null +++ b/internal/testrunner/coberturacoverage.go @@ -0,0 +1,263 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package testrunner + +import ( + "bytes" + "encoding/xml" + "fmt" + "path/filepath" + "sort" +) + +func init() { + registerCoverageReporterFormat("cobertura") +} + +const coverageDtd = `` + +// CoberturaCoverage is the root element for a Cobertura XML report. +type CoberturaCoverage struct { + XMLName xml.Name `xml:"coverage"` + LineRate float32 `xml:"line-rate,attr"` + BranchRate float32 `xml:"branch-rate,attr"` + Version string `xml:"version,attr"` + Timestamp int64 `xml:"timestamp,attr"` + LinesCovered int64 `xml:"lines-covered,attr"` + LinesValid int64 `xml:"lines-valid,attr"` + BranchesCovered int64 `xml:"branches-covered,attr"` + BranchesValid int64 `xml:"branches-valid,attr"` + Complexity float32 `xml:"complexity,attr"` + Sources []*CoberturaSource `xml:"sources>source"` + Packages []*CoberturaPackage `xml:"packages>package"` +} + +// CoberturaSource represents a base path to the covered source code. +type CoberturaSource struct { + Path string `xml:",chardata"` +} + +// CoberturaPackage represents a package in a Cobertura XML report. +type CoberturaPackage struct { + Name string `xml:"name,attr"` + LineRate float32 `xml:"line-rate,attr"` + BranchRate float32 `xml:"branch-rate,attr"` + Complexity float32 `xml:"complexity,attr"` + Classes []*CoberturaClass `xml:"classes>class"` +} + +// CoberturaClass represents a class in a Cobertura XML report. 
+type CoberturaClass struct { + Name string `xml:"name,attr"` + Filename string `xml:"filename,attr"` + LineRate float32 `xml:"line-rate,attr"` + BranchRate float32 `xml:"branch-rate,attr"` + Complexity float32 `xml:"complexity,attr"` + Methods []*CoberturaMethod `xml:"methods>method"` + Lines []*CoberturaLine `xml:"lines>line"` +} + +// CoberturaMethod represents a method in a Cobertura XML report. +type CoberturaMethod struct { + Name string `xml:"name,attr"` + Signature string `xml:"signature,attr"` + LineRate float32 `xml:"line-rate,attr"` + BranchRate float32 `xml:"branch-rate,attr"` + Complexity float32 `xml:"complexity,attr"` + Lines []*CoberturaLine `xml:"lines>line"` +} + +// CoberturaLine represents a source line in a Cobertura XML report. +type CoberturaLine struct { + Number int `xml:"number,attr"` + Hits int64 `xml:"hits,attr"` +} + +func (c *CoberturaCoverage) TimeStamp() int64 { + return c.Timestamp +} + +func (c *CoberturaCoverage) Bytes() ([]byte, error) { + out, err := xml.MarshalIndent(&c, "", " ") + if err != nil { + return nil, fmt.Errorf("unable to format test results as Coverage: %w", err) + } + + var buffer bytes.Buffer + buffer.WriteString(xml.Header) + buffer.WriteString("\n") + buffer.WriteString(coverageDtd) + buffer.WriteString("\n") + buffer.Write(out) + return buffer.Bytes(), nil +} + +// merge merges two coverage reports for a given class. +func (c *CoberturaClass) merge(b *CoberturaClass) error { + // Check preconditions: classes should be the same. + equal := c.Name == b.Name && + c.Filename == b.Filename && + len(c.Lines) == len(b.Lines) && + len(c.Methods) == len(b.Methods) + for idx := range c.Lines { + equal = equal && c.Lines[idx].Number == b.Lines[idx].Number + } + for idx := range c.Methods { + equal = equal && c.Methods[idx].Name == b.Methods[idx].Name && + len(c.Methods[idx].Lines) == len(b.Methods[idx].Lines) + } + if !equal { + return fmt.Errorf("merging incompatible classes: %+v != %+v", *c, *b) + } + // Update methods + for idx := range b.Methods { + for l := range b.Methods[idx].Lines { + c.Methods[idx].Lines[l].Hits += b.Methods[idx].Lines[l].Hits + } + } + // Rebuild lines + c.Lines = nil + for _, m := range c.Methods { + c.Lines = append(c.Lines, m.Lines...) + } + return nil +} + +// merge merges two coverage reports for a given package. +func (p *CoberturaPackage) merge(b *CoberturaPackage) error { + // Merge classes + for _, class := range b.Classes { + var target *CoberturaClass + for _, existing := range p.Classes { + if existing.Name == class.Name { + target = existing + break + } + } + if target != nil { + if err := target.merge(class); err != nil { + return err + } + } else { + p.Classes = append(p.Classes, class) + } + } + return nil +} + +// merge merges two coverage reports. 
+func (c *CoberturaCoverage) Merge(other CoverageReport) error { + b, ok := other.(*CoberturaCoverage) + if !ok { + return fmt.Errorf("not able to assert report to be merged as CoberturaCoverage") + + } + // Merge source paths + for _, path := range b.Sources { + found := false + for _, existing := range c.Sources { + if found = existing.Path == path.Path; found { + break + } + } + if !found { + c.Sources = append(c.Sources, path) + } + } + + // Merge packages + for _, pkg := range b.Packages { + var target *CoberturaPackage + for _, existing := range c.Packages { + if existing.Name == pkg.Name { + target = existing + break + } + } + if target != nil { + if err := target.merge(pkg); err != nil { + return err + } + } else { + c.Packages = append(c.Packages, pkg) + } + } + + // Recalculate global line coverage count + c.LinesValid = 0 + c.LinesCovered = 0 + for _, pkg := range c.Packages { + for _, cls := range pkg.Classes { + for _, line := range cls.Lines { + c.LinesValid++ + if line.Hits > 0 { + c.LinesCovered++ + } + } + } + } + return nil +} + +func transformToCoberturaReport(details *testCoverageDetails, baseFolder string, timestamp int64) *CoberturaCoverage { + var classes []*CoberturaClass + lineNumberTestType := lineNumberPerTestType(string(details.testType)) + + // sort data streams to ensure same ordering in coverage arrays + sortedDataStreams := make([]string, 0, len(details.dataStreams)) + for dataStream := range details.dataStreams { + sortedDataStreams = append(sortedDataStreams, dataStream) + } + sort.Strings(sortedDataStreams) + + for _, dataStream := range sortedDataStreams { + testCases := details.dataStreams[dataStream] + + if dataStream == "" && details.packageType == "integration" { + continue // ignore tests running in the package context (not data stream), mostly referring to installed assets + } + + var methods []*CoberturaMethod + var lines []*CoberturaLine + + if len(testCases) == 0 { + methods = append(methods, &CoberturaMethod{ + Name: "Missing", + Lines: []*CoberturaLine{{Number: lineNumberTestType, Hits: 0}}, + }) + lines = append(lines, []*CoberturaLine{{Number: lineNumberTestType, Hits: 0}}...) + } else { + methods = append(methods, &CoberturaMethod{ + Name: "OK", + Lines: []*CoberturaLine{{Number: lineNumberTestType, Hits: 1}}, + }) + lines = append(lines, []*CoberturaLine{{Number: lineNumberTestType, Hits: 1}}...) 
+ } + + fileName := filepath.Join(baseFolder, details.packageName, "data_stream", dataStream, "manifest.yml") + if dataStream == "" { + // input package + fileName = filepath.Join(baseFolder, details.packageName, "manifest.yml") + } + + aClass := &CoberturaClass{ + Name: string(details.testType), + Filename: fileName, + Methods: methods, + Lines: lines, + } + classes = append(classes, aClass) + } + + return &CoberturaCoverage{ + Timestamp: timestamp, + Packages: []*CoberturaPackage{ + { + Name: details.packageName, + Classes: classes, + }, + }, + } +} diff --git a/internal/testrunner/coverageoutput_test.go b/internal/testrunner/coberturacoverage_test.go similarity index 97% rename from internal/testrunner/coverageoutput_test.go rename to internal/testrunner/coberturacoverage_test.go index 5cdd234c30..c92c70933f 100644 --- a/internal/testrunner/coverageoutput_test.go +++ b/internal/testrunner/coberturacoverage_test.go @@ -111,7 +111,6 @@ func TestCoberturaCoverage_Merge(t *testing.T) { Methods: []*CoberturaMethod{ { Name: "foo", - Hits: 2, Lines: []*CoberturaLine{ { Number: 13, @@ -125,7 +124,6 @@ func TestCoberturaCoverage_Merge(t *testing.T) { }, { Name: "bar", - Hits: 1, Lines: []*CoberturaLine{ { Number: 24, @@ -163,7 +161,6 @@ func TestCoberturaCoverage_Merge(t *testing.T) { Methods: []*CoberturaMethod{ { Name: "foo", - Hits: 1, Lines: []*CoberturaLine{ { Number: 13, @@ -177,7 +174,6 @@ func TestCoberturaCoverage_Merge(t *testing.T) { }, { Name: "bar", - Hits: 1, Lines: []*CoberturaLine{ { Number: 24, @@ -217,7 +213,6 @@ func TestCoberturaCoverage_Merge(t *testing.T) { Methods: []*CoberturaMethod{ { Name: "foo", - Hits: 3, Lines: []*CoberturaLine{ { Number: 13, @@ -231,7 +226,6 @@ func TestCoberturaCoverage_Merge(t *testing.T) { }, { Name: "bar", - Hits: 2, Lines: []*CoberturaLine{ { Number: 24, @@ -263,7 +257,7 @@ func TestCoberturaCoverage_Merge(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - err := tt.rhs.merge(&tt.lhs) + err := tt.rhs.Merge(&tt.lhs) if !tt.wantErr { if !assert.NoError(t, err) { t.Fatal(err) diff --git a/internal/testrunner/coverageoutput.go b/internal/testrunner/coverageoutput.go deleted file mode 100644 index 72658ade2f..0000000000 --- a/internal/testrunner/coverageoutput.go +++ /dev/null @@ -1,400 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License; -// you may not use this file except in compliance with the Elastic License. - -package testrunner - -import ( - "bytes" - "encoding/xml" - "errors" - "fmt" - "os" - "path" - "path/filepath" - "time" - - "github.com/elastic/elastic-package/internal/builder" - "github.com/elastic/elastic-package/internal/multierror" -) - -const coverageDtd = `` - -type testCoverageDetails struct { - packageName string - testType TestType - dataStreams map[string][]string // : - cobertura *CoberturaCoverage // For tests to provide custom Cobertura results. 
- errors multierror.Error -} - -func newTestCoverageDetails(packageName string, testType TestType) *testCoverageDetails { - return &testCoverageDetails{packageName: packageName, testType: testType, dataStreams: map[string][]string{}} -} - -func (tcd *testCoverageDetails) withUncoveredDataStreams(dataStreams []string) *testCoverageDetails { - for _, wt := range dataStreams { - tcd.dataStreams[wt] = []string{} - } - return tcd -} - -func (tcd *testCoverageDetails) withTestResults(results []TestResult) *testCoverageDetails { - for _, result := range results { - if _, ok := tcd.dataStreams[result.DataStream]; !ok { - tcd.dataStreams[result.DataStream] = []string{} - } - tcd.dataStreams[result.DataStream] = append(tcd.dataStreams[result.DataStream], result.Name) - if tcd.cobertura != nil && result.Coverage != nil { - if err := tcd.cobertura.merge(result.Coverage); err != nil { - tcd.errors = append(tcd.errors, fmt.Errorf("can't merge Cobertura coverage for test `%s`: %w", result.Name, err)) - } - } else if tcd.cobertura == nil { - tcd.cobertura = result.Coverage - } - } - return tcd -} - -// CoberturaCoverage is the root element for a Cobertura XML report. -type CoberturaCoverage struct { - XMLName xml.Name `xml:"coverage"` - LineRate float32 `xml:"line-rate,attr"` - BranchRate float32 `xml:"branch-rate,attr"` - Version string `xml:"version,attr"` - Timestamp int64 `xml:"timestamp,attr"` - LinesCovered int64 `xml:"lines-covered,attr"` - LinesValid int64 `xml:"lines-valid,attr"` - BranchesCovered int64 `xml:"branches-covered,attr"` - BranchesValid int64 `xml:"branches-valid,attr"` - Complexity float32 `xml:"complexity,attr"` - Sources []*CoberturaSource `xml:"sources>source"` - Packages []*CoberturaPackage `xml:"packages>package"` -} - -// CoberturaSource represents a base path to the covered source code. -type CoberturaSource struct { - Path string `xml:",chardata"` -} - -// CoberturaPackage represents a package in a Cobertura XML report. -type CoberturaPackage struct { - Name string `xml:"name,attr"` - LineRate float32 `xml:"line-rate,attr"` - BranchRate float32 `xml:"branch-rate,attr"` - Complexity float32 `xml:"complexity,attr"` - Classes []*CoberturaClass `xml:"classes>class"` -} - -// CoberturaClass represents a class in a Cobertura XML report. -type CoberturaClass struct { - Name string `xml:"name,attr"` - Filename string `xml:"filename,attr"` - LineRate float32 `xml:"line-rate,attr"` - BranchRate float32 `xml:"branch-rate,attr"` - Complexity float32 `xml:"complexity,attr"` - Methods []*CoberturaMethod `xml:"methods>method"` - Lines []*CoberturaLine `xml:"lines>line"` -} - -// CoberturaMethod represents a method in a Cobertura XML report. -type CoberturaMethod struct { - Name string `xml:"name,attr"` - Signature string `xml:"signature,attr"` - LineRate float32 `xml:"line-rate,attr"` - BranchRate float32 `xml:"branch-rate,attr"` - Complexity float32 `xml:"complexity,attr"` - Hits int64 `xml:"hits,attr"` - Lines []*CoberturaLine `xml:"lines>line"` -} - -// CoberturaLine represents a source line in a Cobertura XML report. 
-type CoberturaLine struct { - Number int `xml:"number,attr"` - Hits int64 `xml:"hits,attr"` -} - -func (c *CoberturaCoverage) bytes() ([]byte, error) { - out, err := xml.MarshalIndent(&c, "", " ") - if err != nil { - return nil, fmt.Errorf("unable to format test results as xUnit: %w", err) - } - - var buffer bytes.Buffer - buffer.WriteString(xml.Header) - buffer.WriteString("\n") - buffer.WriteString(coverageDtd) - buffer.WriteString("\n") - buffer.Write(out) - return buffer.Bytes(), nil -} - -// merge merges two coverage reports for a given class. -func (c *CoberturaClass) merge(b *CoberturaClass) error { - // Check preconditions: classes should be the same. - equal := c.Name == b.Name && - c.Filename == b.Filename && - len(c.Lines) == len(b.Lines) && - len(c.Methods) == len(b.Methods) - for idx := range c.Lines { - equal = equal && c.Lines[idx].Number == b.Lines[idx].Number - } - for idx := range c.Methods { - equal = equal && c.Methods[idx].Name == b.Methods[idx].Name && - len(c.Methods[idx].Lines) == len(b.Methods[idx].Lines) - } - if !equal { - return fmt.Errorf("merging incompatible classes: %+v != %+v", *c, *b) - } - // Update methods - for idx := range b.Methods { - c.Methods[idx].Hits += b.Methods[idx].Hits - for l := range b.Methods[idx].Lines { - c.Methods[idx].Lines[l].Hits += b.Methods[idx].Lines[l].Hits - } - } - // Rebuild lines - c.Lines = nil - for _, m := range c.Methods { - c.Lines = append(c.Lines, m.Lines...) - } - return nil -} - -// merge merges two coverage reports for a given package. -func (p *CoberturaPackage) merge(b *CoberturaPackage) error { - // Merge classes - for _, class := range b.Classes { - var target *CoberturaClass - for _, existing := range p.Classes { - if existing.Name == class.Name { - target = existing - break - } - } - if target != nil { - if err := target.merge(class); err != nil { - return err - } - } else { - p.Classes = append(p.Classes, class) - } - } - return nil -} - -// merge merges two coverage reports. -func (c *CoberturaCoverage) merge(b *CoberturaCoverage) error { - // Merge source paths - for _, path := range b.Sources { - found := false - for _, existing := range c.Sources { - if found = existing.Path == path.Path; found { - break - } - } - if !found { - c.Sources = append(c.Sources, path) - } - } - - // Merge packages - for _, pkg := range b.Packages { - var target *CoberturaPackage - for _, existing := range c.Packages { - if existing.Name == pkg.Name { - target = existing - break - } - } - if target != nil { - if err := target.merge(pkg); err != nil { - return err - } - } else { - c.Packages = append(c.Packages, pkg) - } - } - - // Recalculate global line coverage count - c.LinesValid = 0 - c.LinesCovered = 0 - for _, pkg := range c.Packages { - for _, cls := range pkg.Classes { - for _, line := range cls.Lines { - c.LinesValid++ - if line.Hits > 0 { - c.LinesCovered++ - } - } - } - } - return nil -} - -// WriteCoverage function calculates test coverage for the given package. -// It requires to execute tests for all data streams (same test type), so the coverage can be calculated properly. -func WriteCoverage(packageRootPath, packageName string, testType TestType, results []TestResult) error { - details, err := collectTestCoverageDetails(packageRootPath, packageName, testType, results) - if err != nil { - return fmt.Errorf("can't collect test coverage details: %w", err) - } - - // Use provided cobertura report, or generate a custom report if not available. 
- report := details.cobertura - if report == nil { - report = transformToCoberturaReport(details) - } - - err = writeCoverageReportFile(report, packageName) - if err != nil { - return fmt.Errorf("can't write test coverage report file: %w", err) - } - return nil -} - -func collectTestCoverageDetails(packageRootPath, packageName string, testType TestType, results []TestResult) (*testCoverageDetails, error) { - withoutTests, err := findDataStreamsWithoutTests(packageRootPath, testType) - if err != nil { - return nil, fmt.Errorf("can't find data streams without tests: %w", err) - } - - details := newTestCoverageDetails(packageName, testType). - withUncoveredDataStreams(withoutTests). - withTestResults(results) - if len(details.errors) > 0 { - return nil, details.errors - } - return details, nil -} - -func findDataStreamsWithoutTests(packageRootPath string, testType TestType) ([]string, error) { - var noTests []string - - dataStreamDir := filepath.Join(packageRootPath, "data_stream") - dataStreams, err := os.ReadDir(dataStreamDir) - if errors.Is(err, os.ErrNotExist) { - return noTests, nil // there are packages that don't have any data streams (fleet_server, security_detection_engine) - } else if err != nil { - return nil, fmt.Errorf("can't list data streams directory: %w", err) - } - - for _, dataStream := range dataStreams { - if !dataStream.IsDir() { - continue - } - - expected, err := verifyTestExpected(packageRootPath, dataStream.Name(), testType) - if err != nil { - return nil, fmt.Errorf("can't verify if test is expected: %w", err) - } - if !expected { - continue - } - - dataStreamTestPath := filepath.Join(packageRootPath, "data_stream", dataStream.Name(), "_dev", "test", string(testType)) - _, err = os.Stat(dataStreamTestPath) - if errors.Is(err, os.ErrNotExist) { - noTests = append(noTests, dataStream.Name()) - continue - } - if err != nil { - return nil, fmt.Errorf("can't stat path: %s: %w", dataStreamTestPath, err) - } - } - return noTests, nil -} - -// verifyTestExpected function checks if tests are actually expected. -// Pipeline tests require an ingest pipeline to be defined in the data stream. 
-func verifyTestExpected(packageRootPath string, dataStreamName string, testType TestType) (bool, error) { - if testType != "pipeline" { - return true, nil - } - - ingestPipelinePath := filepath.Join(packageRootPath, "data_stream", dataStreamName, "elasticsearch", "ingest_pipeline") - _, err := os.Stat(ingestPipelinePath) - if errors.Is(err, os.ErrNotExist) { - return false, nil - } - if err != nil { - return false, fmt.Errorf("can't stat path: %s: %w", ingestPipelinePath, err) - } - return true, nil -} - -func transformToCoberturaReport(details *testCoverageDetails) *CoberturaCoverage { - var classes []*CoberturaClass - for dataStream, testCases := range details.dataStreams { - if dataStream == "" { - continue // ignore tests running in the package context (not data stream), mostly referring to installed assets - } - - var methods []*CoberturaMethod - - if len(testCases) == 0 { - methods = append(methods, &CoberturaMethod{ - Name: "Missing", - Lines: []*CoberturaLine{{Number: 1, Hits: 0}}, - }) - } else { - methods = append(methods, &CoberturaMethod{ - Name: "OK", - Lines: []*CoberturaLine{{Number: 1, Hits: 1}}, - }) - } - - aClass := &CoberturaClass{ - Name: string(details.testType), - Filename: path.Join(details.packageName, dataStream), - Methods: methods, - } - classes = append(classes, aClass) - } - - return &CoberturaCoverage{ - Timestamp: time.Now().UnixNano(), - Packages: []*CoberturaPackage{ - { - Name: details.packageName, - Classes: classes, - }, - }, - } -} - -func writeCoverageReportFile(report *CoberturaCoverage, packageName string) error { - dest, err := testCoverageReportsDir() - if err != nil { - return fmt.Errorf("could not determine test coverage reports folder: %w", err) - } - - // Create test coverage reports folder if it doesn't exist - _, err = os.Stat(dest) - if err != nil && errors.Is(err, os.ErrNotExist) { - if err := os.MkdirAll(dest, 0755); err != nil { - return fmt.Errorf("could not create test coverage reports folder: %w", err) - } - } - - fileName := fmt.Sprintf("coverage-%s-%d-report.xml", packageName, report.Timestamp) - filePath := filepath.Join(dest, fileName) - - b, err := report.bytes() - if err != nil { - return fmt.Errorf("can't marshal test coverage report: %w", err) - } - - if err := os.WriteFile(filePath, b, 0644); err != nil { - return fmt.Errorf("could not write test coverage report file: %w", err) - } - return nil -} - -func testCoverageReportsDir() (string, error) { - buildDir, err := builder.BuildDirectory() - if err != nil { - return "", fmt.Errorf("locating build directory failed: %w", err) - } - return filepath.Join(buildDir, "test-coverage"), nil -} diff --git a/internal/testrunner/coveragereport.go b/internal/testrunner/coveragereport.go new file mode 100644 index 0000000000..9a81c7cc31 --- /dev/null +++ b/internal/testrunner/coveragereport.go @@ -0,0 +1,257 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package testrunner + +import ( + "errors" + "fmt" + "os" + "path/filepath" + "time" + + "github.com/elastic/elastic-package/internal/builder" + "github.com/elastic/elastic-package/internal/files" + "github.com/elastic/elastic-package/internal/multierror" +) + +type CoverageReport interface { + TimeStamp() int64 + Merge(CoverageReport) error + Bytes() ([]byte, error) +} + +var coverageReportFormatters = []string{} + +// registerCoverageReporterFormat registers a test coverage report formatter. +func registerCoverageReporterFormat(name string) { + coverageReportFormatters = append(coverageReportFormatters, name) +} + +func CoverageFormatsList() []string { + return coverageReportFormatters +} + +func lineNumberPerTestType(testType string) int { + var lineNumberPerTestType map[string]int = map[string]int{ + "asset": 1, + "pipeline": 2, + "system": 3, + "static": 4, + } + lineNumber, ok := lineNumberPerTestType[testType] + if !ok { + lineNumber = 5 + } + return lineNumber +} + +type testCoverageDetails struct { + packageName string + packageType string + testType TestType + dataStreams map[string][]string // : + coverage CoverageReport // For tests to provide custom Coverage results. + errors multierror.Error +} + +func newTestCoverageDetails(packageName, packageType string, testType TestType) *testCoverageDetails { + return &testCoverageDetails{packageName: packageName, packageType: packageType, testType: testType, dataStreams: map[string][]string{}} +} + +func (tcd *testCoverageDetails) withUncoveredDataStreams(dataStreams []string) *testCoverageDetails { + for _, wt := range dataStreams { + tcd.dataStreams[wt] = []string{} + } + return tcd +} + +func (tcd *testCoverageDetails) withTestResults(results []TestResult) *testCoverageDetails { + for _, result := range results { + if _, ok := tcd.dataStreams[result.DataStream]; !ok { + tcd.dataStreams[result.DataStream] = []string{} + } + tcd.dataStreams[result.DataStream] = append(tcd.dataStreams[result.DataStream], result.Name) + if tcd.coverage != nil && result.Coverage != nil { + if err := tcd.coverage.Merge(result.Coverage); err != nil { + tcd.errors = append(tcd.errors, fmt.Errorf("can't merge coverage for test `%s`: %w", result.Name, err)) + } + } else if tcd.coverage == nil { + tcd.coverage = result.Coverage + } + } + return tcd +} + +// WriteCoverage function calculates test coverage for the given package. +// It requires to execute tests for all data streams (same test type), so the coverage can be calculated properly. 
+func WriteCoverage(packageRootPath, packageName, packageType string, testType TestType, results []TestResult, testCoverageType string) error { + timestamp := time.Now().UnixNano() + report, err := createCoverageReport(packageRootPath, packageName, packageType, testType, results, testCoverageType, timestamp) + if err != nil { + return fmt.Errorf("can't create coverage report: %w", err) + } + + err = writeCoverageReportFile(report, packageName, string(testType)) + if err != nil { + return fmt.Errorf("can't write test coverage report file: %w", err) + } + return nil +} + +func createCoverageReport(packageRootPath, packageName, packageType string, testType TestType, results []TestResult, coverageFormat string, timestamp int64) (CoverageReport, error) { + details, err := collectTestCoverageDetails(packageRootPath, packageName, packageType, testType, results) + if err != nil { + return nil, fmt.Errorf("can't collect test coverage details: %w", err) + } + + if details.coverage != nil { + // Use provided coverage report + return details.coverage, nil + } + + // generate a custom report if not available + baseFolder, err := GetBaseFolderPackageForCoverage(packageRootPath) + if err != nil { + return nil, err + } + + report := transformToCoverageReport(details, baseFolder, coverageFormat, timestamp) + + return report, nil +} + +func GetBaseFolderPackageForCoverage(packageRootPath string) (string, error) { + dir, err := files.FindRepositoryRootDirectory() + if err != nil { + return "", err + } + + relativePath, err := filepath.Rel(dir, packageRootPath) + if err != nil { + return "", fmt.Errorf("cannot create relative path to package root path. Root directory: '%s', Package root path: '%s': %w", dir, packageRootPath, err) + } + // Remove latest folder (package) since coverage methods already add the package name in the paths + baseFolder := filepath.Dir(relativePath) + + return baseFolder, nil +} + +func collectTestCoverageDetails(packageRootPath, packageName, packageType string, testType TestType, results []TestResult) (*testCoverageDetails, error) { + withoutTests, err := findDataStreamsWithoutTests(packageRootPath, testType) + if err != nil { + return nil, fmt.Errorf("can't find data streams without tests: %w", err) + } + + details := newTestCoverageDetails(packageName, packageType, testType). + withUncoveredDataStreams(withoutTests). 
+ withTestResults(results) + if len(details.errors) > 0 { + return nil, details.errors + } + return details, nil +} + +func findDataStreamsWithoutTests(packageRootPath string, testType TestType) ([]string, error) { + var noTests []string + + dataStreamDir := filepath.Join(packageRootPath, "data_stream") + dataStreams, err := os.ReadDir(dataStreamDir) + if errors.Is(err, os.ErrNotExist) { + return noTests, nil // there are packages that don't have any data streams (fleet_server, security_detection_engine) + } else if err != nil { + return nil, fmt.Errorf("can't list data streams directory: %w", err) + } + + for _, dataStream := range dataStreams { + if !dataStream.IsDir() { + continue + } + + expected, err := verifyTestExpected(packageRootPath, dataStream.Name(), testType) + if err != nil { + return nil, fmt.Errorf("can't verify if test is expected: %w", err) + } + if !expected { + continue + } + + dataStreamTestPath := filepath.Join(packageRootPath, "data_stream", dataStream.Name(), "_dev", "test", string(testType)) + _, err = os.Stat(dataStreamTestPath) + if errors.Is(err, os.ErrNotExist) { + noTests = append(noTests, dataStream.Name()) + continue + } + if err != nil { + return nil, fmt.Errorf("can't stat path: %s: %w", dataStreamTestPath, err) + } + } + return noTests, nil +} + +// verifyTestExpected function checks if tests are actually expected. +// Pipeline tests require an ingest pipeline to be defined in the data stream. +func verifyTestExpected(packageRootPath string, dataStreamName string, testType TestType) (bool, error) { + if testType != "pipeline" { + return true, nil + } + + ingestPipelinePath := filepath.Join(packageRootPath, "data_stream", dataStreamName, "elasticsearch", "ingest_pipeline") + _, err := os.Stat(ingestPipelinePath) + if errors.Is(err, os.ErrNotExist) { + return false, nil + } + if err != nil { + return false, fmt.Errorf("can't stat path: %s: %w", ingestPipelinePath, err) + } + return true, nil +} + +func transformToCoverageReport(details *testCoverageDetails, baseFolder, coverageFormat string, timestamp int64) CoverageReport { + if coverageFormat == "cobertura" { + return transformToCoberturaReport(details, baseFolder, timestamp) + } + + if coverageFormat == "generic" { + return transformToGenericCoverageReport(details, baseFolder, timestamp) + } + + return nil +} + +func writeCoverageReportFile(report CoverageReport, packageName, testType string) error { + dest, err := testCoverageReportsDir() + if err != nil { + return fmt.Errorf("could not determine test coverage reports folder: %w", err) + } + + // Create test coverage reports folder if it doesn't exist + _, err = os.Stat(dest) + if err != nil && errors.Is(err, os.ErrNotExist) { + if err := os.MkdirAll(dest, 0755); err != nil { + return fmt.Errorf("could not create test coverage reports folder: %w", err) + } + } + + fileName := fmt.Sprintf("coverage-%s-%s-%d-report.xml", packageName, testType, report.TimeStamp()) + filePath := filepath.Join(dest, fileName) + + b, err := report.Bytes() + if err != nil { + return fmt.Errorf("can't marshal test coverage report: %w", err) + } + + if err := os.WriteFile(filePath, b, 0644); err != nil { + return fmt.Errorf("could not write test coverage report file: %w", err) + } + return nil +} + +func testCoverageReportsDir() (string, error) { + buildDir, err := builder.BuildDirectory() + if err != nil { + return "", fmt.Errorf("locating build directory failed: %w", err) + } + return filepath.Join(buildDir, "test-coverage"), nil +} diff --git 
a/internal/testrunner/coveragereport_test.go b/internal/testrunner/coveragereport_test.go new file mode 100644 index 0000000000..32f51f4116 --- /dev/null +++ b/internal/testrunner/coveragereport_test.go @@ -0,0 +1,262 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package testrunner + +import ( + "os" + "path/filepath" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestCreateCoverageReport(t *testing.T) { + workDir, err := os.Getwd() + require.NoError(t, err) + packageRootPath := filepath.Join(workDir, "my", "path", "package") + tests := []struct { + name string + rootPath string + packageName string + packageType string + coverageFormat string + timestamp int64 + testType TestType + results []TestResult + expected CoverageReport + }{ + { + name: "generate custom cobertura coverage", + testType: "system", + rootPath: packageRootPath, + packageName: "mypackage", + packageType: "integration", + coverageFormat: "cobertura", + timestamp: 10, + results: []TestResult{ + { + Name: "test1", + Package: "mypackage", + DataStream: "metrics", + TimeElapsed: 1 * time.Second, + Coverage: nil, + }, + { + Name: "test2", + Package: "mypackage", + DataStream: "logs", + TimeElapsed: 2 * time.Second, + Coverage: nil, + }, + }, + expected: &CoberturaCoverage{ + Version: "", + Timestamp: 10, + Packages: []*CoberturaPackage{ + { + Name: "mypackage", + Classes: []*CoberturaClass{ + { + Name: "system", + Filename: filepath.Join("internal", "testrunner", "my", "path", "mypackage", "data_stream", "logs", "manifest.yml"), + Methods: []*CoberturaMethod{ + { + Name: "OK", + Signature: "", + Lines: []*CoberturaLine{ + { + Number: 3, + Hits: 1, + }, + }, + }, + }, + Lines: []*CoberturaLine{ + { + Number: 3, + Hits: 1, + }, + }, + }, + { + Name: "system", + Filename: filepath.Join("internal", "testrunner", "my", "path", "mypackage", "data_stream", "metrics", "manifest.yml"), + Methods: []*CoberturaMethod{ + { + Name: "OK", + Signature: "", + Lines: []*CoberturaLine{ + { + Number: 3, + Hits: 1, + }, + }, + }, + }, + Lines: []*CoberturaLine{ + { + Number: 3, + Hits: 1, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "generate custom generic coverage", + testType: "system", + rootPath: packageRootPath, + packageName: "mypackage", + packageType: "integration", + coverageFormat: "generic", + timestamp: 10, + results: []TestResult{ + { + Name: "test1", + Package: "mypackage", + DataStream: "metrics", + TimeElapsed: 1 * time.Second, + Coverage: nil, + }, + { + Name: "test2", + Package: "mypackage", + DataStream: "logs", + TimeElapsed: 2 * time.Second, + Coverage: nil, + }, + }, + expected: &GenericCoverage{ + Version: 1, + Files: []*GenericFile{ + { + Path: filepath.Join("internal", "testrunner", "my", "path", "mypackage", "data_stream", "logs", "manifest.yml"), + Lines: []*GenericLine{ + { + LineNumber: 3, + Covered: true, + }, + }, + }, + { + Path: filepath.Join("internal", "testrunner", "my", "path", "mypackage", "data_stream", "metrics", "manifest.yml"), + Lines: []*GenericLine{ + { + LineNumber: 3, + Covered: true, + }, + }, + }, + }, + TestType: "Coverage for system test", + Timestamp: 10, + }, + }, + { + name: "use provided generic coverage", + testType: "system", + rootPath: packageRootPath, + packageName: "mypackage", + packageType: "integration", + 
coverageFormat: "generic", + timestamp: 10, + results: []TestResult{ + { + Name: "test1", + Package: "mypackage", + DataStream: "metrics", + TimeElapsed: 1 * time.Second, + Coverage: &GenericCoverage{ + Version: 1, + Files: []*GenericFile{ + { + Path: filepath.Join("internal", "testrunner", "my", "path", "mypackage", "data_stream", "metrics", "foo.yml"), + Lines: []*GenericLine{ + { + LineNumber: 1, + Covered: true, + }, + { + LineNumber: 2, + Covered: true, + }, + }, + }, + }, + TestType: "Coverage for system test", + Timestamp: 20, + }, + }, + }, + expected: &GenericCoverage{ + Version: 1, + Files: []*GenericFile{ + { + Path: filepath.Join("internal", "testrunner", "my", "path", "mypackage", "data_stream", "metrics", "foo.yml"), + Lines: []*GenericLine{ + { + LineNumber: 1, + Covered: true, + }, + { + LineNumber: 2, + Covered: true, + }, + }, + }, + }, + TestType: "Coverage for system test", + Timestamp: 20, + }, + }, + { + name: "generic coverage for an input package", + testType: "asset", + rootPath: packageRootPath, + packageName: "mypackage", + packageType: "input", + coverageFormat: "generic", + timestamp: 10, + results: []TestResult{ + { + Name: "test1", + Package: "mypackage", + DataStream: "", + TimeElapsed: 1 * time.Second, + Coverage: nil, + }, + }, + expected: &GenericCoverage{ + Version: 1, + Files: []*GenericFile{ + { + Path: filepath.Join("internal", "testrunner", "my", "path", "mypackage", "manifest.yml"), + Lines: []*GenericLine{ + { + LineNumber: 1, + Covered: true, + }, + }, + }, + }, + TestType: "Coverage for asset test", + Timestamp: 10, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + report, err := createCoverageReport(tt.rootPath, tt.packageName, tt.packageType, tt.testType, tt.results, tt.coverageFormat, tt.timestamp) + require.NoError(t, err) + assert.Equal(t, tt.expected, report) + }) + } +} diff --git a/internal/testrunner/genericcobertura.go b/internal/testrunner/genericcobertura.go new file mode 100644 index 0000000000..8b17c4b4a9 --- /dev/null +++ b/internal/testrunner/genericcobertura.go @@ -0,0 +1,139 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package testrunner + +import ( + "bytes" + "encoding/xml" + "fmt" + "path/filepath" + "sort" +) + +func init() { + registerCoverageReporterFormat("generic") +} + +// GenericCoverage is the root element for a Cobertura XML report. 
+type GenericCoverage struct { + XMLName xml.Name `xml:"coverage"` + Version int64 `xml:"version,attr"` + Files []*GenericFile `xml:"file"` + Timestamp int64 `xml:"-"` + TestType string `xml:",comment"` +} + +type GenericFile struct { + Path string `xml:"path,attr"` + Lines []*GenericLine `xml:"lineToCover"` +} + +type GenericLine struct { + LineNumber int64 `xml:"lineNumber,attr"` + Covered bool `xml:"covered,attr"` +} + +func (c *GenericCoverage) TimeStamp() int64 { + return c.Timestamp +} + +func (c *GenericCoverage) Bytes() ([]byte, error) { + out, err := xml.MarshalIndent(&c, "", " ") + if err != nil { + return nil, fmt.Errorf("unable to format test results as Coverage: %w", err) + } + + var buffer bytes.Buffer + buffer.WriteString(xml.Header) + buffer.WriteString("\n") + buffer.Write(out) + return buffer.Bytes(), nil +} + +func (c *GenericFile) merge(b *GenericFile) error { + // Merge files + for _, coverageLine := range b.Lines { + found := false + for _, existingLine := range c.Lines { + if existingLine.LineNumber == coverageLine.LineNumber { + found = true + break + } + } + if !found { + c.Lines = append(c.Lines, coverageLine) + } + } + return nil +} + +// merge merges two coverage reports. +func (c *GenericCoverage) Merge(other CoverageReport) error { + b, ok := other.(*GenericCoverage) + if !ok { + return fmt.Errorf("not able to assert report to be merged as GenericCoverage") + } + // Merge files + for _, coverageFile := range b.Files { + var target *GenericFile + for _, existingFile := range c.Files { + if existingFile.Path == coverageFile.Path { + target = existingFile + break + } + } + if target != nil { + if err := target.merge(coverageFile); err != nil { + return err + } + } else { + c.Files = append(c.Files, coverageFile) + } + } + return nil +} + +func transformToGenericCoverageReport(details *testCoverageDetails, baseFolder string, timestamp int64) *GenericCoverage { + lineNumberTestType := lineNumberPerTestType(string(details.testType)) + var files []*GenericFile + // sort data streams to ensure same ordering in coverage arrays + sortedDataStreams := make([]string, 0, len(details.dataStreams)) + for dataStream := range details.dataStreams { + sortedDataStreams = append(sortedDataStreams, dataStream) + } + sort.Strings(sortedDataStreams) + + for _, dataStream := range sortedDataStreams { + if dataStream == "" && details.packageType == "integration" { + continue // ignore tests running in the package context (not data stream), mostly referring to installed assets + } + testCases := details.dataStreams[dataStream] + + fileName := filepath.Join(baseFolder, details.packageName, "data_stream", dataStream, "manifest.yml") + if dataStream == "" { + // input package + fileName = filepath.Join(baseFolder, details.packageName, "manifest.yml") + } + + if len(testCases) == 0 { + files = append(files, &GenericFile{ + Path: fileName, + Lines: []*GenericLine{{LineNumber: int64(lineNumberTestType), Covered: false}}, + }) + } else { + files = append(files, &GenericFile{ + Path: fileName, + Lines: []*GenericLine{{LineNumber: int64(lineNumberTestType), Covered: true}}, + }) + } + } + + return &GenericCoverage{ + Timestamp: timestamp, + Version: 1, + Files: files, + TestType: fmt.Sprintf("Coverage for %s test", details.testType), + } +} diff --git a/internal/testrunner/genericcobertura_test.go b/internal/testrunner/genericcobertura_test.go new file mode 100644 index 0000000000..173d6ab381 --- /dev/null +++ b/internal/testrunner/genericcobertura_test.go @@ -0,0 +1,92 @@ +// Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package testrunner + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGenericCoverage_Merge(t *testing.T) { + tests := []struct { + name string + rhs, lhs, expected GenericCoverage + wantErr bool + }{ + { + name: "merge files", + rhs: GenericCoverage{ + Files: []*GenericFile{ + { + Path: "/a", + Lines: []*GenericLine{ + {LineNumber: 1, Covered: true}, + }, + }, + { + Path: "/c", + Lines: []*GenericLine{}, + }, + }, + }, + lhs: GenericCoverage{ + Files: []*GenericFile{ + { + Path: "/b", + Lines: []*GenericLine{ + {LineNumber: 1, Covered: true}, + }, + }, + { + Path: "/c", + Lines: []*GenericLine{ + {LineNumber: 1, Covered: false}, + {LineNumber: 2, Covered: false}, + }, + }, + }, + }, + expected: GenericCoverage{ + Files: []*GenericFile{ + { + Path: "/a", + Lines: []*GenericLine{ + {LineNumber: 1, Covered: true}, + }, + }, + { + Path: "/c", + Lines: []*GenericLine{ + {LineNumber: 1, Covered: false}, + {LineNumber: 2, Covered: false}, + }, + }, + { + Path: "/b", + Lines: []*GenericLine{ + {LineNumber: 1, Covered: true}, + }, + }, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := tt.rhs.Merge(&tt.lhs) + if !tt.wantErr { + if !assert.NoError(t, err) { + t.Fatal(err) + } + } else { + if !assert.Error(t, err) { + t.Fatal("error expected") + } + } + assert.Equal(t, tt.expected, tt.rhs) + }) + } +} diff --git a/internal/testrunner/runners/pipeline/coverage.go b/internal/testrunner/runners/pipeline/coverage.go index 8735f96b3a..42f1619602 100644 --- a/internal/testrunner/runners/pipeline/coverage.go +++ b/internal/testrunner/runners/pipeline/coverage.go @@ -12,12 +12,13 @@ import ( "time" "github.com/elastic/elastic-package/internal/elasticsearch/ingest" + "github.com/elastic/elastic-package/internal/files" "github.com/elastic/elastic-package/internal/packages" "github.com/elastic/elastic-package/internal/testrunner" ) // GetPipelineCoverage returns a coverage report for the provided set of ingest pipelines. -func GetPipelineCoverage(options testrunner.TestOptions, pipelines []ingest.Pipeline) (*testrunner.CoberturaCoverage, error) { +func GetPipelineCoverage(options testrunner.TestOptions, pipelines []ingest.Pipeline) (testrunner.CoverageReport, error) { dataStreamPath, found, err := packages.FindDataStreamRootForPath(options.TestFolder.Path) if err != nil { return nil, fmt.Errorf("locating data_stream root failed: %w", err) @@ -33,62 +34,97 @@ func GetPipelineCoverage(options testrunner.TestOptions, pipelines []ingest.Pipe return nil, fmt.Errorf("error fetching pipeline stats for code coverage calculations: %w", err) } - // Construct the Cobertura report. - pkg := &testrunner.CoberturaPackage{ - Name: options.TestFolder.Package + "." + options.TestFolder.DataStream, - } - // Use the package's parent directory as base path, so that the relative paths // for each class (pipeline) include the package name. This prevents paths for // different packages colliding (i.e. a lot of packages have a "log" datastream // and a default.yml pipeline). 
basePath := filepath.Dir(options.PackageRootPath) - coverage := &testrunner.CoberturaCoverage{ - Sources: []*testrunner.CoberturaSource{ - { - Path: basePath, + repositoryRootDir, err := files.FindRepositoryRootDirectory() + if err != nil { + return nil, err + } + + if options.CoverageType == "cobertura" { + pkg := &testrunner.CoberturaPackage{ + Name: options.TestFolder.Package + "." + options.TestFolder.DataStream, + } + + cobertura := &testrunner.CoberturaCoverage{ + Sources: []*testrunner.CoberturaSource{ + { + Path: basePath, + }, }, - }, - Packages: []*testrunner.CoberturaPackage{pkg}, - Timestamp: time.Now().UnixNano(), + Packages: []*testrunner.CoberturaPackage{pkg}, + Timestamp: time.Now().UnixNano(), + } + + // Calculate coverage for each pipeline + for _, pipeline := range pipelines { + pipelineName, pipelineRelPath, src, pstats, err := pipelineDataForCoverage(pipeline, stats, repositoryRootDir, dataStreamPath) + if err != nil { + return nil, err + } + covered, class, err := coberturaForSinglePipeline(pipelineName, pipelineRelPath, src, pstats) + if err != nil { + return nil, fmt.Errorf("error calculating coverage for pipeline '%s': %w", pipeline.Filename(), err) + } + pkg.Classes = append(pkg.Classes, class) + cobertura.LinesValid += int64(len(class.Methods)) + cobertura.LinesCovered += covered + } + return cobertura, nil } - // Calculate coverage for each pipeline - for _, pipeline := range pipelines { - covered, class, err := coverageForSinglePipeline(pipeline, stats, basePath, dataStreamPath) - if err != nil { - return nil, fmt.Errorf("error calculating coverage for pipeline '%s': %w", pipeline.Filename(), err) + if options.CoverageType == "generic" { + coverage := &testrunner.GenericCoverage{ + Version: 1, + Timestamp: time.Now().UnixNano(), + TestType: "Cobertura for pipeline test", + } + + // Calculate coverage for each pipeline + for _, pipeline := range pipelines { + _, pipelineRelPath, src, pstats, err := pipelineDataForCoverage(pipeline, stats, repositoryRootDir, dataStreamPath) + if err != nil { + return nil, err + } + _, file, err := genericCoverageForSinglePipeline(pipelineRelPath, src, pstats) + if err != nil { + return nil, fmt.Errorf("error calculating coverage for pipeline '%s': %w", pipeline.Filename(), err) + } + coverage.Files = append(coverage.Files, file) } - pkg.Classes = append(pkg.Classes, class) - coverage.LinesValid += int64(len(class.Methods)) - coverage.LinesCovered += covered + return coverage, nil + } - return coverage, nil + + return nil, fmt.Errorf("unrecognised coverage type") } -func coverageForSinglePipeline(pipeline ingest.Pipeline, stats ingest.PipelineStatsMap, basePath, dataStreamPath string) (linesCovered int64, class *testrunner.CoberturaClass, err error) { +func pipelineDataForCoverage(pipeline ingest.Pipeline, stats ingest.PipelineStatsMap, basePath, dataStreamPath string) (string, string, []ingest.Processor, ingest.PipelineStats, error) { // Load the list of main processors from the pipeline source code, annotated with line numbers. src, err := pipeline.Processors() if err != nil { - return 0, nil, err + return "", "", nil, ingest.PipelineStats{}, err } pstats, found := stats[pipeline.Name] if !found { - return 0, nil, fmt.Errorf("pipeline '%s' not installed in Elasticsearch", pipeline.Name) + return "", "", nil, ingest.PipelineStats{}, fmt.Errorf("pipeline '%s' not installed in Elasticsearch", pipeline.Name) } // Ensure there is no inconsistency in the list of processors in stats vs obtained from source. 
if len(src) != len(pstats.Processors) { - return 0, nil, fmt.Errorf("processor count mismatch for %s (src:%d stats:%d)", pipeline.Filename(), len(src), len(pstats.Processors)) + return "", "", nil, ingest.PipelineStats{}, fmt.Errorf("processor count mismatch for %s (src:%d stats:%d)", pipeline.Filename(), len(src), len(pstats.Processors)) } for idx, st := range pstats.Processors { // Check that we have the expected type of processor, except for `compound` processors. // Elasticsearch will return a `compound` processor in the case of `foreach` and // any processor that defines `on_failure` processors. if st.Type != "compound" && st.Type != src[idx].Type { - return 0, nil, fmt.Errorf("processor type mismatch for %s processor %d (src:%s stats:%s)", pipeline.Filename(), idx, src[idx].Type, st.Type) + return "", "", nil, ingest.PipelineStats{}, fmt.Errorf("processor type mismatch for %s processor %d (src:%s stats:%s)", pipeline.Filename(), idx, src[idx].Type, st.Type) } } @@ -104,9 +140,33 @@ func coverageForSinglePipeline(pipeline ingest.Pipeline, stats ingest.PipelineSt pipelinePath := filepath.Join(dataStreamPath, "elasticsearch", "ingest_pipeline", pipeline.Filename()) pipelineRelPath, err := filepath.Rel(basePath, pipelinePath) if err != nil { - return 0, nil, fmt.Errorf("cannot create relative path to pipeline file. Package root: '%s', pipeline path: '%s': %w", basePath, pipelinePath, err) + return "", "", nil, ingest.PipelineStats{}, fmt.Errorf("cannot create relative path to pipeline file. Package root: '%s', pipeline path: '%s': %w", basePath, pipelinePath, err) + } + + return pipelineName, pipelineRelPath, src, pstats, nil +} + +func genericCoverageForSinglePipeline(pipelineRelPath string, src []ingest.Processor, pstats ingest.PipelineStats) (linesCovered int64, class *testrunner.GenericFile, err error) { + // Report every pipeline as a "file". + file := &testrunner.GenericFile{ + Path: pipelineRelPath, } + for idx, srcProc := range src { + if pstats.Processors[idx].Stats.Count > 0 { + linesCovered++ + } + for num := srcProc.FirstLine; num <= srcProc.LastLine; num++ { + line := &testrunner.GenericLine{ + LineNumber: int64(num), + Covered: pstats.Processors[idx].Stats.Count > 0, + } + file.Lines = append(file.Lines, line) + } + } + return linesCovered, file, nil +} +func coberturaForSinglePipeline(pipelineName, pipelineRelPath string, src []ingest.Processor, pstats ingest.PipelineStats) (linesCovered int64, class *testrunner.CoberturaClass, err error) { // Report every pipeline as a "class". class = &testrunner.CoberturaClass{ Name: pipelineName, @@ -120,7 +180,6 @@ func coverageForSinglePipeline(pipeline ingest.Pipeline, stats ingest.PipelineSt } method := testrunner.CoberturaMethod{ Name: srcProc.Type, - Hits: pstats.Processors[idx].Stats.Count, } for num := srcProc.FirstLine; num <= srcProc.LastLine; num++ { line := &testrunner.CoberturaLine{ diff --git a/internal/testrunner/testrunner.go b/internal/testrunner/testrunner.go index 2c03e01341..d9df974b08 100644 --- a/internal/testrunner/testrunner.go +++ b/internal/testrunner/testrunner.go @@ -33,6 +33,7 @@ type TestOptions struct { DeferCleanup time.Duration ServiceVariant string WithCoverage bool + CoverageType string } // TestRunner is the interface all test runners must implement. @@ -92,7 +93,7 @@ type TestResult struct { Skipped *SkipConfig // Coverage details in Cobertura format (optional). 
-	Coverage *CoberturaCoverage
+	Coverage CoverageReport
 }
 
 // ResultComposer wraps a TestResult and provides convenience methods for
diff --git a/scripts/test-check-false-positives.sh b/scripts/test-check-false-positives.sh
index f09016441a..eb78480e7f 100755
--- a/scripts/test-check-false-positives.sh
+++ b/scripts/test-check-false-positives.sh
@@ -38,7 +38,7 @@ function check_expected_errors() {
   rm -f ${result_tests}
   (
     cd "$package_root"
-    elastic-package test -v --report-format xUnit --report-output file --test-coverage --defer-cleanup 1s || true
+    elastic-package test -v --report-format xUnit --report-output file --test-coverage --coverage-format=generic --defer-cleanup 1s || true
   )
 
   cat ${result_tests} | tr -d '\n' > ${results_no_spaces}
diff --git a/scripts/test-check-packages.sh b/scripts/test-check-packages.sh
index a9b116e3fd..f0a9c88a9c 100755
--- a/scripts/test-check-packages.sh
+++ b/scripts/test-check-packages.sh
@@ -107,7 +107,7 @@ for d in test/packages/${PACKAGE_TEST_TYPE:-other}/${PACKAGE_UNDER_TEST:-*}/; do
       elastic-package benchmark system --benchmark logs-benchmark -v --defer-cleanup 1s
     else
       # defer-cleanup is set to a short period to verify that the option is available
-      elastic-package test -v --report-format xUnit --report-output file --defer-cleanup 1s --test-coverage
+      elastic-package test -v --report-format xUnit --report-output file --defer-cleanup 1s --test-coverage --coverage-format=generic
     fi
   )
   cd -
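
Below is a minimal usage sketch of the coverage flags touched by this change, mirroring the script invocations above. The accepted formats come from the registered reporters (cobertura, generic), the default comes from the flag definition in cmd/testrunner.go, and the report file name pattern comes from writeCoverageReportFile; placeholders in angle brackets are illustrative.

    # Default coverage format (cobertura), unchanged behaviour:
    elastic-package test -v --report-format xUnit --report-output file --test-coverage

    # New flag: select one of the registered coverage formats:
    elastic-package test -v --report-format xUnit --report-output file --test-coverage --coverage-format=generic

    # Reports are written under the build directory as:
    #   build/test-coverage/coverage-<packageName>-<testType>-<timestamp>-report.xml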