diff --git a/.github/scripts/json-schema-drift-check.sh b/.github/scripts/json-schema-drift-check.sh index 7b7f7dd2f62..3002236d68b 100755 --- a/.github/scripts/json-schema-drift-check.sh +++ b/.github/scripts/json-schema-drift-check.sh @@ -1,27 +1,17 @@ #!/usr/bin/env bash set -u -if ! git diff-index --quiet HEAD --; then - git diff-index HEAD -- - git --no-pager diff - echo "there are uncommitted changes, please commit them before running this check" +if [ "$(git status --porcelain | wc -l)" -ne "0" ]; then + echo " 🔴 there are uncommitted changes, please commit them before running this check" exit 1 fi -success=true - if ! make generate-json-schema; then echo "Generating json schema failed" - success=false -fi - -if ! git diff-index --quiet HEAD --; then - git diff-index HEAD -- - git --no-pager diff - echo "JSON schema drift detected!" - success=false + exit 1 fi -if ! $success; then +if [ "$(git status --porcelain | wc -l)" -ne "0" ]; then + echo " 🔴 JSON schema drift detected!" exit 1 fi diff --git a/DEVELOPING.md b/DEVELOPING.md index 2a787bef11f..7d915b7a313 100644 --- a/DEVELOPING.md +++ b/DEVELOPING.md @@ -167,12 +167,12 @@ always feel free to file an issue or reach out to us [on slack](https://anchore. #### Searching for files -All catalogers are provided an instance of the [`source.FileResolver`](https://github.com/anchore/syft/blob/v0.70.0/syft/source/file_resolver.go#L8) to interface with the image and search for files. The implementations for these +All catalogers are provided an instance of the [`file.Resolver`](https://github.com/anchore/syft/blob/v0.70.0/syft/source/file_resolver.go#L8) to interface with the image and search for files. The implementations for these abstractions leverage [`stereoscope`](https://github.com/anchore/stereoscope) in order to perform searching. Here is a rough outline how that works: -1. 
a stereoscope `file.Index` is searched based on the input given (a path, glob, or MIME type). The index is relatively fast to search, but requires results to be filtered down to the files that exist in the specific layer(s) of interest. This is done automatically by the `filetree.Searcher` abstraction. This abstraction will fallback to searching directly against the raw `filetree.FileTree` if the index does not contain the file(s) of interest. Note: the `filetree.Searcher` is used by the `source.FileResolver` abstraction. -2. Once the set of files are returned from the `filetree.Searcher` the results are filtered down further to return the most unique file results. For example, you may have requested for files by a glob that returns multiple results. These results are filtered down to deduplicate by real files, so if a result contains two references to the same file, say one accessed via symlink and one accessed via the real path, then the real path reference is returned and the symlink reference is filtered out. If both were accessed by symlink then the first (by lexical order) is returned. This is done automatically by the `source.FileResolver` abstraction. +1. a stereoscope `file.Index` is searched based on the input given (a path, glob, or MIME type). The index is relatively fast to search, but requires results to be filtered down to the files that exist in the specific layer(s) of interest. This is done automatically by the `filetree.Searcher` abstraction. This abstraction will fallback to searching directly against the raw `filetree.FileTree` if the index does not contain the file(s) of interest. Note: the `filetree.Searcher` is used by the `file.Resolver` abstraction. +2. Once the set of files are returned from the `filetree.Searcher` the results are filtered down further to return the most unique file results. For example, you may have requested for files by a glob that returns multiple results. 
These results are filtered down to deduplicate by real files, so if a result contains two references to the same file, say one accessed via symlink and one accessed via the real path, then the real path reference is returned and the symlink reference is filtered out. If both were accessed by symlink then the first (by lexical order) is returned. This is done automatically by the `file.Resolver` abstraction. 3. By the time results reach the `pkg.Cataloger` you are guaranteed to have a set of unique files that exist in the layer(s) of interest (relative to what the resolver supports). ## Testing diff --git a/Makefile b/Makefile index 0b944b83bd9..ae917777f72 100644 --- a/Makefile +++ b/Makefile @@ -302,7 +302,7 @@ compare-test-rpm-package-install: $(TEMP_DIR) $(SNAPSHOT_DIR) .PHONY: generate-json-schema generate-json-schema: ## Generate a new json schema - cd schema/json && go run generate.go + cd schema/json && go generate . && go run . .PHONY: generate-license-list generate-license-list: ## Generate an updated spdx license list diff --git a/cmd/syft/cli/attest/attest.go b/cmd/syft/cli/attest/attest.go index 05e17867933..997f3307de2 100644 --- a/cmd/syft/cli/attest/attest.go +++ b/cmd/syft/cli/attest/attest.go @@ -47,7 +47,7 @@ func Run(_ context.Context, app *config.Application, args []string) error { // could be an image or a directory, with or without a scheme // TODO: validate that source is image userInput := args[0] - si, err := source.ParseInputWithName(userInput, app.Platform, app.Name, app.DefaultImagePullSource) + si, err := source.ParseInputWithNameVersion(userInput, app.Platform, app.SourceName, app.SourceVersion, app.DefaultImagePullSource) if err != nil { return fmt.Errorf("could not generate source input for packages command: %w", err) } diff --git a/cmd/syft/cli/eventloop/tasks.go b/cmd/syft/cli/eventloop/tasks.go index 56bbcc93535..536a39ee6f1 100644 --- a/cmd/syft/cli/eventloop/tasks.go +++ b/cmd/syft/cli/eventloop/tasks.go @@ -8,6 +8,10 @@ import 
( "github.com/anchore/syft/syft" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/file/cataloger/filecontent" + "github.com/anchore/syft/syft/file/cataloger/filedigest" + "github.com/anchore/syft/syft/file/cataloger/filemetadata" + "github.com/anchore/syft/syft/file/cataloger/secrets" "github.com/anchore/syft/syft/sbom" "github.com/anchore/syft/syft/source" ) @@ -61,7 +65,7 @@ func generateCatalogFileMetadataTask(app *config.Application) (Task, error) { return nil, nil } - metadataCataloger := file.NewMetadataCataloger() + metadataCataloger := filemetadata.NewCataloger() task := func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) { resolver, err := src.FileResolver(app.FileMetadata.Cataloger.ScopeOpt) @@ -104,10 +108,7 @@ func generateCatalogFileDigestsTask(app *config.Application) (Task, error) { hashes = append(hashes, hashObj) } - digestsCataloger, err := file.NewDigestsCataloger(hashes) - if err != nil { - return nil, err - } + digestsCataloger := filedigest.NewCataloger(hashes) task := func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) { resolver, err := src.FileResolver(app.FileMetadata.Cataloger.ScopeOpt) @@ -131,12 +132,12 @@ func generateCatalogSecretsTask(app *config.Application) (Task, error) { return nil, nil } - patterns, err := file.GenerateSearchPatterns(file.DefaultSecretsPatterns, app.Secrets.AdditionalPatterns, app.Secrets.ExcludePatternNames) + patterns, err := secrets.GenerateSearchPatterns(secrets.DefaultSecretsPatterns, app.Secrets.AdditionalPatterns, app.Secrets.ExcludePatternNames) if err != nil { return nil, err } - secretsCataloger, err := file.NewSecretsCataloger(patterns, app.Secrets.RevealValues, app.Secrets.SkipFilesAboveSize) + secretsCataloger, err := secrets.NewCataloger(patterns, app.Secrets.RevealValues, app.Secrets.SkipFilesAboveSize) //nolint:staticcheck if err != nil { return nil, err } @@ -163,7 +164,7 @@ 
func generateCatalogContentsTask(app *config.Application) (Task, error) { return nil, nil } - contentsCataloger, err := file.NewContentsCataloger(app.FileContents.Globs, app.FileContents.SkipFilesAboveSize) + contentsCataloger, err := filecontent.NewCataloger(app.FileContents.Globs, app.FileContents.SkipFilesAboveSize) //nolint:staticcheck if err != nil { return nil, err } diff --git a/cmd/syft/cli/options/packages.go b/cmd/syft/cli/options/packages.go index 7ab3b1fc23f..f6992a948c2 100644 --- a/cmd/syft/cli/options/packages.go +++ b/cmd/syft/cli/options/packages.go @@ -21,7 +21,8 @@ type PackagesOptions struct { Platform string Exclude []string Catalogers []string - Name string + SourceName string + SourceVersion string } var _ Interface = (*PackagesOptions)(nil) @@ -48,7 +49,14 @@ func (o *PackagesOptions) AddFlags(cmd *cobra.Command, v *viper.Viper) error { cmd.Flags().StringArrayVarP(&o.Catalogers, "catalogers", "", nil, "enable one or more package catalogers") - cmd.Flags().StringVarP(&o.Name, "name", "", "", + cmd.Flags().StringVarP(&o.SourceName, "name", "", "", "set the name of the target being analyzed") + cmd.Flags().Lookup("name").Deprecated = "use: source-name" + + cmd.Flags().StringVarP(&o.SourceName, "source-name", "", "", + "set the name of the target being analyzed") + + cmd.Flags().StringVarP(&o.SourceVersion, "source-version", "", "", + "set the version of the target being analyzed") return bindPackageConfigOptions(cmd.Flags(), v) @@ -78,6 +86,14 @@ func bindPackageConfigOptions(flags *pflag.FlagSet, v *viper.Viper) error { return err } + if err := v.BindPFlag("source-name", flags.Lookup("source-name")); err != nil { return err } + + if err := v.BindPFlag("source-version", flags.Lookup("source-version")); err != nil { return err } + if err := v.BindPFlag("output", flags.Lookup("output")); err != nil { return err } diff --git a/cmd/syft/cli/packages/packages.go b/cmd/syft/cli/packages/packages.go index 1f3acd13e56..12695e4f086 100644 --- 
a/cmd/syft/cli/packages/packages.go +++ b/cmd/syft/cli/packages/packages.go @@ -42,7 +42,7 @@ func Run(_ context.Context, app *config.Application, args []string) error { // could be an image or a directory, with or without a scheme userInput := args[0] - si, err := source.ParseInputWithName(userInput, app.Platform, app.Name, app.DefaultImagePullSource) + si, err := source.ParseInputWithNameVersion(userInput, app.Platform, app.SourceName, app.SourceVersion, app.DefaultImagePullSource) if err != nil { return fmt.Errorf("could not generate source input for packages command: %w", err) } diff --git a/cmd/syft/cli/poweruser/poweruser.go b/cmd/syft/cli/poweruser/poweruser.go index b6fae72fef1..b4e524feadd 100644 --- a/cmd/syft/cli/poweruser/poweruser.go +++ b/cmd/syft/cli/poweruser/poweruser.go @@ -47,7 +47,7 @@ func Run(_ context.Context, app *config.Application, args []string) error { }() userInput := args[0] - si, err := source.ParseInputWithName(userInput, app.Platform, app.Name, app.DefaultImagePullSource) + si, err := source.ParseInputWithNameVersion(userInput, app.Platform, app.SourceName, app.SourceVersion, app.DefaultImagePullSource) if err != nil { return fmt.Errorf("could not generate source input for packages command: %w", err) } diff --git a/go.mod b/go.mod index b9844c8b2ef..00012e15567 100644 --- a/go.mod +++ b/go.mod @@ -30,13 +30,13 @@ require ( // pinned to pull in 386 arch fix: https://github.com/scylladb/go-set/commit/cc7b2070d91ebf40d233207b633e28f5bd8f03a5 github.com/scylladb/go-set v1.0.3-0.20200225121959-cc7b2070d91e github.com/sergi/go-diff v1.3.1 - github.com/sirupsen/logrus v1.9.1 + github.com/sirupsen/logrus v1.9.3 github.com/spdx/tools-golang v0.5.0 github.com/spf13/afero v1.9.5 github.com/spf13/cobra v1.7.0 github.com/spf13/pflag v1.0.5 - github.com/spf13/viper v1.15.0 - github.com/stretchr/testify v1.8.2 + github.com/spf13/viper v1.16.0 + github.com/stretchr/testify v1.8.4 github.com/vifraa/gopom v0.2.1 github.com/wagoodman/go-partybus 
v0.0.0-20210627031916-db1f5573bbc5 github.com/wagoodman/go-progress v0.0.0-20230301185719-21920a456ad5 @@ -53,12 +53,13 @@ require ( github.com/Masterminds/semver v1.5.0 github.com/Masterminds/sprig/v3 v3.2.3 github.com/anchore/go-logger v0.0.0-20220728155337-03b66a5207d8 - github.com/anchore/stereoscope v0.0.0-20230412183729-8602f1afc574 + github.com/anchore/stereoscope v0.0.0-20230522170632-e14bc4437b2e + github.com/dave/jennifer v1.6.1 github.com/deitch/magic v0.0.0-20230404182410-1ff89d7342da - github.com/docker/docker v24.0.0+incompatible + github.com/docker/docker v24.0.2+incompatible github.com/github/go-spdx/v2 v2.1.2 github.com/go-git/go-billy/v5 v5.4.1 - github.com/go-git/go-git/v5 v5.6.1 + github.com/go-git/go-git/v5 v5.7.0 github.com/google/go-containerregistry v0.15.2 github.com/google/licensecheck v0.3.1 github.com/invopop/jsonschema v0.7.0 @@ -68,7 +69,7 @@ require ( github.com/vbatts/go-mtree v0.5.3 golang.org/x/exp v0.0.0-20230202163644-54bba9f4231b gopkg.in/yaml.v3 v3.0.1 - modernc.org/sqlite v1.22.1 + modernc.org/sqlite v1.23.0 ) require ( @@ -77,16 +78,16 @@ require ( github.com/Masterminds/goutils v1.1.1 // indirect github.com/Masterminds/semver/v3 v3.2.0 // indirect github.com/Microsoft/go-winio v0.6.1 // indirect - github.com/ProtonMail/go-crypto v0.0.0-20230217124315-7d5c6f04bbb8 // indirect + github.com/ProtonMail/go-crypto v0.0.0-20230518184743-7afd39499903 // indirect github.com/acomagu/bufpipe v1.0.4 // indirect github.com/anchore/go-struct-converter v0.0.0-20221118182256-c68fdcfa2092 // indirect github.com/becheran/wildmatch-go v1.0.0 // indirect - github.com/cloudflare/circl v1.1.0 // indirect + github.com/cloudflare/circl v1.3.3 // indirect github.com/containerd/containerd v1.7.0 // indirect github.com/containerd/stargz-snapshotter/estargz v0.14.3 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/docker/cli v23.0.5+incompatible // indirect - github.com/docker/distribution v2.8.1+incompatible // indirect + 
github.com/docker/distribution v2.8.2+incompatible // indirect github.com/docker/docker-credential-helpers v0.7.0 // indirect github.com/docker/go-connections v0.4.0 // indirect github.com/docker/go-units v0.5.0 // indirect @@ -94,16 +95,17 @@ require ( github.com/emirpasic/gods v1.18.1 // indirect github.com/fsnotify/fsnotify v1.6.0 // indirect github.com/gabriel-vasile/mimetype v1.4.0 // indirect - github.com/go-git/gcfg v1.5.0 // indirect + github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect github.com/go-restruct/restruct v1.2.0-alpha // indirect github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.3 // indirect github.com/golang/snappy v0.0.4 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/hcl v1.0.0 // indirect github.com/huandu/xstrings v1.3.3 // indirect github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0 // indirect - github.com/imdario/mergo v0.3.13 // indirect + github.com/imdario/mergo v0.3.15 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect @@ -119,16 +121,15 @@ require ( github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/nwaples/rardecode v1.1.0 // indirect github.com/opencontainers/image-spec v1.1.0-rc3 // indirect - github.com/pelletier/go-toml/v2 v2.0.6 // indirect + github.com/pelletier/go-toml/v2 v2.0.8 // indirect github.com/pierrec/lz4/v4 v4.1.15 // indirect github.com/pjbgf/sha1cd v0.3.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect github.com/rivo/uniseg v0.2.0 // indirect - github.com/rogpeppe/go-internal v1.8.0 // indirect github.com/shopspring/decimal v1.2.0 // indirect - 
github.com/skeema/knownhosts v1.1.0 // indirect - github.com/spf13/cast v1.5.0 // indirect + github.com/skeema/knownhosts v1.1.1 // indirect + github.com/spf13/cast v1.5.1 // indirect github.com/spf13/jwalterweatherman v1.1.0 // indirect github.com/stretchr/objx v0.5.0 // indirect github.com/subosito/gotenv v1.4.2 // indirect @@ -147,8 +148,8 @@ require ( golang.org/x/text v0.9.0 // indirect golang.org/x/tools v0.8.0 // indirect golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect - google.golang.org/genproto v0.0.0-20230403163135-c38d8f061ccd // indirect - google.golang.org/grpc v1.54.0 // indirect + google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect + google.golang.org/grpc v1.55.0 // indirect google.golang.org/protobuf v1.30.0 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect @@ -168,7 +169,7 @@ require ( // go: warning: github.com/andybalholm/brotli@v1.0.1: retracted by module author: occasional panics and data corruption github.com/andybalholm/brotli v1.0.4 // indirect github.com/pkg/errors v0.9.1 // indirect - golang.org/x/crypto v0.6.0 // indirect + golang.org/x/crypto v0.9.0 // indirect ) retract ( diff --git a/go.sum b/go.sum index 51d209ad60c..966186b95e0 100644 --- a/go.sum +++ b/go.sum @@ -72,8 +72,8 @@ github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/ProtonMail/go-crypto v0.0.0-20230217124315-7d5c6f04bbb8 h1:wPbRQzjjwFc0ih8puEVAOFGELsn1zoIIYdxvML7mDxA= -github.com/ProtonMail/go-crypto v0.0.0-20230217124315-7d5c6f04bbb8/go.mod h1:I0gYDMZ6Z5GRU7l58bNFSkPTFN6Yl12dsUlAZ8xy98g= +github.com/ProtonMail/go-crypto v0.0.0-20230518184743-7afd39499903 
h1:ZK3C5DtzV2nVAQTx5S5jQvMeDqWtD1By5mOoyY/xJek= +github.com/ProtonMail/go-crypto v0.0.0-20230518184743-7afd39499903/go.mod h1:8TI4H3IbrackdNgv+92dI+rhpCaLqM0IfpgCgenFvRE= github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8= github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo= github.com/acobaugh/osrelease v0.1.0 h1:Yb59HQDGGNhCj4suHaFQQfBps5wyoKLSSX/J/+UifRE= @@ -98,14 +98,13 @@ github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b h1:e1bmaoJfZV github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b/go.mod h1:Bkc+JYWjMCF8OyZ340IMSIi2Ebf3uwByOk6ho4wne1E= github.com/anchore/packageurl-go v0.1.1-0.20230104203445-02e0a6721501 h1:AV7qjwMcM4r8wFhJq3jLRztew3ywIyPTRapl2T1s9o8= github.com/anchore/packageurl-go v0.1.1-0.20230104203445-02e0a6721501/go.mod h1:Blo6OgJNiYF41ufcgHKkbCKF2MDOMlrqhXv/ij6ocR4= -github.com/anchore/stereoscope v0.0.0-20230412183729-8602f1afc574 h1:VFX+FD9EH6am+tfqwr1KeCAmabAknSJQX95aIY3QJJI= -github.com/anchore/stereoscope v0.0.0-20230412183729-8602f1afc574/go.mod h1:2GGFHkHry/xDlEQgBrVGcarq+z7Z6hLnHdyhcKB2lfQ= +github.com/anchore/stereoscope v0.0.0-20230522170632-e14bc4437b2e h1:YPWJxds1hKRedS92u7O6D6ULVOx1F2HGgS4CWqJdBYw= +github.com/anchore/stereoscope v0.0.0-20230522170632-e14bc4437b2e/go.mod h1:0LsgHgXO4QFnk2hsYwtqd3fR18PIZXlFLIl2qb9tu3g= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= github.com/andybalholm/brotli v1.0.1/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY= github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= -github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod 
h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= @@ -113,7 +112,6 @@ github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= -github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= github.com/becheran/wildmatch-go v1.0.0 h1:mE3dGGkTmpKtT4Z+88t8RStG40yN9T+kFEGj2PZFSzA= github.com/becheran/wildmatch-go v1.0.0/go.mod h1:gbMvj0NtVdJ15Mg/mH9uxk2R1QCistMyU7d9KFzroX4= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= @@ -135,8 +133,9 @@ github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMn github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cloudflare/circl v1.1.0 h1:bZgT/A+cikZnKIwn7xL2OBj012Bmvho/o6RpRvv3GKY= github.com/cloudflare/circl v1.1.0/go.mod h1:prBCrKB9DV4poKZY1l9zBXg2QJY7mvgRvtMxxK7fi4I= +github.com/cloudflare/circl v1.3.3 h1:fE/Qz0QdIGqeWfnwq0RE0R7MI51s0M2E4Ga9kq5AEMs= +github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= github.com/cncf/udpa/go 
v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= @@ -156,6 +155,8 @@ github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSV github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/dave/jennifer v1.6.1 h1:T4T/67t6RAA5AIV6+NP8Uk/BIsXgDoqEowgycdQQLuk= +github.com/dave/jennifer v1.6.1/go.mod h1:nXbxhEmQfOZhWml3D1cDK5M1FLnMSozpbFN/m3RmGZc= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -164,10 +165,10 @@ github.com/deitch/magic v0.0.0-20230404182410-1ff89d7342da/go.mod h1:B3tI9iGHi4i github.com/dgrijalva/jwt-go/v4 v4.0.0-preview1/go.mod h1:+hnT3ywWDTAFrW5aE+u2Sa/wT555ZqwoCS+pk3p6ry4= github.com/docker/cli v23.0.5+incompatible h1:ufWmAOuD3Vmr7JP2G5K3cyuNC4YZWiAsuDEvFVVDafE= github.com/docker/cli v23.0.5+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= -github.com/docker/distribution v2.8.1+incompatible h1:Q50tZOPR6T/hjNsyc9g8/syEs6bk8XXApsHjKukMl68= -github.com/docker/distribution v2.8.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/docker v24.0.0+incompatible h1:z4bf8HvONXX9Tde5lGBMQ7yCJgNahmJumdrStZAbeY4= -github.com/docker/docker v24.0.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/distribution v2.8.2+incompatible 
h1:T3de5rq0dB1j30rp0sA2rER+m322EBzniBPB6ZIzuh8= +github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/docker v24.0.2+incompatible h1:eATx+oLz9WdNVkQrr0qjQ8HvRJ4bOOxfzEo8R+dA3cg= +github.com/docker/docker v24.0.2+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker-credential-helpers v0.7.0 h1:xtCHsjxogADNZcdv1pKUHXryefjlVRqWqIhk/uXJp0A= github.com/docker/docker-credential-helpers v0.7.0/go.mod h1:rETQfLdHNT3foU5kuNkFR1R1V12OJRRO5lzt2D1b5X0= github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= @@ -179,6 +180,7 @@ github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5/go.mod h1:qssHWj6 github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY= github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/elazarl/goproxy v0.0.0-20221015165544-a0805db90819 h1:RIB4cRk+lBqKK3Oy0r2gRX4ui7tuhiZq2SuTtTCi0/0= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= @@ -200,7 +202,7 @@ github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/set v0.2.1 h1:nn2CaJyknWE/6txyUDGwysr3G5QC6xWB/PtVjPBbeaA= github.com/fatih/set v0.2.1/go.mod h1:+RKtMCH+favT2+3YecHGxcc0b4KyVWA1QWWJUs4E0CI= -github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= +github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY= github.com/fsnotify/fsnotify v1.5.1/go.mod 
h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= @@ -211,16 +213,13 @@ github.com/github/go-spdx/v2 v2.1.2 h1:p+Tv0yMgcuO0/vnMe9Qh4tmUgYhI6AsLVlakZ/Sx+ github.com/github/go-spdx/v2 v2.1.2/go.mod h1:hMCrsFgT0QnCwn7G8gxy/MxMpy67WgZrwFeISTn0o6w= github.com/glebarez/go-sqlite v1.20.3 h1:89BkqGOXR9oRmG58ZrzgoY/Fhy5x0M+/WV48U5zVrZ4= github.com/gliderlabs/ssh v0.3.5 h1:OcaySEmAQJgyYcArR+gGGTHCyE7nvhEMTlYY+Dp8CpY= -github.com/gliderlabs/ssh v0.3.5/go.mod h1:8XB4KraRrX39qHhT6yxPsHedjA08I/uBVwj4xC+/+z4= -github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4= -github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E= -github.com/go-git/go-billy/v5 v5.3.1/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= github.com/go-git/go-billy/v5 v5.4.1 h1:Uwp5tDRkPr+l/TnbHOQzp+tmJfLceOlbVucgpTz8ix4= github.com/go-git/go-billy/v5 v5.4.1/go.mod h1:vjbugF6Fz7JIflbVpl1hJsGjSHNltrSw45YK/ukIvQg= -github.com/go-git/go-git-fixtures/v4 v4.3.1 h1:y5z6dd3qi8Hl+stezc8p3JxDkoTRqMAlKnXHuzrfjTQ= -github.com/go-git/go-git-fixtures/v4 v4.3.1/go.mod h1:8LHG1a3SRW71ettAD/jW13h8c6AqjVSeL11RAdgaqpo= -github.com/go-git/go-git/v5 v5.6.1 h1:q4ZRqQl4pR/ZJHc1L5CFjGA1a10u76aV1iC+nh+bHsk= -github.com/go-git/go-git/v5 v5.6.1/go.mod h1:mvyoL6Unz0PiTQrGQfSfiLFhBH1c1e84ylC2MDs4ee8= +github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20230305113008-0c11038e723f h1:Pz0DHeFij3XFhoBRGUDPzSJ+w2UcK5/0JvF8DRI58r8= +github.com/go-git/go-git/v5 v5.7.0 h1:t9AudWVLmqzlo+4bqdf7GY+46SUuRsx59SboFxkq2aE= +github.com/go-git/go-git/v5 v5.7.0/go.mod h1:coJHKEOk5kUClpsNlXrUvPrDxY3w3gjHvhcZd8Fodw8= 
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -242,6 +241,7 @@ github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfU github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= @@ -373,8 +373,8 @@ github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47 github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= -github.com/imdario/mergo v0.3.13 h1:lFzP57bqS/wsqKssCGmtLAb8A0wKjLGrve2q3PPVcBk= -github.com/imdario/mergo v0.3.13/go.mod h1:4lJ1jqUDcsbIECGy0RUJAXNIhg+6ocWgb1ALK2O4oXg= +github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM= +github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= 
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= @@ -382,7 +382,6 @@ github.com/invopop/jsonschema v0.7.0 h1:2vgQcBz1n256N+FpX3Jq7Y17AjYt46Ig3zIWyy77 github.com/invopop/jsonschema v0.7.0/go.mod h1:O9uiLokuu0+MGFlyiaqtWxwqJm41/+8Nj0lD7A36YH0= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= -github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4= github.com/jinzhu/copier v0.3.5 h1:GlvfUwHk62RokgqVNvYsku0TATCF7bAHVwEXoBh3iJg= github.com/jinzhu/copier v0.3.5/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= @@ -415,7 +414,7 @@ github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFB github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= @@ -476,7 +475,6 @@ github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RR github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= 
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= -github.com/mmcloughlin/avo v0.5.0/go.mod h1:ChHFdoV7ql95Wi7vuq2YT1bwCJqiWdZrQ1im3VujLYM= github.com/moby/term v0.0.0-20221205130635-1aeaba878587 h1:HfkjXDfhgVaN5rmueG8cL8KKeFNecRCXFhaJ2qZ5SKA= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -499,14 +497,13 @@ github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144T github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= -github.com/pelletier/go-toml/v2 v2.0.6 h1:nrzqCb7j9cDFj2coyLNLaZuJTLjWjlaz6nvTvIwycIU= -github.com/pelletier/go-toml/v2 v2.0.6/go.mod h1:eumQOmlWiOPt5WriQQqoM5y18pDHwha2N+QD+EUNTek= +github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ= +github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4= github.com/pierrec/lz4/v4 v4.1.2/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pierrec/lz4/v4 v4.1.15 h1:MO0/ucJhngq7299dKLwIMtgTfbkoSPF6AoMYDd8Q4q0= github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= -github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors 
v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= @@ -536,8 +533,7 @@ github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= -github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= +github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig= @@ -557,10 +553,10 @@ github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPx github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/sirupsen/logrus v1.9.1 h1:Ou41VVR3nMWWmTiEUnj0OlsgOSCUFgsPAOl6jRIcVtQ= -github.com/sirupsen/logrus v1.9.1/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/skeema/knownhosts v1.1.0 h1:Wvr9V0MxhjRbl3f9nMnKnFfiWTJmtECJ9Njkea3ysW0= -github.com/skeema/knownhosts v1.1.0/go.mod h1:sKFq3RD6/TKZkSWn8boUbDC7Qkgcv+8XXijpFO6roag= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= 
+github.com/skeema/knownhosts v1.1.1 h1:MTk78x9FPgDFVFkDLTrsnnfCJl7g1C/nnKvePgrIngE= +github.com/skeema/knownhosts v1.1.1/go.mod h1:g4fPeYpque7P0xefxtGzV81ihjC8sX2IqpAoNkjxbMo= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spdx/gordf v0.0.0-20201111095634-7098f93598fb/go.mod h1:uKWaldnbMnjsSAXRurWqqrdyZen1R7kxl8TkmWk2OyM= github.com/spdx/tools-golang v0.5.0 h1:/fqihV2Jna7fmow65dHpgKNsilgLK7ICpd2tkCnPEyY= @@ -571,8 +567,8 @@ github.com/spf13/afero v1.9.5 h1:stMpOSZFs//0Lv29HduCmli3GUfpFoF3Y1Q/aXj/wVM= github.com/spf13/afero v1.9.5/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ= github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w= -github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU= +github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA= +github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48= github.com/spf13/cobra v1.3.0/go.mod h1:BrRVncBjOJa/eUcVVm9CE+oC6as8k+VYr4NY7WCi9V4= github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I= github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0= @@ -581,8 +577,8 @@ github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0 github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.10.0/go.mod h1:SoyBPwAtKDzypXNDFKN5kzH7ppppbGZtls1UpIy5AsM= -github.com/spf13/viper v1.15.0 h1:js3yy885G8xwJa6iOISGFwd+qlUo5AvyXb7CiihdtiU= -github.com/spf13/viper v1.15.0/go.mod h1:fFcTBJxvhhzSJiZy8n+PeW6t8l+KeT/uTARa0jHOQLA= +github.com/spf13/viper v1.16.0 
h1:rGGH0XDZhdUOryiDWjmIvUSWpbNqisK8Wk0Vyefw8hc= +github.com/spf13/viper v1.16.0/go.mod h1:yg78JgCJcbrQOvV9YLXgkLaZqUidkY9K+Dd1FofRzQg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= @@ -598,8 +594,10 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/subosito/gotenv v1.4.2 h1:X1TuBLAMDFbaTAChgCBLu3DU3UPyELpnF2jjJ2cz/S8= github.com/subosito/gotenv v1.4.2/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= @@ -662,7 +660,6 @@ go.uber.org/goleak v1.1.10 h1:z+mqJhf6ss6BSfSM671tgKyZBFPTTJM+HLxnhPC3wu0= go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= -golang.org/x/arch v0.1.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= 
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190228161510-8dd112bcdc25/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= @@ -677,14 +674,12 @@ golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWP golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220525230936-793ad666bf5e/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220826181053-bd7e27e6170d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= -golang.org/x/crypto v0.6.0 h1:qfktjS5LUO+fFKeJXZ+ikTRijMmljikvG68fpMMruSc= -golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= +golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= +golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g= +golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -725,7 +720,7 
@@ golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.6.0/go.mod h1:4mET923SAdbXp2ki8ey+zGs1SLqsuM2Y0uvdZR/fUNI= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.10.0 h1:lFO9qtOdlre5W1jxS3r/4szv2/6iXxScdzjoBMXNhYk= golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -773,11 +768,9 @@ golang.org/x/net v0.0.0-20210505024714-0287a6fb4125/go.mod h1:9nx3DQGgdP8bBQD5qx golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= -golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -883,25 +876,22 @@ 
golang.org/x/sys v0.0.0-20211205182925-97ca703d548d/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220825204002-c680a09ffe64/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220906165534-d0df966e6959/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210615171337-6886f2dfbf5b/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.0.0-20220722155259-a9ba230a4035/go.mod 
h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/term v0.8.0 h1:n5xxQn2i3PC0yLAbjTpNT85q/Kgzcr2gIoX9OrJUols= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -915,6 +905,7 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -978,7 +969,7 @@ golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.2.0/go.mod h1:y4OqIKeOV/fWJetJ8bXPU1sEVniLMIyDAZWeHdV+NTA= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.8.0 h1:vSDcovVPld282ceKgDimkRSC8kpaH1dgyc9UMzlt84Y= golang.org/x/tools v0.8.0/go.mod h1:JxBZ99ISMI5ViVkT1tr6tdNmXeTrcpVSD3vZ1RsRdN4= golang.org/x/xerrors 
v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1091,8 +1082,8 @@ google.golang.org/genproto v0.0.0-20211129164237-f09f9a12af12/go.mod h1:5CzLGKJ6 google.golang.org/genproto v0.0.0-20211203200212-54befc351ae9/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20230403163135-c38d8f061ccd h1:sLpv7bNL1AsX3fdnWh9WVh7ejIzXdOc1RRHGeAmeStU= -google.golang.org/genproto v0.0.0-20230403163135-c38d8f061ccd/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak= +google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 h1:KpwkzHKEF7B9Zxg18WzOa7djJ+Ha5DzthMyZYQfEn2A= +google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -1120,8 +1111,8 @@ google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnD google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= -google.golang.org/grpc v1.54.0 h1:EhTqbhiYeixwWQtAEZAxmV9MGqcjEU2mFx52xCzNyag= -google.golang.org/grpc v1.54.0/go.mod h1:PUSEXI6iWghWaB6lXM4knEgpJNu2qUcKfDtNci3EC2g= +google.golang.org/grpc v1.55.0 h1:3Oj82/tFSCeUrRTg/5E/7d/W5A1tj6Ky1ABAuZuv5ag= +google.golang.org/grpc v1.55.0/go.mod h1:iYEXKGkEBhg1PjZQvoYEVPTDkHo1/bjTnfwTeGONTY8= 
google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -1142,7 +1133,6 @@ gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLks gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= @@ -1162,7 +1152,6 @@ gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gotest.tools/v3 v3.0.3 h1:4AuOwCGf4lLR9u3YOe2awrHygurzhO/HeQ6laiA6Sx0= @@ -1189,8 +1178,8 @@ modernc.org/memory v1.5.0 h1:N+/8c5rE6EqugZwHii4IFsaJ7MUhoWX07J5tC/iI5Ds= modernc.org/memory v1.5.0/go.mod 
h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU= modernc.org/opt v0.1.3 h1:3XOZf2yznlhC+ibLltsDGzABUGVx8J6pnFMS3E4dcq4= modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0= -modernc.org/sqlite v1.22.1 h1:P2+Dhp5FR1RlVRkQ3dDfCiv3Ok8XPxqpe70IjYVA9oE= -modernc.org/sqlite v1.22.1/go.mod h1:OrDj17Mggn6MhE+iPbBNf7RGKODDE9NFT0f3EwDzJqk= +modernc.org/sqlite v1.23.0 h1:MWTFBI5H1WLnXpNBh/BTruBVqzzoh28DA0iOnlkkRaM= +modernc.org/sqlite v1.23.0/go.mod h1:OrDj17Mggn6MhE+iPbBNf7RGKODDE9NFT0f3EwDzJqk= modernc.org/strutil v1.1.3 h1:fNMm+oJklMGYfU9Ylcywl0CO5O6nTfaowNsh2wpPjzY= modernc.org/strutil v1.1.3/go.mod h1:MEHNA7PdEnEwLvspRMtWTNnp2nnyvMfkimT1NKNAGbw= modernc.org/tcl v1.15.2 h1:C4ybAYCGJw968e+Me18oW55kD/FexcHbqH2xak1ROSY= @@ -1198,7 +1187,6 @@ modernc.org/token v1.0.1 h1:A3qvTqOwexpfZZeyI0FeGPDlSWX5pjZu9hF4lU+EKWg= modernc.org/token v1.0.1/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= modernc.org/z v1.7.3 h1:zDJf6iHjrnB+WRD88stbXokugjyc0/pB91ri1gO6LZY= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= diff --git a/internal/config/application.go b/internal/config/application.go index 1a37594f30b..9f3274265fa 100644 --- a/internal/config/application.go +++ b/internal/config/application.go @@ -61,6 +61,8 @@ type Application struct { Exclusions []string `yaml:"exclude" json:"exclude" mapstructure:"exclude"` Platform string `yaml:"platform" json:"platform" mapstructure:"platform"` Name string `yaml:"name" json:"name" mapstructure:"name"` + SourceName string `yaml:"source-name" json:"source-name" mapstructure:"source-name"` + SourceVersion string `yaml:"source-version" json:"source-version" 
mapstructure:"source-version"` Parallelism int `yaml:"parallelism" json:"parallelism" mapstructure:"parallelism"` // the number of catalog workers to run in parallel DefaultImagePullSource string `yaml:"default-image-pull-source" json:"default-image-pull-source" mapstructure:"default-image-pull-source"` // specify default image pull source } @@ -143,6 +145,13 @@ func (cfg *Application) parseConfigValues() error { return err } + if cfg.Name != "" { + log.Warnf("name parameter is deprecated. please use: source-name. name will be removed in a future version") + if cfg.SourceName == "" { + cfg.SourceName = cfg.Name + } + } + // check for valid default source options // parse nested config options // for each field in the configuration struct, see if the field implements the parser interface diff --git a/internal/constants.go b/internal/constants.go index b7be2824039..73bd40a41b8 100644 --- a/internal/constants.go +++ b/internal/constants.go @@ -6,5 +6,5 @@ const ( // JSONSchemaVersion is the current schema version output by the JSON encoder // This is roughly following the "SchemaVer" guidelines for versioning the JSON schema. Please see schema/json/README.md for details on how to increment. 
- JSONSchemaVersion = "8.0.0" + JSONSchemaVersion = "8.0.1" ) diff --git a/internal/licenses/parser.go b/internal/licenses/parser.go index b5cde28f7d0..58e4deb2d22 100644 --- a/internal/licenses/parser.go +++ b/internal/licenses/parser.go @@ -5,9 +5,9 @@ import ( "github.com/google/licensecheck" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/license" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) const ( @@ -16,7 +16,7 @@ const ( ) // Parse scans the contents of a license file to attempt to determine the type of license it is -func Parse(reader io.Reader, l source.Location) (licenses []pkg.License, err error) { +func Parse(reader io.Reader, l file.Location) (licenses []pkg.License, err error) { licenses = make([]pkg.License, 0) contents, err := io.ReadAll(reader) if err != nil { diff --git a/schema/json/generate/main.go b/schema/json/generate/main.go new file mode 100644 index 00000000000..fc8dc120a21 --- /dev/null +++ b/schema/json/generate/main.go @@ -0,0 +1,50 @@ +package main + +import ( + "fmt" + "os" + + "github.com/dave/jennifer/jen" + + "github.com/anchore/syft/schema/json/internal" +) + +// This program generates internal/generated.go. 
+ +const ( + pkgImport = "github.com/anchore/syft/syft/pkg" + path = "internal/generated.go" +) + +func main() { + typeNames, err := internal.AllSyftMetadataTypeNames() + if err != nil { + panic(fmt.Errorf("unable to get all metadata type names: %w", err)) + } + + fmt.Printf("updating metadata container object with %+v types\n", len(typeNames)) + + f := jen.NewFile("internal") + f.HeaderComment("DO NOT EDIT: generated by schema/json/generate/main.go") + f.ImportName(pkgImport, "pkg") + f.Comment("ArtifactMetadataContainer is a struct that contains all the metadata types for a package, as represented in the pkg.Package.Metadata field.") + f.Type().Id("ArtifactMetadataContainer").StructFunc(func(g *jen.Group) { + for _, typeName := range typeNames { + g.Id(typeName).Qual(pkgImport, typeName) + } + }) + + rendered := fmt.Sprintf("%#v", f) + + fh, err := os.OpenFile(path, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0644) + if err != nil { + panic(fmt.Errorf("unable to open file: %w", err)) + } + _, err = fh.WriteString(rendered) + if err != nil { + panic(fmt.Errorf("unable to write file: %w", err)) + } + if err := fh.Close(); err != nil { + panic(fmt.Errorf("unable to close file: %w", err)) + } +} diff --git a/schema/json/internal/generated.go b/schema/json/internal/generated.go new file mode 100644 index 00000000000..3341818deb7 --- /dev/null +++ b/schema/json/internal/generated.go @@ -0,0 +1,39 @@ +// DO NOT EDIT: generated by schema/json/generate/main.go + +package internal + +import "github.com/anchore/syft/syft/pkg" + +// ArtifactMetadataContainer is a struct that contains all the metadata types for a package, as represented in the pkg.Package.Metadata field. 
+type ArtifactMetadataContainer struct { + AlpmMetadata pkg.AlpmMetadata + ApkMetadata pkg.ApkMetadata + BinaryMetadata pkg.BinaryMetadata + CargoPackageMetadata pkg.CargoPackageMetadata + CocoapodsMetadata pkg.CocoapodsMetadata + ConanLockMetadata pkg.ConanLockMetadata + ConanMetadata pkg.ConanMetadata + DartPubMetadata pkg.DartPubMetadata + DotnetDepsMetadata pkg.DotnetDepsMetadata + DpkgMetadata pkg.DpkgMetadata + GemMetadata pkg.GemMetadata + GolangBinMetadata pkg.GolangBinMetadata + GolangModMetadata pkg.GolangModMetadata + HackageMetadata pkg.HackageMetadata + JavaMetadata pkg.JavaMetadata + KbPackageMetadata pkg.KbPackageMetadata + LinuxKernelMetadata pkg.LinuxKernelMetadata + LinuxKernelModuleMetadata pkg.LinuxKernelModuleMetadata + MixLockMetadata pkg.MixLockMetadata + NixStoreMetadata pkg.NixStoreMetadata + NpmPackageJSONMetadata pkg.NpmPackageJSONMetadata + NpmPackageLockJSONMetadata pkg.NpmPackageLockJSONMetadata + PhpComposerJSONMetadata pkg.PhpComposerJSONMetadata + PortageMetadata pkg.PortageMetadata + PythonPackageMetadata pkg.PythonPackageMetadata + PythonPipfileLockMetadata pkg.PythonPipfileLockMetadata + PythonRequirementsMetadata pkg.PythonRequirementsMetadata + RDescriptionFileMetadata pkg.RDescriptionFileMetadata + RebarLockMetadata pkg.RebarLockMetadata + RpmMetadata pkg.RpmMetadata +} diff --git a/schema/json/internal/metadata_types.go b/schema/json/internal/metadata_types.go new file mode 100644 index 00000000000..4d515a18890 --- /dev/null +++ b/schema/json/internal/metadata_types.go @@ -0,0 +1,150 @@ +package internal + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "os/exec" + "path/filepath" + "sort" + "strings" + "unicode" + + "github.com/scylladb/go-set/strset" +) + +var metadataExceptions = strset.New( + "FileMetadata", +) + +func AllSyftMetadataTypeNames() ([]string, error) { + root, err := repoRoot() + if err != nil { + return nil, err + } + files, err := filepath.Glob(filepath.Join(root, "syft/pkg/*.go")) + if err != nil 
{ + return nil, err + } + return findMetadataDefinitionNames(files...) +} + +func repoRoot() (string, error) { + root, err := exec.Command("git", "rev-parse", "--show-toplevel").Output() + if err != nil { + return "", fmt.Errorf("unable to find repo root dir: %+v", err) + } + absRepoRoot, err := filepath.Abs(strings.TrimSpace(string(root))) + if err != nil { + return "", fmt.Errorf("unable to get abs path to repo root: %w", err) + } + return absRepoRoot, nil +} + +func findMetadataDefinitionNames(paths ...string) ([]string, error) { + names := strset.New() + usedNames := strset.New() + for _, path := range paths { + metadataDefinitions, usedTypeNames, err := findMetadataDefinitionNamesInFile(path) + if err != nil { + return nil, err + } + + // useful for debugging... + // fmt.Println(path) + // fmt.Println("Defs:", metadataDefinitions) + // fmt.Println("Used Types:", usedTypeNames) + // fmt.Println() + + names.Add(metadataDefinitions...) + usedNames.Add(usedTypeNames...) + } + + // any definition that is used within another struct should not be considered a top-level metadata definition + names.Remove(usedNames.List()...) + + strNames := names.List() + sort.Strings(strNames) + + // note: 30 is a point-in-time gut check. This number could be updated if new metadata definitions are added, but is not required. + // it is really intended to catch any major issues with the generation process that would generate, say, 0 definitions. 
+ if len(strNames) < 30 { + return nil, fmt.Errorf("not enough metadata definitions found (discovered: " + fmt.Sprintf("%d", len(strNames)) + ")") + } + + return strNames, nil +} + +func findMetadataDefinitionNamesInFile(path string) ([]string, []string, error) { + // set up the parser + fs := token.NewFileSet() + f, err := parser.ParseFile(fs, path, nil, parser.ParseComments) + if err != nil { + return nil, nil, err + } + + var metadataDefinitions []string + var usedTypeNames []string + for _, decl := range f.Decls { + // check if the declaration is a type declaration + spec, ok := decl.(*ast.GenDecl) + if !ok || spec.Tok != token.TYPE { + continue + } + + // loop over all types declared in the type declaration + for _, typ := range spec.Specs { + // check if the type is a struct type + spec, ok := typ.(*ast.TypeSpec) + if !ok || spec.Type == nil { + continue + } + + structType, ok := spec.Type.(*ast.StructType) + if !ok { + continue + } + + // check if the struct type ends with "Metadata" + name := spec.Name.String() + + // only look for exported types that end with "Metadata" + if isMetadataTypeCandidate(name) { + // print the full declaration of the struct type + metadataDefinitions = append(metadataDefinitions, name) + usedTypeNames = append(usedTypeNames, typeNamesUsedInStruct(structType)...) 
+ } + } + } + return metadataDefinitions, usedTypeNames, nil +} + +func typeNamesUsedInStruct(structType *ast.StructType) []string { + // recursively find all type names used in the struct type + var names []string + for i := range structType.Fields.List { + // capture names of all of the types (not field names) + ast.Inspect(structType.Fields.List[i].Type, func(n ast.Node) bool { + ident, ok := n.(*ast.Ident) + if !ok { + return true + } + + // add the type name to the list + names = append(names, ident.Name) + + // continue inspecting + return true + }) + } + + return names +} + +func isMetadataTypeCandidate(name string) bool { + return len(name) > 0 && + strings.HasSuffix(name, "Metadata") && + unicode.IsUpper(rune(name[0])) && // must be exported + !metadataExceptions.Has(name) +} diff --git a/schema/json/generate.go b/schema/json/main.go similarity index 60% rename from schema/json/generate.go rename to schema/json/main.go index 169e3c22ff8..246abc532a5 100644 --- a/schema/json/generate.go +++ b/schema/json/main.go @@ -13,8 +13,8 @@ import ( "github.com/invopop/jsonschema" "github.com/anchore/syft/internal" + genInt "github.com/anchore/syft/schema/json/internal" syftjsonModel "github.com/anchore/syft/syft/formats/syftjson/model" - "github.com/anchore/syft/syft/pkg" ) /* @@ -24,46 +24,9 @@ are not captured (empty interfaces). This means that pkg.Package.Metadata is not can be extended to include specific package metadata struct shapes in the future. */ -// This should represent all possible metadatas represented in the pkg.Package.Metadata field (an interface{}). -// When a new package metadata definition is created it will need to be manually added here. The variable name does -// not matter as long as it is exported. - -// TODO: this should be generated from reflection of whats in the pkg package -// Should be created during generation below; use reflection's ability to -// create types at runtime. 
-// should be same name as struct minus metadata -type artifactMetadataContainer struct { - Alpm pkg.AlpmMetadata - Apk pkg.ApkMetadata - Binary pkg.BinaryMetadata - Cocopods pkg.CocoapodsMetadata - Conan pkg.ConanMetadata - ConanLock pkg.ConanLockMetadata - Dart pkg.DartPubMetadata - Dotnet pkg.DotnetDepsMetadata - Dpkg pkg.DpkgMetadata - Gem pkg.GemMetadata - GoBin pkg.GolangBinMetadata - GoMod pkg.GolangModMetadata - Hackage pkg.HackageMetadata - Java pkg.JavaMetadata - KbPackage pkg.KbPackageMetadata - LinuxKernel pkg.LinuxKernelMetadata - LinuxKernelModule pkg.LinuxKernelModuleMetadata - Nix pkg.NixStoreMetadata - NpmPackage pkg.NpmPackageJSONMetadata - NpmPackageLock pkg.NpmPackageLockJSONMetadata - MixLock pkg.MixLockMetadata - Php pkg.PhpComposerJSONMetadata - Portage pkg.PortageMetadata - PythonPackage pkg.PythonPackageMetadata - PythonPipfilelock pkg.PythonPipfileLockMetadata - PythonRequirements pkg.PythonRequirementsMetadata - RDescriptionFile pkg.RDescriptionFileMetadata - Rebar pkg.RebarLockMetadata - Rpm pkg.RpmMetadata - RustCargo pkg.CargoPackageMetadata -} +//go:generate go run ./generate/main.go + +const schemaVersion = internal.JSONSchemaVersion func main() { write(encode(build())) @@ -77,14 +40,14 @@ func build() *jsonschema.Schema { }, } documentSchema := reflector.ReflectFromType(reflect.TypeOf(&syftjsonModel.Document{})) - metadataSchema := reflector.ReflectFromType(reflect.TypeOf(&artifactMetadataContainer{})) + metadataSchema := reflector.ReflectFromType(reflect.TypeOf(&genInt.ArtifactMetadataContainer{})) // TODO: inject source definitions // inject the definitions of all metadatas into the schema definitions var metadataNames []string for name, definition := range metadataSchema.Definitions { - if name == "artifactMetadataContainer" { + if name == reflect.TypeOf(genInt.ArtifactMetadataContainer{}).Name() { // ignore the definition for the fake container continue } @@ -130,7 +93,7 @@ func encode(schema *jsonschema.Schema) []byte { } func 
write(schema []byte) { - filename := fmt.Sprintf("schema-%s.json", internal.JSONSchemaVersion) + filename := fmt.Sprintf("schema-%s.json", schemaVersion) if _, err := os.Stat(filename); !os.IsNotExist(err) { // check if the schema is the same... @@ -167,5 +130,5 @@ func write(schema []byte) { defer fh.Close() - fmt.Printf("wrote new schema to %q\n", filename) + fmt.Printf("Wrote new schema to %q\n", filename) } diff --git a/schema/json/main_test.go b/schema/json/main_test.go new file mode 100644 index 00000000000..0903b4dde39 --- /dev/null +++ b/schema/json/main_test.go @@ -0,0 +1,39 @@ +package main + +import ( + "reflect" + "sort" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/anchore/syft/schema/json/internal" +) + +func TestAllMetadataRepresented(t *testing.T) { + // this test checks that all the metadata types are represented in the currently generated ArtifactMetadataContainer struct + // such that PRs will reflect when there is drift from the implemented set of metadata types and the generated struct + // which controls the JSON schema content. 
+ expected, err := internal.AllSyftMetadataTypeNames() + require.NoError(t, err) + actual := allTypeNamesFromStruct(internal.ArtifactMetadataContainer{}) + if !assert.ElementsMatch(t, expected, actual) { + t.Errorf("metadata types not fully represented: \n%s", cmp.Diff(expected, actual)) + t.Log("did you add a new pkg.*Metadata type without updating the JSON schema?") + t.Log("if so, you need to update the schema version and regenerate the JSON schema (make generate-json-schema)") + } +} + +func allTypeNamesFromStruct(instance any) []string { + // get all the type names from the struct (not recursively) + var typeNames []string + tt := reflect.TypeOf(instance) + for i := 0; i < tt.NumField(); i++ { + field := tt.Field(i) + typeNames = append(typeNames, field.Type.Name()) + } + sort.Strings(typeNames) + return typeNames +} diff --git a/schema/json/schema-8.0.1.json b/schema/json/schema-8.0.1.json new file mode 100644 index 00000000000..1aefee9b3f5 --- /dev/null +++ b/schema/json/schema-8.0.1.json @@ -0,0 +1,1873 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://github.com/anchore/syft/syft/formats/syftjson/model/document", + "$ref": "#/$defs/Document", + "$defs": { + "AlpmFileRecord": { + "properties": { + "path": { + "type": "string" + }, + "type": { + "type": "string" + }, + "uid": { + "type": "string" + }, + "gid": { + "type": "string" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "size": { + "type": "string" + }, + "link": { + "type": "string" + }, + "digest": { + "items": { + "$ref": "#/$defs/Digest" + }, + "type": "array" + } + }, + "type": "object" + }, + "AlpmMetadata": { + "properties": { + "basepackage": { + "type": "string" + }, + "package": { + "type": "string" + }, + "version": { + "type": "string" + }, + "description": { + "type": "string" + }, + "architecture": { + "type": "string" + }, + "size": { + "type": "integer" + }, + "packager": { + "type": "string" + }, + "url": { + "type": "string" + 
}, + "validation": { + "type": "string" + }, + "reason": { + "type": "integer" + }, + "files": { + "items": { + "$ref": "#/$defs/AlpmFileRecord" + }, + "type": "array" + }, + "backup": { + "items": { + "$ref": "#/$defs/AlpmFileRecord" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "basepackage", + "package", + "version", + "description", + "architecture", + "size", + "packager", + "url", + "validation", + "reason", + "files", + "backup" + ] + }, + "ApkFileRecord": { + "properties": { + "path": { + "type": "string" + }, + "ownerUid": { + "type": "string" + }, + "ownerGid": { + "type": "string" + }, + "permissions": { + "type": "string" + }, + "digest": { + "$ref": "#/$defs/Digest" + } + }, + "type": "object", + "required": [ + "path" + ] + }, + "ApkMetadata": { + "properties": { + "package": { + "type": "string" + }, + "originPackage": { + "type": "string" + }, + "maintainer": { + "type": "string" + }, + "version": { + "type": "string" + }, + "architecture": { + "type": "string" + }, + "url": { + "type": "string" + }, + "description": { + "type": "string" + }, + "size": { + "type": "integer" + }, + "installedSize": { + "type": "integer" + }, + "pullDependencies": { + "items": { + "type": "string" + }, + "type": "array" + }, + "provides": { + "items": { + "type": "string" + }, + "type": "array" + }, + "pullChecksum": { + "type": "string" + }, + "gitCommitOfApkPort": { + "type": "string" + }, + "files": { + "items": { + "$ref": "#/$defs/ApkFileRecord" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "package", + "originPackage", + "maintainer", + "version", + "architecture", + "url", + "description", + "size", + "installedSize", + "pullDependencies", + "provides", + "pullChecksum", + "gitCommitOfApkPort", + "files" + ] + }, + "BinaryMetadata": { + "properties": { + "matches": { + "items": { + "$ref": "#/$defs/ClassifierMatch" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "matches" + ] + }, + 
"CargoPackageMetadata": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "source": { + "type": "string" + }, + "checksum": { + "type": "string" + }, + "dependencies": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "name", + "version", + "source", + "checksum", + "dependencies" + ] + }, + "ClassifierMatch": { + "properties": { + "classifier": { + "type": "string" + }, + "location": { + "$ref": "#/$defs/Location" + } + }, + "type": "object", + "required": [ + "classifier", + "location" + ] + }, + "CocoapodsMetadata": { + "properties": { + "checksum": { + "type": "string" + } + }, + "type": "object", + "required": [ + "checksum" + ] + }, + "ConanLockMetadata": { + "properties": { + "ref": { + "type": "string" + }, + "package_id": { + "type": "string" + }, + "prev": { + "type": "string" + }, + "requires": { + "type": "string" + }, + "build_requires": { + "type": "string" + }, + "py_requires": { + "type": "string" + }, + "options": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + }, + "path": { + "type": "string" + }, + "context": { + "type": "string" + } + }, + "type": "object", + "required": [ + "ref" + ] + }, + "ConanMetadata": { + "properties": { + "ref": { + "type": "string" + } + }, + "type": "object", + "required": [ + "ref" + ] + }, + "Coordinates": { + "properties": { + "path": { + "type": "string" + }, + "layerID": { + "type": "string" + } + }, + "type": "object", + "required": [ + "path" + ] + }, + "DartPubMetadata": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "hosted_url": { + "type": "string" + }, + "vcs_url": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "version" + ] + }, + "Descriptor": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "configuration": true + }, + "type": "object", + 
"required": [ + "name", + "version" + ] + }, + "Digest": { + "properties": { + "algorithm": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "type": "object", + "required": [ + "algorithm", + "value" + ] + }, + "Document": { + "properties": { + "artifacts": { + "items": { + "$ref": "#/$defs/Package" + }, + "type": "array" + }, + "artifactRelationships": { + "items": { + "$ref": "#/$defs/Relationship" + }, + "type": "array" + }, + "files": { + "items": { + "$ref": "#/$defs/File" + }, + "type": "array" + }, + "secrets": { + "items": { + "$ref": "#/$defs/Secrets" + }, + "type": "array" + }, + "source": { + "$ref": "#/$defs/Source" + }, + "distro": { + "$ref": "#/$defs/LinuxRelease" + }, + "descriptor": { + "$ref": "#/$defs/Descriptor" + }, + "schema": { + "$ref": "#/$defs/Schema" + } + }, + "type": "object", + "required": [ + "artifacts", + "artifactRelationships", + "source", + "distro", + "descriptor", + "schema" + ] + }, + "DotnetDepsMetadata": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "path": { + "type": "string" + }, + "sha512": { + "type": "string" + }, + "hashPath": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "version", + "path", + "sha512", + "hashPath" + ] + }, + "DpkgFileRecord": { + "properties": { + "path": { + "type": "string" + }, + "digest": { + "$ref": "#/$defs/Digest" + }, + "isConfigFile": { + "type": "boolean" + } + }, + "type": "object", + "required": [ + "path", + "isConfigFile" + ] + }, + "DpkgMetadata": { + "properties": { + "package": { + "type": "string" + }, + "source": { + "type": "string" + }, + "version": { + "type": "string" + }, + "sourceVersion": { + "type": "string" + }, + "architecture": { + "type": "string" + }, + "maintainer": { + "type": "string" + }, + "installedSize": { + "type": "integer" + }, + "files": { + "items": { + "$ref": "#/$defs/DpkgFileRecord" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + 
"package", + "source", + "version", + "sourceVersion", + "architecture", + "maintainer", + "installedSize", + "files" + ] + }, + "File": { + "properties": { + "id": { + "type": "string" + }, + "location": { + "$ref": "#/$defs/Coordinates" + }, + "metadata": { + "$ref": "#/$defs/FileMetadataEntry" + }, + "contents": { + "type": "string" + }, + "digests": { + "items": { + "$ref": "#/$defs/Digest" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "id", + "location" + ] + }, + "FileMetadataEntry": { + "properties": { + "mode": { + "type": "integer" + }, + "type": { + "type": "string" + }, + "linkDestination": { + "type": "string" + }, + "userID": { + "type": "integer" + }, + "groupID": { + "type": "integer" + }, + "mimeType": { + "type": "string" + }, + "size": { + "type": "integer" + } + }, + "type": "object", + "required": [ + "mode", + "type", + "userID", + "groupID", + "mimeType", + "size" + ] + }, + "GemMetadata": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "files": { + "items": { + "type": "string" + }, + "type": "array" + }, + "authors": { + "items": { + "type": "string" + }, + "type": "array" + }, + "homepage": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "version" + ] + }, + "GolangBinMetadata": { + "properties": { + "goBuildSettings": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + }, + "goCompiledVersion": { + "type": "string" + }, + "architecture": { + "type": "string" + }, + "h1Digest": { + "type": "string" + }, + "mainModule": { + "type": "string" + } + }, + "type": "object", + "required": [ + "goCompiledVersion", + "architecture" + ] + }, + "GolangModMetadata": { + "properties": { + "h1Digest": { + "type": "string" + } + }, + "type": "object" + }, + "HackageMetadata": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "pkgHash": { + "type": "string" + }, + 
"snapshotURL": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "version" + ] + }, + "IDLikes": { + "items": { + "type": "string" + }, + "type": "array" + }, + "JavaManifest": { + "properties": { + "main": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + }, + "namedSections": { + "patternProperties": { + ".*": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object" + } + }, + "type": "object" + }, + "JavaMetadata": { + "properties": { + "virtualPath": { + "type": "string" + }, + "manifest": { + "$ref": "#/$defs/JavaManifest" + }, + "pomProperties": { + "$ref": "#/$defs/PomProperties" + }, + "pomProject": { + "$ref": "#/$defs/PomProject" + }, + "digest": { + "items": { + "$ref": "#/$defs/Digest" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "virtualPath" + ] + }, + "KbPackageMetadata": { + "properties": { + "product_id": { + "type": "string" + }, + "kb": { + "type": "string" + } + }, + "type": "object", + "required": [ + "product_id", + "kb" + ] + }, + "License": { + "properties": { + "value": { + "type": "string" + }, + "spdxExpression": { + "type": "string" + }, + "type": { + "type": "string" + }, + "urls": { + "items": { + "type": "string" + }, + "type": "array" + }, + "locations": { + "items": { + "$ref": "#/$defs/Location" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "value", + "spdxExpression", + "type", + "urls", + "locations" + ] + }, + "LinuxKernelMetadata": { + "properties": { + "name": { + "type": "string" + }, + "architecture": { + "type": "string" + }, + "version": { + "type": "string" + }, + "extendedVersion": { + "type": "string" + }, + "buildTime": { + "type": "string" + }, + "author": { + "type": "string" + }, + "format": { + "type": "string" + }, + "rwRootFS": { + "type": "boolean" + }, + "swapDevice": { + "type": "integer" + }, + "rootDevice": { + "type": "integer" + }, + 
"videoMode": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "architecture", + "version" + ] + }, + "LinuxKernelModuleMetadata": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "sourceVersion": { + "type": "string" + }, + "path": { + "type": "string" + }, + "description": { + "type": "string" + }, + "author": { + "type": "string" + }, + "license": { + "type": "string" + }, + "kernelVersion": { + "type": "string" + }, + "versionMagic": { + "type": "string" + }, + "parameters": { + "patternProperties": { + ".*": { + "$ref": "#/$defs/LinuxKernelModuleParameter" + } + }, + "type": "object" + } + }, + "type": "object" + }, + "LinuxKernelModuleParameter": { + "properties": { + "type": { + "type": "string" + }, + "description": { + "type": "string" + } + }, + "type": "object" + }, + "LinuxRelease": { + "properties": { + "prettyName": { + "type": "string" + }, + "name": { + "type": "string" + }, + "id": { + "type": "string" + }, + "idLike": { + "$ref": "#/$defs/IDLikes" + }, + "version": { + "type": "string" + }, + "versionID": { + "type": "string" + }, + "versionCodename": { + "type": "string" + }, + "buildID": { + "type": "string" + }, + "imageID": { + "type": "string" + }, + "imageVersion": { + "type": "string" + }, + "variant": { + "type": "string" + }, + "variantID": { + "type": "string" + }, + "homeURL": { + "type": "string" + }, + "supportURL": { + "type": "string" + }, + "bugReportURL": { + "type": "string" + }, + "privacyPolicyURL": { + "type": "string" + }, + "cpeName": { + "type": "string" + }, + "supportEnd": { + "type": "string" + } + }, + "type": "object" + }, + "Location": { + "properties": { + "path": { + "type": "string" + }, + "layerID": { + "type": "string" + }, + "annotations": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object", + "required": [ + "path" + ] + }, + "MixLockMetadata": { + "properties": { + "name": { + 
"type": "string" + }, + "version": { + "type": "string" + }, + "pkgHash": { + "type": "string" + }, + "pkgHashExt": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "version", + "pkgHash", + "pkgHashExt" + ] + }, + "NixStoreMetadata": { + "properties": { + "outputHash": { + "type": "string" + }, + "output": { + "type": "string" + }, + "files": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "outputHash", + "files" + ] + }, + "NpmPackageJSONMetadata": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "author": { + "type": "string" + }, + "homepage": { + "type": "string" + }, + "description": { + "type": "string" + }, + "url": { + "type": "string" + }, + "private": { + "type": "boolean" + } + }, + "type": "object", + "required": [ + "name", + "version", + "author", + "homepage", + "description", + "url", + "private" + ] + }, + "NpmPackageLockJSONMetadata": { + "properties": { + "resolved": { + "type": "string" + }, + "integrity": { + "type": "string" + } + }, + "type": "object", + "required": [ + "resolved", + "integrity" + ] + }, + "Package": { + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "type": { + "type": "string" + }, + "foundBy": { + "type": "string" + }, + "locations": { + "items": { + "$ref": "#/$defs/Location" + }, + "type": "array" + }, + "licenses": { + "$ref": "#/$defs/licenses" + }, + "language": { + "type": "string" + }, + "cpes": { + "items": { + "type": "string" + }, + "type": "array" + }, + "purl": { + "type": "string" + }, + "metadataType": { + "type": "string" + }, + "metadata": { + "anyOf": [ + { + "type": "null" + }, + { + "$ref": "#/$defs/AlpmMetadata" + }, + { + "$ref": "#/$defs/ApkMetadata" + }, + { + "$ref": "#/$defs/BinaryMetadata" + }, + { + "$ref": "#/$defs/CargoPackageMetadata" + }, + { + "$ref": "#/$defs/CocoapodsMetadata" + 
}, + { + "$ref": "#/$defs/ConanLockMetadata" + }, + { + "$ref": "#/$defs/ConanMetadata" + }, + { + "$ref": "#/$defs/DartPubMetadata" + }, + { + "$ref": "#/$defs/DotnetDepsMetadata" + }, + { + "$ref": "#/$defs/DpkgMetadata" + }, + { + "$ref": "#/$defs/GemMetadata" + }, + { + "$ref": "#/$defs/GolangBinMetadata" + }, + { + "$ref": "#/$defs/GolangModMetadata" + }, + { + "$ref": "#/$defs/HackageMetadata" + }, + { + "$ref": "#/$defs/JavaMetadata" + }, + { + "$ref": "#/$defs/KbPackageMetadata" + }, + { + "$ref": "#/$defs/LinuxKernelMetadata" + }, + { + "$ref": "#/$defs/LinuxKernelModuleMetadata" + }, + { + "$ref": "#/$defs/MixLockMetadata" + }, + { + "$ref": "#/$defs/NixStoreMetadata" + }, + { + "$ref": "#/$defs/NpmPackageJSONMetadata" + }, + { + "$ref": "#/$defs/NpmPackageLockJSONMetadata" + }, + { + "$ref": "#/$defs/PhpComposerJSONMetadata" + }, + { + "$ref": "#/$defs/PortageMetadata" + }, + { + "$ref": "#/$defs/PythonPackageMetadata" + }, + { + "$ref": "#/$defs/PythonPipfileLockMetadata" + }, + { + "$ref": "#/$defs/PythonRequirementsMetadata" + }, + { + "$ref": "#/$defs/RDescriptionFileMetadata" + }, + { + "$ref": "#/$defs/RebarLockMetadata" + }, + { + "$ref": "#/$defs/RpmMetadata" + } + ] + } + }, + "type": "object", + "required": [ + "id", + "name", + "version", + "type", + "foundBy", + "locations", + "licenses", + "language", + "cpes", + "purl" + ] + }, + "PhpComposerAuthors": { + "properties": { + "name": { + "type": "string" + }, + "email": { + "type": "string" + }, + "homepage": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name" + ] + }, + "PhpComposerExternalReference": { + "properties": { + "type": { + "type": "string" + }, + "url": { + "type": "string" + }, + "reference": { + "type": "string" + }, + "shasum": { + "type": "string" + } + }, + "type": "object", + "required": [ + "type", + "url", + "reference" + ] + }, + "PhpComposerJSONMetadata": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, 
+ "source": { + "$ref": "#/$defs/PhpComposerExternalReference" + }, + "dist": { + "$ref": "#/$defs/PhpComposerExternalReference" + }, + "require": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + }, + "provide": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + }, + "require-dev": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + }, + "suggest": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + }, + "license": { + "items": { + "type": "string" + }, + "type": "array" + }, + "type": { + "type": "string" + }, + "notification-url": { + "type": "string" + }, + "bin": { + "items": { + "type": "string" + }, + "type": "array" + }, + "authors": { + "items": { + "$ref": "#/$defs/PhpComposerAuthors" + }, + "type": "array" + }, + "description": { + "type": "string" + }, + "homepage": { + "type": "string" + }, + "keywords": { + "items": { + "type": "string" + }, + "type": "array" + }, + "time": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "version", + "source", + "dist" + ] + }, + "PomParent": { + "properties": { + "groupId": { + "type": "string" + }, + "artifactId": { + "type": "string" + }, + "version": { + "type": "string" + } + }, + "type": "object", + "required": [ + "groupId", + "artifactId", + "version" + ] + }, + "PomProject": { + "properties": { + "path": { + "type": "string" + }, + "parent": { + "$ref": "#/$defs/PomParent" + }, + "groupId": { + "type": "string" + }, + "artifactId": { + "type": "string" + }, + "version": { + "type": "string" + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "url": { + "type": "string" + } + }, + "type": "object", + "required": [ + "path", + "groupId", + "artifactId", + "version", + "name" + ] + }, + "PomProperties": { + "properties": { + "path": { + "type": "string" + }, + "name": { + "type": "string" + }, + "groupId": { + 
"type": "string" + }, + "artifactId": { + "type": "string" + }, + "version": { + "type": "string" + }, + "scope": { + "type": "string" + }, + "extraFields": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object", + "required": [ + "path", + "name", + "groupId", + "artifactId", + "version" + ] + }, + "PortageFileRecord": { + "properties": { + "path": { + "type": "string" + }, + "digest": { + "$ref": "#/$defs/Digest" + } + }, + "type": "object", + "required": [ + "path" + ] + }, + "PortageMetadata": { + "properties": { + "installedSize": { + "type": "integer" + }, + "files": { + "items": { + "$ref": "#/$defs/PortageFileRecord" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "installedSize", + "files" + ] + }, + "PythonDirectURLOriginInfo": { + "properties": { + "url": { + "type": "string" + }, + "commitId": { + "type": "string" + }, + "vcs": { + "type": "string" + } + }, + "type": "object", + "required": [ + "url" + ] + }, + "PythonFileDigest": { + "properties": { + "algorithm": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "type": "object", + "required": [ + "algorithm", + "value" + ] + }, + "PythonFileRecord": { + "properties": { + "path": { + "type": "string" + }, + "digest": { + "$ref": "#/$defs/PythonFileDigest" + }, + "size": { + "type": "string" + } + }, + "type": "object", + "required": [ + "path" + ] + }, + "PythonPackageMetadata": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "author": { + "type": "string" + }, + "authorEmail": { + "type": "string" + }, + "platform": { + "type": "string" + }, + "files": { + "items": { + "$ref": "#/$defs/PythonFileRecord" + }, + "type": "array" + }, + "sitePackagesRootPath": { + "type": "string" + }, + "topLevelPackages": { + "items": { + "type": "string" + }, + "type": "array" + }, + "directUrlOrigin": { + "$ref": "#/$defs/PythonDirectURLOriginInfo" + } + }, + "type": 
"object", + "required": [ + "name", + "version", + "author", + "authorEmail", + "platform", + "sitePackagesRootPath" + ] + }, + "PythonPipfileLockMetadata": { + "properties": { + "hashes": { + "items": { + "type": "string" + }, + "type": "array" + }, + "index": { + "type": "string" + } + }, + "type": "object", + "required": [ + "hashes", + "index" + ] + }, + "PythonRequirementsMetadata": { + "properties": { + "name": { + "type": "string" + }, + "extras": { + "items": { + "type": "string" + }, + "type": "array" + }, + "versionConstraint": { + "type": "string" + }, + "url": { + "type": "string" + }, + "markers": { + "patternProperties": { + ".*": { + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object", + "required": [ + "name", + "extras", + "versionConstraint", + "url", + "markers" + ] + }, + "RDescriptionFileMetadata": { + "properties": { + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "author": { + "type": "string" + }, + "maintainer": { + "type": "string" + }, + "url": { + "items": { + "type": "string" + }, + "type": "array" + }, + "repository": { + "type": "string" + }, + "built": { + "type": "string" + }, + "needsCompilation": { + "type": "boolean" + }, + "imports": { + "items": { + "type": "string" + }, + "type": "array" + }, + "depends": { + "items": { + "type": "string" + }, + "type": "array" + }, + "suggests": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "RebarLockMetadata": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "pkgHash": { + "type": "string" + }, + "pkgHashExt": { + "type": "string" + } + }, + "type": "object", + "required": [ + "name", + "version", + "pkgHash", + "pkgHashExt" + ] + }, + "Relationship": { + "properties": { + "parent": { + "type": "string" + }, + "child": { + "type": "string" + }, + "type": { + "type": "string" + }, + "metadata": true + }, + "type": "object", + "required": [ + 
"parent", + "child", + "type" + ] + }, + "RpmMetadata": { + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "epoch": { + "oneOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ] + }, + "architecture": { + "type": "string" + }, + "release": { + "type": "string" + }, + "sourceRpm": { + "type": "string" + }, + "size": { + "type": "integer" + }, + "vendor": { + "type": "string" + }, + "modularityLabel": { + "type": "string" + }, + "files": { + "items": { + "$ref": "#/$defs/RpmdbFileRecord" + }, + "type": "array" + } + }, + "type": "object", + "required": [ + "name", + "version", + "epoch", + "architecture", + "release", + "sourceRpm", + "size", + "vendor", + "modularityLabel", + "files" + ] + }, + "RpmdbFileRecord": { + "properties": { + "path": { + "type": "string" + }, + "mode": { + "type": "integer" + }, + "size": { + "type": "integer" + }, + "digest": { + "$ref": "#/$defs/Digest" + }, + "userName": { + "type": "string" + }, + "groupName": { + "type": "string" + }, + "flags": { + "type": "string" + } + }, + "type": "object", + "required": [ + "path", + "mode", + "size", + "digest", + "userName", + "groupName", + "flags" + ] + }, + "Schema": { + "properties": { + "version": { + "type": "string" + }, + "url": { + "type": "string" + } + }, + "type": "object", + "required": [ + "version", + "url" + ] + }, + "SearchResult": { + "properties": { + "classification": { + "type": "string" + }, + "lineNumber": { + "type": "integer" + }, + "lineOffset": { + "type": "integer" + }, + "seekPosition": { + "type": "integer" + }, + "length": { + "type": "integer" + }, + "value": { + "type": "string" + } + }, + "type": "object", + "required": [ + "classification", + "lineNumber", + "lineOffset", + "seekPosition", + "length" + ] + }, + "Secrets": { + "properties": { + "location": { + "$ref": "#/$defs/Coordinates" + }, + "secrets": { + "items": { + "$ref": "#/$defs/SearchResult" + }, + "type": "array" + } + }, + "type": "object", 
+ "required": [ + "location", + "secrets" + ] + }, + "Source": { + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "target": true + }, + "type": "object", + "required": [ + "id", + "type", + "target" + ] + }, + "licenses": { + "items": { + "$ref": "#/$defs/License" + }, + "type": "array" + } + } +} diff --git a/syft/event/parsers/parsers.go b/syft/event/parsers/parsers.go index e7a3d703d4f..3d0c8bfb85b 100644 --- a/syft/event/parsers/parsers.go +++ b/syft/event/parsers/parsers.go @@ -12,7 +12,7 @@ import ( "github.com/anchore/syft/syft/event" "github.com/anchore/syft/syft/event/monitor" - "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/file/cataloger/secrets" "github.com/anchore/syft/syft/pkg/cataloger" ) @@ -54,12 +54,12 @@ func ParsePackageCatalogerStarted(e partybus.Event) (*cataloger.Monitor, error) return &monitor, nil } -func ParseSecretsCatalogingStarted(e partybus.Event) (*file.SecretsMonitor, error) { +func ParseSecretsCatalogingStarted(e partybus.Event) (*secrets.Monitor, error) { if err := checkEventType(e.Type, event.SecretsCatalogerStarted); err != nil { return nil, err } - monitor, ok := e.Value.(file.SecretsMonitor) + monitor, ok := e.Value.(secrets.Monitor) if !ok { return nil, newPayloadErr(e.Type, "Value", e.Value) } diff --git a/syft/file/contents_cataloger.go b/syft/file/cataloger/filecontent/cataloger.go similarity index 71% rename from syft/file/contents_cataloger.go rename to syft/file/cataloger/filecontent/cataloger.go index b4d7802a63d..d108af3931f 100644 --- a/syft/file/contents_cataloger.go +++ b/syft/file/cataloger/filecontent/cataloger.go @@ -1,4 +1,4 @@ -package file +package filecontent import ( "bytes" @@ -8,24 +8,26 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) -type ContentsCataloger struct { +// Deprecated: will be removed in syft v1.0.0 +type Cataloger 
struct { globs []string skipFilesAboveSizeInBytes int64 } -func NewContentsCataloger(globs []string, skipFilesAboveSize int64) (*ContentsCataloger, error) { - return &ContentsCataloger{ +// Deprecated: will be removed in syft v1.0.0 +func NewCataloger(globs []string, skipFilesAboveSize int64) (*Cataloger, error) { + return &Cataloger{ globs: globs, skipFilesAboveSizeInBytes: skipFilesAboveSize, }, nil } -func (i *ContentsCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates]string, error) { - results := make(map[source.Coordinates]string) - var locations []source.Location +func (i *Cataloger) Catalog(resolver file.Resolver) (map[file.Coordinates]string, error) { + results := make(map[file.Coordinates]string) + var locations []file.Location locations, err := resolver.FilesByGlob(i.globs...) if err != nil { @@ -37,7 +39,7 @@ func (i *ContentsCataloger) Catalog(resolver source.FileResolver) (map[source.Co return nil, err } - if i.skipFilesAboveSizeInBytes > 0 && metadata.Size > i.skipFilesAboveSizeInBytes { + if i.skipFilesAboveSizeInBytes > 0 && metadata.Size() > i.skipFilesAboveSizeInBytes { continue } @@ -56,7 +58,7 @@ func (i *ContentsCataloger) Catalog(resolver source.FileResolver) (map[source.Co return results, nil } -func (i *ContentsCataloger) catalogLocation(resolver source.FileResolver, location source.Location) (string, error) { +func (i *Cataloger) catalogLocation(resolver file.Resolver, location file.Location) (string, error) { contentReader, err := resolver.FileContentsByLocation(location) if err != nil { return "", err diff --git a/syft/file/cataloger/filecontent/cataloger_test.go b/syft/file/cataloger/filecontent/cataloger_test.go new file mode 100644 index 00000000000..719bdd48183 --- /dev/null +++ b/syft/file/cataloger/filecontent/cataloger_test.go @@ -0,0 +1,80 @@ +package filecontent + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/anchore/syft/syft/file" +) + +func TestContentsCataloger(t 
*testing.T) { + allFiles := []string{"test-fixtures/last/path.txt", "test-fixtures/another-path.txt", "test-fixtures/a-path.txt"} + + tests := []struct { + name string + globs []string + maxSize int64 + files []string + expected map[file.Coordinates]string + }{ + { + name: "multi-pattern", + globs: []string{"test-fixtures/last/*.txt", "test-fixtures/*.txt"}, + files: allFiles, + expected: map[file.Coordinates]string{ + file.NewLocation("test-fixtures/last/path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9sYXN0L3BhdGgudHh0IGZpbGUgY29udGVudHMh", + file.NewLocation("test-fixtures/another-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hbm90aGVyLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + file.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + }, + }, + { + name: "no-patterns", + globs: []string{}, + files: []string{"test-fixtures/last/path.txt", "test-fixtures/another-path.txt", "test-fixtures/a-path.txt"}, + expected: map[file.Coordinates]string{}, + }, + { + name: "all-txt", + globs: []string{"**/*.txt"}, + files: allFiles, + expected: map[file.Coordinates]string{ + file.NewLocation("test-fixtures/last/path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9sYXN0L3BhdGgudHh0IGZpbGUgY29udGVudHMh", + file.NewLocation("test-fixtures/another-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hbm90aGVyLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + file.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + }, + }, + { + name: "subpath", + globs: []string{"test-fixtures/*.txt"}, + files: allFiles, + expected: map[file.Coordinates]string{ + file.NewLocation("test-fixtures/another-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hbm90aGVyLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + file.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + }, + }, + { + name: "size-filter", + maxSize: 42, + globs: []string{"**/*.txt"}, + files: allFiles, + expected: 
map[file.Coordinates]string{ + file.NewLocation("test-fixtures/last/path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9sYXN0L3BhdGgudHh0IGZpbGUgY29udGVudHMh", + file.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + c, err := NewCataloger(test.globs, test.maxSize) + assert.NoError(t, err) + + resolver := file.NewMockResolverForPaths(test.files...) + actual, err := c.Catalog(resolver) + assert.NoError(t, err) + assert.Equal(t, test.expected, actual, "mismatched contents") + + }) + } +} diff --git a/syft/file/test-fixtures/a-path.txt b/syft/file/cataloger/filecontent/test-fixtures/a-path.txt similarity index 100% rename from syft/file/test-fixtures/a-path.txt rename to syft/file/cataloger/filecontent/test-fixtures/a-path.txt diff --git a/syft/file/test-fixtures/another-path.txt b/syft/file/cataloger/filecontent/test-fixtures/another-path.txt similarity index 100% rename from syft/file/test-fixtures/another-path.txt rename to syft/file/cataloger/filecontent/test-fixtures/another-path.txt diff --git a/syft/file/test-fixtures/last/empty/empty b/syft/file/cataloger/filecontent/test-fixtures/last/empty/empty similarity index 100% rename from syft/file/test-fixtures/last/empty/empty rename to syft/file/cataloger/filecontent/test-fixtures/last/empty/empty diff --git a/syft/file/test-fixtures/last/path.txt b/syft/file/cataloger/filecontent/test-fixtures/last/path.txt similarity index 100% rename from syft/file/test-fixtures/last/path.txt rename to syft/file/cataloger/filecontent/test-fixtures/last/path.txt diff --git a/syft/file/cataloger/filedigest/cataloger.go b/syft/file/cataloger/filedigest/cataloger.go new file mode 100644 index 00000000000..e06c05a3514 --- /dev/null +++ b/syft/file/cataloger/filedigest/cataloger.go @@ -0,0 +1,109 @@ +package filedigest + +import ( + "crypto" + "errors" + + "github.com/wagoodman/go-partybus" + 
"github.com/wagoodman/go-progress" + + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" + "github.com/anchore/syft/internal" + "github.com/anchore/syft/internal/bus" + "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/event" + "github.com/anchore/syft/syft/file" + internal2 "github.com/anchore/syft/syft/file/cataloger/internal" +) + +var ErrUndigestableFile = errors.New("undigestable file") + +type Cataloger struct { + hashes []crypto.Hash +} + +func NewCataloger(hashes []crypto.Hash) *Cataloger { + return &Cataloger{ + hashes: hashes, + } +} + +func (i *Cataloger) Catalog(resolver file.Resolver, coordinates ...file.Coordinates) (map[file.Coordinates][]file.Digest, error) { + results := make(map[file.Coordinates][]file.Digest) + var locations []file.Location + + if len(coordinates) == 0 { + locations = internal2.AllRegularFiles(resolver) + } else { + for _, c := range coordinates { + locations = append(locations, file.NewLocationFromCoordinates(c)) + } + } + + stage, prog := digestsCatalogingProgress(int64(len(locations))) + for _, location := range locations { + stage.Current = location.RealPath + result, err := i.catalogLocation(resolver, location) + + if errors.Is(err, ErrUndigestableFile) { + continue + } + + if internal.IsErrPathPermission(err) { + log.Debugf("file digests cataloger skipping %q: %+v", location.RealPath, err) + continue + } + + if err != nil { + return nil, err + } + prog.Increment() + results[location.Coordinates] = result + } + log.Debugf("file digests cataloger processed %d files", prog.Current()) + prog.SetCompleted() + return results, nil +} + +func (i *Cataloger) catalogLocation(resolver file.Resolver, location file.Location) ([]file.Digest, error) { + meta, err := resolver.FileMetadataByLocation(location) + if err != nil { + return nil, err + } + + // we should only attempt to report digests for files that are regular files (don't attempt to resolve links) + if meta.Type != stereoscopeFile.TypeRegular { + 
return nil, ErrUndigestableFile + } + + contentReader, err := resolver.FileContentsByLocation(location) + if err != nil { + return nil, err + } + defer internal.CloseAndLogError(contentReader, location.VirtualPath) + + digests, err := file.NewDigestsFromFile(contentReader, i.hashes) + if err != nil { + return nil, internal.ErrPath{Context: "digests-cataloger", Path: location.RealPath, Err: err} + } + + return digests, nil +} + +func digestsCatalogingProgress(locations int64) (*progress.Stage, *progress.Manual) { + stage := &progress.Stage{} + prog := progress.NewManual(locations) + + bus.Publish(partybus.Event{ + Type: event.FileDigestsCatalogerStarted, + Value: struct { + progress.Stager + progress.Progressable + }{ + Stager: progress.Stager(stage), + Progressable: prog, + }, + }) + + return stage, prog +} diff --git a/syft/file/digest_cataloger_test.go b/syft/file/cataloger/filedigest/cataloger_test.go similarity index 75% rename from syft/file/digest_cataloger_test.go rename to syft/file/cataloger/filedigest/cataloger_test.go index bf548ccc70e..ed8562cbd38 100644 --- a/syft/file/digest_cataloger_test.go +++ b/syft/file/cataloger/filedigest/cataloger_test.go @@ -1,4 +1,4 @@ -package file +package filedigest import ( "crypto" @@ -11,13 +11,14 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" "github.com/anchore/stereoscope/pkg/imagetest" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/source" ) -func testDigests(t testing.TB, root string, files []string, hashes ...crypto.Hash) map[source.Coordinates][]Digest { - digests := make(map[source.Coordinates][]Digest) +func testDigests(t testing.TB, root string, files []string, hashes ...crypto.Hash) map[file.Coordinates][]file.Digest { + digests := make(map[file.Coordinates][]file.Digest) for _, f := range files { fh, err := os.Open(filepath.Join(root, f)) @@ 
-29,11 +30,17 @@ func testDigests(t testing.TB, root string, files []string, hashes ...crypto.Has t.Fatalf("could not read %q : %+v", f, err) } + if len(b) == 0 { + // we don't keep digests for empty files + digests[file.NewLocation(f).Coordinates] = []file.Digest{} + continue + } + for _, hash := range hashes { h := hash.New() h.Write(b) - digests[source.NewLocation(f).Coordinates] = append(digests[source.NewLocation(f).Coordinates], Digest{ - Algorithm: CleanDigestAlgorithmName(hash.String()), + digests[file.NewLocation(f).Coordinates] = append(digests[file.NewLocation(f).Coordinates], file.Digest{ + Algorithm: file.CleanDigestAlgorithmName(hash.String()), Value: fmt.Sprintf("%x", h.Sum(nil)), }) } @@ -48,7 +55,7 @@ func TestDigestsCataloger(t *testing.T) { name string digests []crypto.Hash files []string - expected map[source.Coordinates][]Digest + expected map[file.Coordinates][]file.Digest }{ { name: "md5", @@ -66,8 +73,7 @@ func TestDigestsCataloger(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - c, err := NewDigestsCataloger(test.digests) - require.NoError(t, err) + c := NewCataloger(test.digests) src, err := source.NewFromDirectory("test-fixtures/last/") require.NoError(t, err) @@ -86,11 +92,7 @@ func TestDigestsCataloger(t *testing.T) { func TestDigestsCataloger_MixFileTypes(t *testing.T) { testImage := "image-file-type-mix" - if *updateImageGoldenFiles { - imagetest.UpdateGoldenFixtureImage(t, testImage) - } - - img := imagetest.GetGoldenFixtureImage(t, testImage) + img := imagetest.GetFixtureImage(t, "docker-archive", testImage) src, err := source.NewFromImage(img, "---") if err != nil { @@ -110,9 +112,10 @@ func TestDigestsCataloger_MixFileTypes(t *testing.T) { path: "/file-1.txt", expected: "888c139e550867814eb7c33b84d76e4d", }, - { - path: "/hardlink-1", - }, + // this is difficult to reproduce in a cross-platform way + //{ + // path: "/hardlink-1", + //}, { path: "/symlink-1", }, @@ -132,21 +135,18 @@ func 
TestDigestsCataloger_MixFileTypes(t *testing.T) { for _, test := range tests { t.Run(test.path, func(t *testing.T) { - c, err := NewDigestsCataloger([]crypto.Hash{crypto.MD5}) - if err != nil { - t.Fatalf("unable to get cataloger: %+v", err) - } + c := NewCataloger([]crypto.Hash{crypto.MD5}) actual, err := c.Catalog(resolver) if err != nil { t.Fatalf("could not catalog: %+v", err) } - _, ref, err := img.SquashedTree().File(file.Path(test.path)) + _, ref, err := img.SquashedTree().File(stereoscopeFile.Path(test.path)) if err != nil { t.Fatalf("unable to get file=%q : %+v", test.path, err) } - l := source.NewLocationFromImage(test.path, *ref.Reference, img) + l := file.NewLocationFromImage(test.path, *ref.Reference, img) if len(actual[l.Coordinates]) == 0 { if test.expected != "" { diff --git a/syft/file/test-fixtures/image-file-type-mix/Dockerfile b/syft/file/cataloger/filedigest/test-fixtures/image-file-type-mix/Dockerfile similarity index 74% rename from syft/file/test-fixtures/image-file-type-mix/Dockerfile rename to syft/file/cataloger/filedigest/test-fixtures/image-file-type-mix/Dockerfile index c2d61ef4da9..6ede1b68da8 100644 --- a/syft/file/test-fixtures/image-file-type-mix/Dockerfile +++ b/syft/file/cataloger/filedigest/test-fixtures/image-file-type-mix/Dockerfile @@ -1,4 +1,4 @@ -FROM busybox:latest +FROM busybox:1.28.1@sha256:c7b0a24019b0e6eda714ec0fa137ad42bc44a754d9cea17d14fba3a80ccc1ee4 ADD file-1.txt . 
RUN chmod 644 file-1.txt diff --git a/syft/file/test-fixtures/image-file-type-mix/file-1.txt b/syft/file/cataloger/filedigest/test-fixtures/image-file-type-mix/file-1.txt similarity index 100% rename from syft/file/test-fixtures/image-file-type-mix/file-1.txt rename to syft/file/cataloger/filedigest/test-fixtures/image-file-type-mix/file-1.txt diff --git a/syft/source/test-fixtures/symlinks-base/base b/syft/file/cataloger/filedigest/test-fixtures/last/empty/empty similarity index 100% rename from syft/source/test-fixtures/symlinks-base/base rename to syft/file/cataloger/filedigest/test-fixtures/last/empty/empty diff --git a/syft/file/cataloger/filedigest/test-fixtures/last/path.txt b/syft/file/cataloger/filedigest/test-fixtures/last/path.txt new file mode 100644 index 00000000000..3d4a165ab88 --- /dev/null +++ b/syft/file/cataloger/filedigest/test-fixtures/last/path.txt @@ -0,0 +1 @@ +test-fixtures/last/path.txt file contents! \ No newline at end of file diff --git a/syft/file/metadata_cataloger.go b/syft/file/cataloger/filemetadata/cataloger.go similarity index 59% rename from syft/file/metadata_cataloger.go rename to syft/file/cataloger/filemetadata/cataloger.go index 44d46f030ce..bae2344f479 100644 --- a/syft/file/metadata_cataloger.go +++ b/syft/file/cataloger/filemetadata/cataloger.go @@ -1,4 +1,4 @@ -package file +package filemetadata import ( "github.com/wagoodman/go-partybus" @@ -7,24 +7,37 @@ import ( "github.com/anchore/syft/internal/bus" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/event" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) -type MetadataCataloger struct { +type Cataloger struct { } -func NewMetadataCataloger() *MetadataCataloger { - return &MetadataCataloger{} +func NewCataloger() *Cataloger { + return &Cataloger{} } -func (i *MetadataCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates]source.FileMetadata, error) { - results := 
make(map[source.Coordinates]source.FileMetadata)
-	var locations []source.Location
-	for location := range resolver.AllLocations() {
-		locations = append(locations, location)
+func (i *Cataloger) Catalog(resolver file.Resolver, coordinates ...file.Coordinates) (map[file.Coordinates]file.Metadata, error) {
+	results := make(map[file.Coordinates]file.Metadata)
+	var locations <-chan file.Location
+
+	if len(coordinates) == 0 {
+		locations = resolver.AllLocations()
+	} else {
+		locations = func() <-chan file.Location {
+			ch := make(chan file.Location)
+			go func() {
+				defer close(ch)
+				for _, c := range coordinates {
+					ch <- file.NewLocationFromCoordinates(c)
+				}
+			}()
+			return ch
+		}()
 	}
+
 	stage, prog := metadataCatalogingProgress(int64(len(locations)))
-	for _, location := range locations {
+	for location := range locations {
 		stage.Current = location.RealPath
 		metadata, err := resolver.FileMetadataByLocation(location)
 		if err != nil {
diff --git a/syft/file/cataloger/filemetadata/cataloger_test.go b/syft/file/cataloger/filemetadata/cataloger_test.go
new file mode 100644
index 00000000000..99dfa908a9f
--- /dev/null
+++ b/syft/file/cataloger/filemetadata/cataloger_test.go
@@ -0,0 +1,158 @@
+package filemetadata
+
+import (
+	"os"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+
+	stereoscopeFile "github.com/anchore/stereoscope/pkg/file"
+	"github.com/anchore/stereoscope/pkg/imagetest"
+	"github.com/anchore/syft/syft/file"
+	"github.com/anchore/syft/syft/source"
+)
+
+func TestFileMetadataCataloger(t *testing.T) {
+	testImage := "image-file-type-mix"
+
+	img := imagetest.GetFixtureImage(t, "docker-archive", testImage)
+
+	c := NewCataloger()
+
+	src, err := source.NewFromImage(img, "---")
+	if err != nil {
+		t.Fatalf("could not create source: %+v", err)
+	}
+
+	resolver, err := src.FileResolver(source.SquashedScope)
+	if err != nil {
+		t.Fatalf("could not create resolver: %+v", err)
+	}
+
+	actual, err := c.Catalog(resolver)
+	if 
err != nil { + t.Fatalf("could not catalog: %+v", err) + } + + tests := []struct { + path string + exists bool + expected file.Metadata + err bool + }{ + // note: it is difficult to add a hardlink-based test in a cross-platform way and is already covered well in stereoscope + { + path: "/file-1.txt", + exists: true, + expected: file.Metadata{ + FileInfo: stereoscopeFile.ManualInfo{ + NameValue: "file-1.txt", + ModeValue: 0644, + SizeValue: 7, + }, + Path: "/file-1.txt", + Type: stereoscopeFile.TypeRegular, + UserID: 1, + GroupID: 2, + MIMEType: "text/plain", + }, + }, + { + path: "/symlink-1", + exists: true, + expected: file.Metadata{ + Path: "/symlink-1", + FileInfo: stereoscopeFile.ManualInfo{ + NameValue: "symlink-1", + ModeValue: 0777 | os.ModeSymlink, + }, + Type: stereoscopeFile.TypeSymLink, + LinkDestination: "file-1.txt", + UserID: 0, + GroupID: 0, + MIMEType: "", + }, + }, + { + path: "/char-device-1", + exists: true, + expected: file.Metadata{ + Path: "/char-device-1", + FileInfo: stereoscopeFile.ManualInfo{ + NameValue: "char-device-1", + ModeValue: 0644 | os.ModeDevice | os.ModeCharDevice, + }, + Type: stereoscopeFile.TypeCharacterDevice, + UserID: 0, + GroupID: 0, + MIMEType: "", + }, + }, + { + path: "/block-device-1", + exists: true, + expected: file.Metadata{ + Path: "/block-device-1", + FileInfo: stereoscopeFile.ManualInfo{ + NameValue: "block-device-1", + ModeValue: 0644 | os.ModeDevice, + }, + Type: stereoscopeFile.TypeBlockDevice, + UserID: 0, + GroupID: 0, + MIMEType: "", + }, + }, + { + path: "/fifo-1", + exists: true, + expected: file.Metadata{ + Path: "/fifo-1", + FileInfo: stereoscopeFile.ManualInfo{ + NameValue: "fifo-1", + ModeValue: 0644 | os.ModeNamedPipe, + }, + Type: stereoscopeFile.TypeFIFO, + UserID: 0, + GroupID: 0, + MIMEType: "", + }, + }, + { + path: "/bin", + exists: true, + expected: file.Metadata{ + Path: "/bin", + FileInfo: stereoscopeFile.ManualInfo{ + NameValue: "bin", + ModeValue: 0755 | os.ModeDir, + }, + Type: 
stereoscopeFile.TypeDirectory, + UserID: 0, + GroupID: 0, + MIMEType: "", + }, + }, + } + + for _, test := range tests { + t.Run(test.path, func(t *testing.T) { + _, ref, err := img.SquashedTree().File(stereoscopeFile.Path(test.path)) + require.NoError(t, err) + + l := file.NewLocationFromImage(test.path, *ref.Reference, img) + + if _, ok := actual[l.Coordinates]; ok { + // we're not interested in keeping the test fixtures up to date with the latest file modification times + // thus ModTime is not under test + fi := test.expected.FileInfo.(stereoscopeFile.ManualInfo) + fi.ModTimeValue = actual[l.Coordinates].ModTime() + test.expected.FileInfo = fi + } + + assert.True(t, test.expected.Equal(actual[l.Coordinates])) + }) + } + +} diff --git a/syft/file/cataloger/filemetadata/test-fixtures/image-file-type-mix/Dockerfile b/syft/file/cataloger/filemetadata/test-fixtures/image-file-type-mix/Dockerfile new file mode 100644 index 00000000000..6ede1b68da8 --- /dev/null +++ b/syft/file/cataloger/filemetadata/test-fixtures/image-file-type-mix/Dockerfile @@ -0,0 +1,13 @@ +FROM busybox:1.28.1@sha256:c7b0a24019b0e6eda714ec0fa137ad42bc44a754d9cea17d14fba3a80ccc1ee4 + +ADD file-1.txt . 
+RUN chmod 644 file-1.txt +RUN chown 1:2 file-1.txt +RUN ln -s file-1.txt symlink-1 +# note: hard links may behave inconsistently, this should be a golden image +RUN ln file-1.txt hardlink-1 +RUN mknod char-device-1 c 89 1 +RUN mknod block-device-1 b 0 1 +RUN mknod fifo-1 p +RUN mkdir /dir +RUN rm -rf home etc/group etc/localtime etc/mtab etc/network etc/passwd etc/shadow var usr bin/* \ No newline at end of file diff --git a/syft/source/test-fixtures/image-symlinks/file-1.txt b/syft/file/cataloger/filemetadata/test-fixtures/image-file-type-mix/file-1.txt similarity index 100% rename from syft/source/test-fixtures/image-symlinks/file-1.txt rename to syft/file/cataloger/filemetadata/test-fixtures/image-file-type-mix/file-1.txt diff --git a/syft/file/all_regular_files.go b/syft/file/cataloger/internal/all_regular_files.go similarity index 70% rename from syft/file/all_regular_files.go rename to syft/file/cataloger/internal/all_regular_files.go index 5dcf8974430..ccc1b3813ba 100644 --- a/syft/file/all_regular_files.go +++ b/syft/file/cataloger/internal/all_regular_files.go @@ -1,12 +1,12 @@ -package file +package internal import ( - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" "github.com/anchore/syft/internal/log" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) -func allRegularFiles(resolver source.FileResolver) (locations []source.Location) { +func AllRegularFiles(resolver file.Resolver) (locations []file.Location) { for location := range resolver.AllLocations() { resolvedLocations, err := resolver.FilesByPath(location.RealPath) if err != nil { @@ -21,7 +21,7 @@ func allRegularFiles(resolver source.FileResolver) (locations []source.Location) continue } - if metadata.Type != file.TypeRegular { + if metadata.Type != stereoscopeFile.TypeRegular { continue } locations = append(locations, resolvedLocation) diff --git a/syft/file/all_regular_files_test.go 
b/syft/file/cataloger/internal/all_regular_files_test.go similarity index 78% rename from syft/file/all_regular_files_test.go rename to syft/file/cataloger/internal/all_regular_files_test.go index 096480721a5..714e733e689 100644 --- a/syft/file/all_regular_files_test.go +++ b/syft/file/cataloger/internal/all_regular_files_test.go @@ -1,4 +1,4 @@ -package file +package internal import ( "testing" @@ -9,30 +9,23 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/stereoscope/pkg/imagetest" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/source" ) func Test_allRegularFiles(t *testing.T) { - type access struct { - realPath string - virtualPath string - } tests := []struct { name string - setup func() source.FileResolver + setup func() file.Resolver wantRealPaths *strset.Set wantVirtualPaths *strset.Set }{ { name: "image", - setup: func() source.FileResolver { + setup: func() file.Resolver { testImage := "image-file-type-mix" - if *updateImageGoldenFiles { - imagetest.UpdateGoldenFixtureImage(t, testImage) - } - - img := imagetest.GetGoldenFixtureImage(t, testImage) + img := imagetest.GetFixtureImage(t, "docker-archive", testImage) s, err := source.NewFromImage(img, "---") require.NoError(t, err) @@ -47,7 +40,7 @@ func Test_allRegularFiles(t *testing.T) { }, { name: "directory", - setup: func() source.FileResolver { + setup: func() file.Resolver { s, err := source.NewFromDirectory("test-fixtures/symlinked-root/nested/link-root") require.NoError(t, err) r, err := s.FileResolver(source.SquashedScope) @@ -61,7 +54,7 @@ func Test_allRegularFiles(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { resolver := tt.setup() - locations := allRegularFiles(resolver) + locations := AllRegularFiles(resolver) realLocations := strset.New() virtualLocations := strset.New() for _, l := range locations { @@ -70,6 +63,13 @@ func Test_allRegularFiles(t *testing.T) { virtualLocations.Add(l.VirtualPath) } } + + // this is 
difficult to reproduce in a cross-platform way + realLocations.Remove("/hardlink-1") + virtualLocations.Remove("/hardlink-1") + tt.wantRealPaths.Remove("/hardlink-1") + tt.wantVirtualPaths.Remove("/hardlink-1") + assert.ElementsMatch(t, tt.wantRealPaths.List(), realLocations.List(), "real paths differ: "+cmp.Diff(tt.wantRealPaths.List(), realLocations.List())) assert.ElementsMatch(t, tt.wantVirtualPaths.List(), virtualLocations.List(), "virtual paths differ: "+cmp.Diff(tt.wantVirtualPaths.List(), virtualLocations.List())) }) diff --git a/syft/file/cataloger/internal/test-fixtures/image-file-type-mix/Dockerfile b/syft/file/cataloger/internal/test-fixtures/image-file-type-mix/Dockerfile new file mode 100644 index 00000000000..6ede1b68da8 --- /dev/null +++ b/syft/file/cataloger/internal/test-fixtures/image-file-type-mix/Dockerfile @@ -0,0 +1,13 @@ +FROM busybox:1.28.1@sha256:c7b0a24019b0e6eda714ec0fa137ad42bc44a754d9cea17d14fba3a80ccc1ee4 + +ADD file-1.txt . +RUN chmod 644 file-1.txt +RUN chown 1:2 file-1.txt +RUN ln -s file-1.txt symlink-1 +# note: hard links may behave inconsistently, this should be a golden image +RUN ln file-1.txt hardlink-1 +RUN mknod char-device-1 c 89 1 +RUN mknod block-device-1 b 0 1 +RUN mknod fifo-1 p +RUN mkdir /dir +RUN rm -rf home etc/group etc/localtime etc/mtab etc/network etc/passwd etc/shadow var usr bin/* \ No newline at end of file diff --git a/syft/file/cataloger/internal/test-fixtures/image-file-type-mix/file-1.txt b/syft/file/cataloger/internal/test-fixtures/image-file-type-mix/file-1.txt new file mode 100644 index 00000000000..d86db8155c3 --- /dev/null +++ b/syft/file/cataloger/internal/test-fixtures/image-file-type-mix/file-1.txt @@ -0,0 +1 @@ +file 1! 
\ No newline at end of file diff --git a/syft/file/test-fixtures/symlinked-root/nested/link-root b/syft/file/cataloger/internal/test-fixtures/symlinked-root/nested/link-root similarity index 100% rename from syft/file/test-fixtures/symlinked-root/nested/link-root rename to syft/file/cataloger/internal/test-fixtures/symlinked-root/nested/link-root diff --git a/syft/file/test-fixtures/symlinked-root/real-root/file1.txt b/syft/file/cataloger/internal/test-fixtures/symlinked-root/real-root/file1.txt similarity index 100% rename from syft/file/test-fixtures/symlinked-root/real-root/file1.txt rename to syft/file/cataloger/internal/test-fixtures/symlinked-root/real-root/file1.txt diff --git a/syft/file/test-fixtures/symlinked-root/real-root/nested/file2.txt b/syft/file/cataloger/internal/test-fixtures/symlinked-root/real-root/nested/file2.txt similarity index 100% rename from syft/file/test-fixtures/symlinked-root/real-root/nested/file2.txt rename to syft/file/cataloger/internal/test-fixtures/symlinked-root/real-root/nested/file2.txt diff --git a/syft/file/test-fixtures/symlinked-root/real-root/nested/linked-file1.txt b/syft/file/cataloger/internal/test-fixtures/symlinked-root/real-root/nested/linked-file1.txt similarity index 100% rename from syft/file/test-fixtures/symlinked-root/real-root/nested/linked-file1.txt rename to syft/file/cataloger/internal/test-fixtures/symlinked-root/real-root/nested/linked-file1.txt diff --git a/syft/file/secrets_cataloger.go b/syft/file/cataloger/secrets/cataloger.go similarity index 80% rename from syft/file/secrets_cataloger.go rename to syft/file/cataloger/secrets/cataloger.go index d30e16068f3..488f849b53b 100644 --- a/syft/file/secrets_cataloger.go +++ b/syft/file/cataloger/secrets/cataloger.go @@ -1,4 +1,4 @@ -package file +package secrets import ( "bytes" @@ -14,7 +14,8 @@ import ( "github.com/anchore/syft/internal/bus" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/event" - 
"github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" + internal2 "github.com/anchore/syft/syft/file/cataloger/internal" ) var DefaultSecretsPatterns = map[string]string{ @@ -25,23 +26,25 @@ var DefaultSecretsPatterns = map[string]string{ "generic-api-key": `(?i)api(-|_)?key["'=:\s]*?(?P[A-Z0-9]{20,60})["']?(\s|$)`, } -type SecretsCataloger struct { +// Deprecated: will be removed in syft v1.0.0 +type Cataloger struct { patterns map[string]*regexp.Regexp revealValues bool skipFilesAboveSize int64 } -func NewSecretsCataloger(patterns map[string]*regexp.Regexp, revealValues bool, maxFileSize int64) (*SecretsCataloger, error) { - return &SecretsCataloger{ +// Deprecated: will be removed in syft v1.0.0 +func NewCataloger(patterns map[string]*regexp.Regexp, revealValues bool, maxFileSize int64) (*Cataloger, error) { + return &Cataloger{ patterns: patterns, revealValues: revealValues, skipFilesAboveSize: maxFileSize, }, nil } -func (i *SecretsCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]SearchResult, error) { - results := make(map[source.Coordinates][]SearchResult) - locations := allRegularFiles(resolver) +func (i *Cataloger) Catalog(resolver file.Resolver) (map[file.Coordinates][]file.SearchResult, error) { + results := make(map[file.Coordinates][]file.SearchResult) + locations := internal2.AllRegularFiles(resolver) stage, prog, secretsDiscovered := secretsCatalogingProgress(int64(len(locations))) for _, location := range locations { stage.Current = location.RealPath @@ -65,17 +68,17 @@ func (i *SecretsCataloger) Catalog(resolver source.FileResolver) (map[source.Coo return results, nil } -func (i *SecretsCataloger) catalogLocation(resolver source.FileResolver, location source.Location) ([]SearchResult, error) { +func (i *Cataloger) catalogLocation(resolver file.Resolver, location file.Location) ([]file.SearchResult, error) { metadata, err := resolver.FileMetadataByLocation(location) if err != nil { return nil, err } - 
if metadata.Size == 0 { + if metadata.Size() == 0 { return nil, nil } - if i.skipFilesAboveSize > 0 && metadata.Size > i.skipFilesAboveSize { + if i.skipFilesAboveSize > 0 && metadata.Size() > i.skipFilesAboveSize { return nil, nil } @@ -103,7 +106,7 @@ func (i *SecretsCataloger) catalogLocation(resolver source.FileResolver, locatio return secrets, nil } -func extractValue(resolver source.FileResolver, location source.Location, start, length int64) (string, error) { +func extractValue(resolver file.Resolver, location file.Location, start, length int64) (string, error) { readCloser, err := resolver.FileContentsByLocation(location) if err != nil { return "", fmt.Errorf("unable to fetch reader for location=%q : %w", location, err) @@ -130,7 +133,7 @@ func extractValue(resolver source.FileResolver, location source.Location, start, return buf.String(), nil } -type SecretsMonitor struct { +type Monitor struct { progress.Stager SecretsDiscovered progress.Monitorable progress.Progressable @@ -144,7 +147,7 @@ func secretsCatalogingProgress(locations int64) (*progress.Stage, *progress.Manu bus.Publish(partybus.Event{ Type: event.SecretsCatalogerStarted, Source: secretsDiscovered, - Value: SecretsMonitor{ + Value: Monitor{ Stager: progress.Stager(stage), SecretsDiscovered: secretsDiscovered, Progressable: prog, diff --git a/syft/file/secrets_cataloger_test.go b/syft/file/cataloger/secrets/cataloger_test.go similarity index 92% rename from syft/file/secrets_cataloger_test.go rename to syft/file/cataloger/secrets/cataloger_test.go index b2c55a9f3f0..2a44417ba5d 100644 --- a/syft/file/secrets_cataloger_test.go +++ b/syft/file/cataloger/secrets/cataloger_test.go @@ -1,4 +1,4 @@ -package file +package secrets import ( "regexp" @@ -6,8 +6,8 @@ import ( "github.com/stretchr/testify/assert" - "github.com/anchore/syft/internal/file" - "github.com/anchore/syft/syft/source" + intFile "github.com/anchore/syft/internal/file" + "github.com/anchore/syft/syft/file" ) func 
TestSecretsCataloger(t *testing.T) { @@ -17,7 +17,7 @@ func TestSecretsCataloger(t *testing.T) { reveal bool maxSize int64 patterns map[string]string - expected []SearchResult + expected []file.SearchResult constructorErr bool catalogErr bool }{ @@ -28,7 +28,7 @@ func TestSecretsCataloger(t *testing.T) { patterns: map[string]string{ "simple-secret-key": `^secret_key=.*`, }, - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "simple-secret-key", LineNumber: 2, @@ -46,7 +46,7 @@ func TestSecretsCataloger(t *testing.T) { patterns: map[string]string{ "simple-secret-key": `^secret_key=.*`, }, - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "simple-secret-key", LineNumber: 2, @@ -64,7 +64,7 @@ func TestSecretsCataloger(t *testing.T) { patterns: map[string]string{ "simple-secret-key": `^secret_key=(?P.*)`, }, - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "simple-secret-key", LineNumber: 2, @@ -82,7 +82,7 @@ func TestSecretsCataloger(t *testing.T) { patterns: map[string]string{ "simple-secret-key": `secret_key=.*`, }, - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "simple-secret-key", LineNumber: 1, @@ -125,7 +125,7 @@ func TestSecretsCataloger(t *testing.T) { patterns: map[string]string{ "simple-secret-key": `secret_key=(?P.*)`, }, - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "simple-secret-key", LineNumber: 1, @@ -176,7 +176,7 @@ func TestSecretsCataloger(t *testing.T) { regexObjs[name] = obj } - c, err := NewSecretsCataloger(regexObjs, test.reveal, test.maxSize) + c, err := NewCataloger(regexObjs, test.reveal, test.maxSize) if err != nil && !test.constructorErr { t.Fatalf("could not create cataloger (but should have been able to): %+v", err) } else if err == nil && test.constructorErr { @@ -185,7 +185,7 @@ func TestSecretsCataloger(t *testing.T) { return } - resolver := source.NewMockResolverForPaths(test.fixture) 
+ resolver := file.NewMockResolverForPaths(test.fixture) actualResults, err := c.Catalog(resolver) if err != nil && !test.catalogErr { @@ -196,7 +196,7 @@ func TestSecretsCataloger(t *testing.T) { return } - loc := source.NewLocation(test.fixture) + loc := file.NewLocation(test.fixture) if _, exists := actualResults[loc.Coordinates]; !exists { t.Fatalf("could not find location=%q in results", loc) } @@ -214,11 +214,11 @@ func TestSecretsCataloger_DefaultSecrets(t *testing.T) { tests := []struct { fixture string - expected []SearchResult + expected []file.SearchResult }{ { fixture: "test-fixtures/secrets/default/aws.env", - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "aws-access-key", LineNumber: 2, @@ -239,7 +239,7 @@ func TestSecretsCataloger_DefaultSecrets(t *testing.T) { }, { fixture: "test-fixtures/secrets/default/aws.ini", - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "aws-access-key", LineNumber: 3, @@ -260,7 +260,7 @@ func TestSecretsCataloger_DefaultSecrets(t *testing.T) { }, { fixture: "test-fixtures/secrets/default/private-key.pem", - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "pem-private-key", LineNumber: 2, @@ -280,7 +280,7 @@ z3P668YfhUbKdRF6S42Cg6zn }, { fixture: "test-fixtures/secrets/default/private-key-openssl.pem", - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "pem-private-key", LineNumber: 2, @@ -302,7 +302,7 @@ z3P668YfhUbKdRF6S42Cg6zn // note: this test proves that the PEM regex matches the smallest possible match // since the test catches two adjacent secrets fixture: "test-fixtures/secrets/default/private-keys.pem", - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "pem-private-key", LineNumber: 1, @@ -345,7 +345,7 @@ j4f668YfhUbKdRF6S6734856 // 2. a named capture group with the correct line number and line offset case // 3. 
the named capture group is in a different line than the match start, and both the match start and the capture group have different line offsets fixture: "test-fixtures/secrets/default/docker-config.json", - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "docker-config-auth", LineNumber: 5, @@ -362,7 +362,7 @@ j4f668YfhUbKdRF6S6734856 }, { fixture: "test-fixtures/secrets/default/api-key.txt", - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "generic-api-key", LineNumber: 2, @@ -418,19 +418,19 @@ j4f668YfhUbKdRF6S6734856 for _, test := range tests { t.Run(test.fixture, func(t *testing.T) { - c, err := NewSecretsCataloger(regexObjs, true, 10*file.MB) + c, err := NewCataloger(regexObjs, true, 10*intFile.MB) if err != nil { t.Fatalf("could not create cataloger: %+v", err) } - resolver := source.NewMockResolverForPaths(test.fixture) + resolver := file.NewMockResolverForPaths(test.fixture) actualResults, err := c.Catalog(resolver) if err != nil { t.Fatalf("could not catalog: %+v", err) } - loc := source.NewLocation(test.fixture) + loc := file.NewLocation(test.fixture) if _, exists := actualResults[loc.Coordinates]; !exists && test.expected != nil { t.Fatalf("could not find location=%q in results", loc) } else if !exists && test.expected == nil { diff --git a/syft/file/generate_search_patterns.go b/syft/file/cataloger/secrets/generate_search_patterns.go similarity index 98% rename from syft/file/generate_search_patterns.go rename to syft/file/cataloger/secrets/generate_search_patterns.go index 5e2c074dc35..a46ff483cc4 100644 --- a/syft/file/generate_search_patterns.go +++ b/syft/file/cataloger/secrets/generate_search_patterns.go @@ -1,4 +1,4 @@ -package file +package secrets import ( "fmt" diff --git a/syft/file/generate_search_patterns_test.go b/syft/file/cataloger/secrets/generate_search_patterns_test.go similarity index 99% rename from syft/file/generate_search_patterns_test.go rename to 
syft/file/cataloger/secrets/generate_search_patterns_test.go index bdd6d422bdf..37dc3441d1a 100644 --- a/syft/file/generate_search_patterns_test.go +++ b/syft/file/cataloger/secrets/generate_search_patterns_test.go @@ -1,4 +1,4 @@ -package file +package secrets import ( "testing" diff --git a/syft/file/newline_counter.go b/syft/file/cataloger/secrets/newline_counter.go similarity index 97% rename from syft/file/newline_counter.go rename to syft/file/cataloger/secrets/newline_counter.go index fec905523cd..d3c8ef894fc 100644 --- a/syft/file/newline_counter.go +++ b/syft/file/cataloger/secrets/newline_counter.go @@ -1,4 +1,4 @@ -package file +package secrets import "io" diff --git a/syft/file/newline_counter_test.go b/syft/file/cataloger/secrets/newline_counter_test.go similarity index 98% rename from syft/file/newline_counter_test.go rename to syft/file/cataloger/secrets/newline_counter_test.go index 24282bceb84..0760e892cd3 100644 --- a/syft/file/newline_counter_test.go +++ b/syft/file/cataloger/secrets/newline_counter_test.go @@ -1,4 +1,4 @@ -package file +package secrets import ( "bufio" diff --git a/syft/file/secrets_search_by_line_strategy.go b/syft/file/cataloger/secrets/secrets_search_by_line_strategy.go similarity index 86% rename from syft/file/secrets_search_by_line_strategy.go rename to syft/file/cataloger/secrets/secrets_search_by_line_strategy.go index d241846fab1..f1beeb525e7 100644 --- a/syft/file/secrets_search_by_line_strategy.go +++ b/syft/file/cataloger/secrets/secrets_search_by_line_strategy.go @@ -1,4 +1,4 @@ -package file +package secrets import ( "bufio" @@ -8,10 +8,10 @@ import ( "regexp" "github.com/anchore/syft/internal" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) -func catalogLocationByLine(resolver source.FileResolver, location source.Location, patterns map[string]*regexp.Regexp) ([]SearchResult, error) { +func catalogLocationByLine(resolver file.Resolver, location file.Location, patterns 
map[string]*regexp.Regexp) ([]file.SearchResult, error) { readCloser, err := resolver.FileContentsByLocation(location) if err != nil { return nil, fmt.Errorf("unable to fetch reader for location=%q : %w", location, err) @@ -20,7 +20,7 @@ func catalogLocationByLine(resolver source.FileResolver, location source.Locatio var scanner = bufio.NewReader(readCloser) var position int64 - var allSecrets []SearchResult + var allSecrets []file.SearchResult var lineNo int64 var readErr error for !errors.Is(readErr, io.EOF) { @@ -43,8 +43,8 @@ func catalogLocationByLine(resolver source.FileResolver, location source.Locatio return allSecrets, nil } -func searchForSecretsWithinLine(resolver source.FileResolver, location source.Location, patterns map[string]*regexp.Regexp, line []byte, lineNo int64, position int64) ([]SearchResult, error) { - var secrets []SearchResult +func searchForSecretsWithinLine(resolver file.Resolver, location file.Location, patterns map[string]*regexp.Regexp, line []byte, lineNo int64, position int64) ([]file.SearchResult, error) { + var secrets []file.SearchResult for name, pattern := range patterns { matches := pattern.FindAllIndex(line, -1) for i, match := range matches { @@ -72,7 +72,7 @@ func searchForSecretsWithinLine(resolver source.FileResolver, location source.Lo return secrets, nil } -func readerAtPosition(resolver source.FileResolver, location source.Location, seekPosition int64) (io.ReadCloser, error) { +func readerAtPosition(resolver file.Resolver, location file.Location, seekPosition int64) (io.ReadCloser, error) { readCloser, err := resolver.FileContentsByLocation(location) if err != nil { return nil, fmt.Errorf("unable to fetch reader for location=%q : %w", location, err) @@ -89,7 +89,7 @@ func readerAtPosition(resolver source.FileResolver, location source.Location, se return readCloser, nil } -func extractSecretFromPosition(readCloser io.ReadCloser, name string, pattern *regexp.Regexp, lineNo, lineOffset, seekPosition int64) *SearchResult { 
+func extractSecretFromPosition(readCloser io.ReadCloser, name string, pattern *regexp.Regexp, lineNo, lineOffset, seekPosition int64) *file.SearchResult { reader := &newlineCounter{RuneReader: bufio.NewReader(readCloser)} positions := pattern.FindReaderSubmatchIndex(reader) if len(positions) == 0 { @@ -125,7 +125,7 @@ func extractSecretFromPosition(readCloser io.ReadCloser, name string, pattern *r lineOffsetOfSecret += lineOffset } - return &SearchResult{ + return &file.SearchResult{ Classification: name, SeekPosition: start + seekPosition, Length: stop - start, diff --git a/syft/file/test-fixtures/secrets/default/api-key.txt b/syft/file/cataloger/secrets/test-fixtures/secrets/default/api-key.txt similarity index 100% rename from syft/file/test-fixtures/secrets/default/api-key.txt rename to syft/file/cataloger/secrets/test-fixtures/secrets/default/api-key.txt diff --git a/syft/file/test-fixtures/secrets/default/aws.env b/syft/file/cataloger/secrets/test-fixtures/secrets/default/aws.env similarity index 100% rename from syft/file/test-fixtures/secrets/default/aws.env rename to syft/file/cataloger/secrets/test-fixtures/secrets/default/aws.env diff --git a/syft/file/test-fixtures/secrets/default/aws.ini b/syft/file/cataloger/secrets/test-fixtures/secrets/default/aws.ini similarity index 100% rename from syft/file/test-fixtures/secrets/default/aws.ini rename to syft/file/cataloger/secrets/test-fixtures/secrets/default/aws.ini diff --git a/syft/file/test-fixtures/secrets/default/docker-config.json b/syft/file/cataloger/secrets/test-fixtures/secrets/default/docker-config.json similarity index 100% rename from syft/file/test-fixtures/secrets/default/docker-config.json rename to syft/file/cataloger/secrets/test-fixtures/secrets/default/docker-config.json diff --git a/syft/file/test-fixtures/secrets/default/not-docker-config.json b/syft/file/cataloger/secrets/test-fixtures/secrets/default/not-docker-config.json similarity index 100% rename from 
syft/file/test-fixtures/secrets/default/not-docker-config.json rename to syft/file/cataloger/secrets/test-fixtures/secrets/default/not-docker-config.json diff --git a/syft/file/test-fixtures/secrets/default/private-key-false-positive.pem b/syft/file/cataloger/secrets/test-fixtures/secrets/default/private-key-false-positive.pem similarity index 100% rename from syft/file/test-fixtures/secrets/default/private-key-false-positive.pem rename to syft/file/cataloger/secrets/test-fixtures/secrets/default/private-key-false-positive.pem diff --git a/syft/file/test-fixtures/secrets/default/private-key-openssl.pem b/syft/file/cataloger/secrets/test-fixtures/secrets/default/private-key-openssl.pem similarity index 100% rename from syft/file/test-fixtures/secrets/default/private-key-openssl.pem rename to syft/file/cataloger/secrets/test-fixtures/secrets/default/private-key-openssl.pem diff --git a/syft/file/test-fixtures/secrets/default/private-key.pem b/syft/file/cataloger/secrets/test-fixtures/secrets/default/private-key.pem similarity index 100% rename from syft/file/test-fixtures/secrets/default/private-key.pem rename to syft/file/cataloger/secrets/test-fixtures/secrets/default/private-key.pem diff --git a/syft/file/test-fixtures/secrets/default/private-keys.pem b/syft/file/cataloger/secrets/test-fixtures/secrets/default/private-keys.pem similarity index 100% rename from syft/file/test-fixtures/secrets/default/private-keys.pem rename to syft/file/cataloger/secrets/test-fixtures/secrets/default/private-keys.pem diff --git a/syft/file/test-fixtures/secrets/multiple.txt b/syft/file/cataloger/secrets/test-fixtures/secrets/multiple.txt similarity index 100% rename from syft/file/test-fixtures/secrets/multiple.txt rename to syft/file/cataloger/secrets/test-fixtures/secrets/multiple.txt diff --git a/syft/file/test-fixtures/secrets/simple.txt b/syft/file/cataloger/secrets/test-fixtures/secrets/simple.txt similarity index 100% rename from syft/file/test-fixtures/secrets/simple.txt 
rename to syft/file/cataloger/secrets/test-fixtures/secrets/simple.txt diff --git a/syft/file/contents_cataloger_test.go b/syft/file/contents_cataloger_test.go deleted file mode 100644 index 526baae5c33..00000000000 --- a/syft/file/contents_cataloger_test.go +++ /dev/null @@ -1,80 +0,0 @@ -package file - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/anchore/syft/syft/source" -) - -func TestContentsCataloger(t *testing.T) { - allFiles := []string{"test-fixtures/last/path.txt", "test-fixtures/another-path.txt", "test-fixtures/a-path.txt"} - - tests := []struct { - name string - globs []string - maxSize int64 - files []string - expected map[source.Coordinates]string - }{ - { - name: "multi-pattern", - globs: []string{"test-fixtures/last/*.txt", "test-fixtures/*.txt"}, - files: allFiles, - expected: map[source.Coordinates]string{ - source.NewLocation("test-fixtures/last/path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9sYXN0L3BhdGgudHh0IGZpbGUgY29udGVudHMh", - source.NewLocation("test-fixtures/another-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hbm90aGVyLXBhdGgudHh0IGZpbGUgY29udGVudHMh", - source.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", - }, - }, - { - name: "no-patterns", - globs: []string{}, - files: []string{"test-fixtures/last/path.txt", "test-fixtures/another-path.txt", "test-fixtures/a-path.txt"}, - expected: map[source.Coordinates]string{}, - }, - { - name: "all-txt", - globs: []string{"**/*.txt"}, - files: allFiles, - expected: map[source.Coordinates]string{ - source.NewLocation("test-fixtures/last/path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9sYXN0L3BhdGgudHh0IGZpbGUgY29udGVudHMh", - source.NewLocation("test-fixtures/another-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hbm90aGVyLXBhdGgudHh0IGZpbGUgY29udGVudHMh", - source.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", - }, - }, - { - name: "subpath", - globs: 
[]string{"test-fixtures/*.txt"}, - files: allFiles, - expected: map[source.Coordinates]string{ - source.NewLocation("test-fixtures/another-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hbm90aGVyLXBhdGgudHh0IGZpbGUgY29udGVudHMh", - source.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", - }, - }, - { - name: "size-filter", - maxSize: 42, - globs: []string{"**/*.txt"}, - files: allFiles, - expected: map[source.Coordinates]string{ - source.NewLocation("test-fixtures/last/path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9sYXN0L3BhdGgudHh0IGZpbGUgY29udGVudHMh", - source.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", - }, - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - c, err := NewContentsCataloger(test.globs, test.maxSize) - assert.NoError(t, err) - - resolver := source.NewMockResolverForPaths(test.files...) - actual, err := c.Catalog(resolver) - assert.NoError(t, err) - assert.Equal(t, test.expected, actual, "mismatched contents") - - }) - } -} diff --git a/syft/source/coordinate_set.go b/syft/file/coordinate_set.go similarity index 99% rename from syft/source/coordinate_set.go rename to syft/file/coordinate_set.go index 0ae080c270f..ba56005d302 100644 --- a/syft/source/coordinate_set.go +++ b/syft/file/coordinate_set.go @@ -1,4 +1,4 @@ -package source +package file import ( "sort" diff --git a/syft/source/coordinate_set_test.go b/syft/file/coordinate_set_test.go similarity index 99% rename from syft/source/coordinate_set_test.go rename to syft/file/coordinate_set_test.go index 7f50a61ffd2..6fae658e76b 100644 --- a/syft/source/coordinate_set_test.go +++ b/syft/file/coordinate_set_test.go @@ -1,4 +1,4 @@ -package source +package file import ( "testing" diff --git a/syft/source/coordinates.go b/syft/file/coordinates.go similarity index 98% rename from syft/source/coordinates.go rename to syft/file/coordinates.go index 
c35d3dcc297..24ba486ae91 100644 --- a/syft/source/coordinates.go +++ b/syft/file/coordinates.go @@ -1,4 +1,4 @@ -package source +package file import ( "fmt" diff --git a/syft/file/digest.go b/syft/file/digest.go index 87b53dbb833..23219e68875 100644 --- a/syft/file/digest.go +++ b/syft/file/digest.go @@ -1,6 +1,76 @@ package file +import ( + "crypto" + "fmt" + "hash" + "io" + "strings" +) + type Digest struct { Algorithm string `json:"algorithm"` Value string `json:"value"` } + +func NewDigestsFromFile(closer io.ReadCloser, hashes []crypto.Hash) ([]Digest, error) { + // create a set of hasher objects tied together with a single writer to feed content into + hashers := make([]hash.Hash, len(hashes)) + writers := make([]io.Writer, len(hashes)) + for idx, hashObj := range hashes { + hashers[idx] = hashObj.New() + writers[idx] = hashers[idx] + } + + size, err := io.Copy(io.MultiWriter(writers...), closer) + if err != nil { + return nil, err + } + + if size == 0 { + return make([]Digest, 0), nil + } + + result := make([]Digest, len(hashes)) + // only capture digests when there is content. It is important to do this based on SIZE and not + // FILE TYPE. The reasoning is that it is possible for a tar to be crafted with a header-only + // file type but a body is still allowed. 
+ for idx, hasher := range hashers { + result[idx] = Digest{ + Algorithm: DigestAlgorithmName(hashes[idx]), + Value: fmt.Sprintf("%+x", hasher.Sum(nil)), + } + } + + return result, nil +} + +func Hashers(names ...string) ([]crypto.Hash, error) { + supportedHashAlgorithms := make(map[string]crypto.Hash) + for _, h := range []crypto.Hash{ + crypto.MD5, + crypto.SHA1, + crypto.SHA256, + } { + supportedHashAlgorithms[DigestAlgorithmName(h)] = h + } + + var hashers []crypto.Hash + for _, hashStr := range names { + hashObj, ok := supportedHashAlgorithms[CleanDigestAlgorithmName(hashStr)] + if !ok { + return nil, fmt.Errorf("unsupported hash algorithm: %s", hashStr) + } + hashers = append(hashers, hashObj) + } + return hashers, nil +} + +func DigestAlgorithmName(hash crypto.Hash) string { + return CleanDigestAlgorithmName(hash.String()) +} + +func CleanDigestAlgorithmName(name string) string { + lower := strings.ToLower(name) + return strings.ReplaceAll(lower, "-", "") +} diff --git a/syft/file/digest_cataloger.go b/syft/file/digest_cataloger.go deleted file mode 100644 index db2d468a31f..00000000000 --- a/syft/file/digest_cataloger.go +++ /dev/null @@ -1,140 +0,0 @@ -package file - -import ( - "crypto" - "errors" - "fmt" - "hash" - "io" - "strings" - - "github.com/wagoodman/go-partybus" - "github.com/wagoodman/go-progress" - - "github.com/anchore/stereoscope/pkg/file" - "github.com/anchore/syft/internal" - "github.com/anchore/syft/internal/bus" - "github.com/anchore/syft/internal/log" - "github.com/anchore/syft/syft/event" - "github.com/anchore/syft/syft/source" -) - -var errUndigestableFile = errors.New("undigestable file") - -type DigestsCataloger struct { - hashes []crypto.Hash -} - -func NewDigestsCataloger(hashes []crypto.Hash) (*DigestsCataloger, error) { - return &DigestsCataloger{ - hashes: hashes, - }, nil -} - -func (i *DigestsCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]Digest, error) { - results := 
make(map[source.Coordinates][]Digest) - locations := allRegularFiles(resolver) - stage, prog := digestsCatalogingProgress(int64(len(locations))) - for _, location := range locations { - stage.Current = location.RealPath - result, err := i.catalogLocation(resolver, location) - - if errors.Is(err, errUndigestableFile) { - continue - } - - if internal.IsErrPathPermission(err) { - log.Debugf("file digests cataloger skipping %q: %+v", location.RealPath, err) - continue - } - - if err != nil { - return nil, err - } - prog.Increment() - results[location.Coordinates] = result - } - log.Debugf("file digests cataloger processed %d files", prog.Current()) - prog.SetCompleted() - return results, nil -} - -func (i *DigestsCataloger) catalogLocation(resolver source.FileResolver, location source.Location) ([]Digest, error) { - meta, err := resolver.FileMetadataByLocation(location) - if err != nil { - return nil, err - } - - // we should only attempt to report digests for files that are regular files (don't attempt to resolve links) - if meta.Type != file.TypeRegular { - return nil, errUndigestableFile - } - - contentReader, err := resolver.FileContentsByLocation(location) - if err != nil { - return nil, err - } - defer internal.CloseAndLogError(contentReader, location.VirtualPath) - - digests, err := DigestsFromFile(contentReader, i.hashes) - if err != nil { - return nil, internal.ErrPath{Context: "digests-cataloger", Path: location.RealPath, Err: err} - } - - return digests, nil -} - -func DigestsFromFile(closer io.ReadCloser, hashes []crypto.Hash) ([]Digest, error) { - // create a set of hasher objects tied together with a single writer to feed content into - hashers := make([]hash.Hash, len(hashes)) - writers := make([]io.Writer, len(hashes)) - for idx, hashObj := range hashes { - hashers[idx] = hashObj.New() - writers[idx] = hashers[idx] - } - - _, err := io.Copy(io.MultiWriter(writers...), closer) - if err != nil { - return nil, err - } - - result := make([]Digest, 
len(hashes)) - // only capture digests when there is content. It is important to do this based on SIZE and not - // FILE TYPE. The reasoning is that it is possible for a tar to be crafted with a header-only - // file type but a body is still allowed. - for idx, hasher := range hashers { - result[idx] = Digest{ - Algorithm: DigestAlgorithmName(hashes[idx]), - Value: fmt.Sprintf("%+x", hasher.Sum(nil)), - } - } - - return result, nil -} - -func DigestAlgorithmName(hash crypto.Hash) string { - return CleanDigestAlgorithmName(hash.String()) -} - -func CleanDigestAlgorithmName(name string) string { - lower := strings.ToLower(name) - return strings.ReplaceAll(lower, "-", "") -} - -func digestsCatalogingProgress(locations int64) (*progress.Stage, *progress.Manual) { - stage := &progress.Stage{} - prog := progress.NewManual(locations) - - bus.Publish(partybus.Event{ - Type: event.FileDigestsCatalogerStarted, - Value: struct { - progress.Stager - progress.Progressable - }{ - Stager: progress.Stager(stage), - Progressable: prog, - }, - }) - - return stage, prog -} diff --git a/syft/source/location.go b/syft/file/location.go similarity index 94% rename from syft/source/location.go rename to syft/file/location.go index 3abadd3fb1f..65af91c5164 100644 --- a/syft/source/location.go +++ b/syft/file/location.go @@ -1,4 +1,4 @@ -package source +package file import ( "fmt" @@ -24,6 +24,10 @@ type LocationData struct { ref file.Reference `hash:"ignore"` // The file reference relative to the stereoscope.FileCatalog that has more information about this location. 
} +func (l LocationData) Reference() file.Reference { + return l.ref +} + type LocationMetadata struct { Annotations map[string]string `json:"annotations,omitempty"` // Arbitrary key-value pairs that can be used to annotate a location } @@ -108,7 +112,7 @@ func NewVirtualLocationFromCoordinates(coordinates Coordinates, virtualPath stri }} } -// NewLocationFromImage creates a new Location representing the given path (extracted from the ref) relative to the given image. +// NewLocationFromImage creates a new Location representing the given path (extracted from the Reference) relative to the given image. func NewLocationFromImage(virtualPath string, ref file.Reference, img *image.Image) Location { layer := img.FileCatalog.Layer(ref) return Location{ @@ -126,7 +130,7 @@ func NewLocationFromImage(virtualPath string, ref file.Reference, img *image.Ima } } -// NewLocationFromDirectory creates a new Location representing the given path (extracted from the ref) relative to the given directory. +// NewLocationFromDirectory creates a new Location representing the given path (extracted from the Reference) relative to the given directory. func NewLocationFromDirectory(responsePath string, ref file.Reference) Location { return Location{ LocationData: LocationData{ @@ -141,7 +145,7 @@ func NewLocationFromDirectory(responsePath string, ref file.Reference) Location } } -// NewVirtualLocationFromDirectory creates a new Location representing the given path (extracted from the ref) relative to the given directory with a separate virtual access path. +// NewVirtualLocationFromDirectory creates a new Location representing the given path (extracted from the Reference) relative to the given directory with a separate virtual access path. 
func NewVirtualLocationFromDirectory(responsePath, virtualResponsePath string, ref file.Reference) Location { if responsePath == virtualResponsePath { return NewLocationFromDirectory(responsePath, ref) diff --git a/syft/source/location_read_closer.go b/syft/file/location_read_closer.go similarity index 94% rename from syft/source/location_read_closer.go rename to syft/file/location_read_closer.go index b5aa2b6efb5..480a0b50fe1 100644 --- a/syft/source/location_read_closer.go +++ b/syft/file/location_read_closer.go @@ -1,4 +1,4 @@ -package source +package file import "io" diff --git a/syft/source/location_set.go b/syft/file/location_set.go similarity index 99% rename from syft/source/location_set.go rename to syft/file/location_set.go index 100bf95e5d4..8e9ed2fc1f4 100644 --- a/syft/source/location_set.go +++ b/syft/file/location_set.go @@ -1,4 +1,4 @@ -package source +package file import ( "sort" diff --git a/syft/source/location_set_test.go b/syft/file/location_set_test.go similarity index 99% rename from syft/source/location_set_test.go rename to syft/file/location_set_test.go index b3d53ae5856..1613c71a539 100644 --- a/syft/source/location_set_test.go +++ b/syft/file/location_set_test.go @@ -1,4 +1,4 @@ -package source +package file import ( "testing" diff --git a/syft/source/location_test.go b/syft/file/location_test.go similarity index 98% rename from syft/source/location_test.go rename to syft/file/location_test.go index 96f0e3fcd3d..261ae9caa0f 100644 --- a/syft/source/location_test.go +++ b/syft/file/location_test.go @@ -1,4 +1,4 @@ -package source +package file import ( "testing" diff --git a/syft/source/locations.go b/syft/file/locations.go similarity index 96% rename from syft/source/locations.go rename to syft/file/locations.go index 045d1ed9d3c..da298643ec9 100644 --- a/syft/source/locations.go +++ b/syft/file/locations.go @@ -1,4 +1,4 @@ -package source +package file type Locations []Location diff --git a/syft/file/metadata.go b/syft/file/metadata.go 
new file mode 100644 index 00000000000..b5e0669d59c --- /dev/null +++ b/syft/file/metadata.go @@ -0,0 +1,5 @@ +package file + +import "github.com/anchore/stereoscope/pkg/file" + +type Metadata = file.Metadata diff --git a/syft/file/metadata_cataloger_test.go b/syft/file/metadata_cataloger_test.go deleted file mode 100644 index 93f8758fb0a..00000000000 --- a/syft/file/metadata_cataloger_test.go +++ /dev/null @@ -1,159 +0,0 @@ -package file - -import ( - "flag" - "os" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/anchore/stereoscope/pkg/file" - "github.com/anchore/stereoscope/pkg/imagetest" - "github.com/anchore/syft/syft/source" -) - -var updateImageGoldenFiles = flag.Bool("update-image", false, "update the golden fixture images used for testing") - -func TestFileMetadataCataloger(t *testing.T) { - testImage := "image-file-type-mix" - - if *updateImageGoldenFiles { - imagetest.UpdateGoldenFixtureImage(t, testImage) - } - - img := imagetest.GetGoldenFixtureImage(t, testImage) - - c := NewMetadataCataloger() - - src, err := source.NewFromImage(img, "---") - if err != nil { - t.Fatalf("could not create source: %+v", err) - } - - resolver, err := src.FileResolver(source.SquashedScope) - if err != nil { - t.Fatalf("could not create resolver: %+v", err) - } - - actual, err := c.Catalog(resolver) - if err != nil { - t.Fatalf("could not catalog: %+v", err) - } - - tests := []struct { - path string - exists bool - expected source.FileMetadata - err bool - }{ - { - path: "/file-1.txt", - exists: true, - expected: source.FileMetadata{ - Path: "/file-1.txt", - Mode: 0644, - Type: file.TypeRegular, - UserID: 1, - GroupID: 2, - Size: 7, - MIMEType: "text/plain", - }, - }, - { - path: "/hardlink-1", - exists: true, - expected: source.FileMetadata{ - Path: "/hardlink-1", - Mode: 0644, - Type: file.TypeHardLink, - LinkDestination: "file-1.txt", - UserID: 1, - GroupID: 2, - MIMEType: "", - }, - }, - { - path: 
"/symlink-1", - exists: true, - expected: source.FileMetadata{ - Path: "/symlink-1", - Mode: 0777 | os.ModeSymlink, - Type: file.TypeSymLink, - LinkDestination: "file-1.txt", - UserID: 0, - GroupID: 0, - MIMEType: "", - }, - }, - { - path: "/char-device-1", - exists: true, - expected: source.FileMetadata{ - Path: "/char-device-1", - Mode: 0644 | os.ModeDevice | os.ModeCharDevice, - Type: file.TypeCharacterDevice, - UserID: 0, - GroupID: 0, - MIMEType: "", - }, - }, - { - path: "/block-device-1", - exists: true, - expected: source.FileMetadata{ - Path: "/block-device-1", - Mode: 0644 | os.ModeDevice, - Type: file.TypeBlockDevice, - UserID: 0, - GroupID: 0, - MIMEType: "", - }, - }, - { - path: "/fifo-1", - exists: true, - expected: source.FileMetadata{ - Path: "/fifo-1", - Mode: 0644 | os.ModeNamedPipe, - Type: file.TypeFIFO, - UserID: 0, - GroupID: 0, - MIMEType: "", - }, - }, - { - path: "/bin", - exists: true, - expected: source.FileMetadata{ - Path: "/bin", - Mode: 0755 | os.ModeDir, - Type: file.TypeDirectory, - UserID: 0, - GroupID: 0, - MIMEType: "", - IsDir: true, - }, - }, - } - - for _, test := range tests { - t.Run(test.path, func(t *testing.T) { - _, ref, err := img.SquashedTree().File(file.Path(test.path)) - require.NoError(t, err) - - l := source.NewLocationFromImage(test.path, *ref.Reference, img) - - if _, ok := actual[l.Coordinates]; ok { - redact := actual[l.Coordinates] - redact.ModTime = time.Time{} - actual[l.Coordinates] = redact - } - - assert.Equal(t, test.expected, actual[l.Coordinates], "mismatched metadata") - - }) - } - -} diff --git a/syft/source/mock_resolver.go b/syft/file/mock_resolver.go similarity index 91% rename from syft/source/mock_resolver.go rename to syft/file/mock_resolver.go index 74cffac3409..7a0f89ffd00 100644 --- a/syft/source/mock_resolver.go +++ b/syft/file/mock_resolver.go @@ -1,4 +1,4 @@ -package source +package file import ( "fmt" @@ -11,14 +11,14 @@ import ( "github.com/anchore/stereoscope/pkg/file" ) -var _ 
FileResolver = (*MockResolver)(nil) +var _ Resolver = (*MockResolver)(nil) // MockResolver implements the FileResolver interface and is intended for use *only in test code*. // It provides an implementation that can resolve local filesystem paths using only a provided discrete list of file // paths, which are typically paths to test fixtures. type MockResolver struct { locations []Location - metadata map[Coordinates]FileMetadata + metadata map[Coordinates]Metadata mimeTypeIndex map[string][]Location extension map[string][]Location basename map[string][]Location @@ -41,13 +41,13 @@ func NewMockResolverForPaths(paths ...string) *MockResolver { return &MockResolver{ locations: locations, - metadata: make(map[Coordinates]FileMetadata), + metadata: make(map[Coordinates]Metadata), extension: extension, basename: basename, } } -func NewMockResolverForPathsWithMetadata(metadata map[Coordinates]FileMetadata) *MockResolver { +func NewMockResolverForPathsWithMetadata(metadata map[Coordinates]Metadata) *MockResolver { var locations []Location var mimeTypeIndex = make(map[string][]Location) extension := make(map[string][]Location) @@ -155,10 +155,10 @@ func (r MockResolver) AllLocations() <-chan Location { return results } -func (r MockResolver) FileMetadataByLocation(l Location) (FileMetadata, error) { +func (r MockResolver) FileMetadataByLocation(l Location) (Metadata, error) { info, err := os.Stat(l.RealPath) if err != nil { - return FileMetadata{}, err + return Metadata{}, err } // other types not supported @@ -167,12 +167,11 @@ func (r MockResolver) FileMetadataByLocation(l Location) (FileMetadata, error) { ty = file.TypeDirectory } - return FileMetadata{ - Mode: info.Mode(), - Type: ty, - UserID: 0, // not supported - GroupID: 0, // not supported - Size: info.Size(), + return Metadata{ + FileInfo: info, + Type: ty, + UserID: 0, // not supported + GroupID: 0, // not supported }, nil } diff --git a/syft/source/file_resolver.go b/syft/file/resolver.go similarity index 75% 
rename from syft/source/file_resolver.go rename to syft/file/resolver.go index 414be6373f9..57726b9b22c 100644 --- a/syft/source/file_resolver.go +++ b/syft/file/resolver.go @@ -1,28 +1,26 @@ -package source +package file -import ( - "io" -) +import "io" -// FileResolver is an interface that encompasses how to get specific file references and file contents for a generic data source. -type FileResolver interface { - FileContentResolver - FilePathResolver - FileLocationResolver - FileMetadataResolver +// Resolver is an interface that encompasses how to get specific file references and file contents for a generic data source. +type Resolver interface { + ContentResolver + PathResolver + LocationResolver + MetadataResolver } -// FileContentResolver knows how to get file content for a given Location -type FileContentResolver interface { +// ContentResolver knows how to get file content for a given Location +type ContentResolver interface { FileContentsByLocation(Location) (io.ReadCloser, error) } -type FileMetadataResolver interface { - FileMetadataByLocation(Location) (FileMetadata, error) +type MetadataResolver interface { + FileMetadataByLocation(Location) (Metadata, error) } -// FilePathResolver knows how to get a Location for given string paths and globs -type FilePathResolver interface { +// PathResolver knows how to get a Location for given string paths and globs +type PathResolver interface { // HasPath indicates if the given path exists in the underlying source. // The implementation for this may vary, however, generally the following considerations should be made: // - full symlink resolution should be performed on all requests @@ -50,7 +48,7 @@ type FilePathResolver interface { RelativeFileByPath(_ Location, path string) *Location } -type FileLocationResolver interface { +type LocationResolver interface { // AllLocations returns a channel of all file references from the underlying source. 
// The implementation for this may vary, however, generally the following considerations should be made: // - NO symlink resolution should be performed on results @@ -58,8 +56,8 @@ type FileLocationResolver interface { AllLocations() <-chan Location } -type WritableFileResolver interface { - FileResolver +type WritableResolver interface { + Resolver Write(location Location, reader io.Reader) error } diff --git a/syft/file/test-fixtures/snapshot/stereoscope-fixture-image-file-type-mix.golden b/syft/file/test-fixtures/snapshot/stereoscope-fixture-image-file-type-mix.golden deleted file mode 100644 index e85036214d9..00000000000 Binary files a/syft/file/test-fixtures/snapshot/stereoscope-fixture-image-file-type-mix.golden and /dev/null differ diff --git a/syft/formats/common/cyclonedxhelpers/component.go b/syft/formats/common/cyclonedxhelpers/component.go index e51c9d11250..d7c79875025 100644 --- a/syft/formats/common/cyclonedxhelpers/component.go +++ b/syft/formats/common/cyclonedxhelpers/component.go @@ -6,9 +6,9 @@ import ( "github.com/CycloneDX/cyclonedx-go" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/formats/common" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) func encodeComponent(p pkg.Package) cyclonedx.Component { @@ -100,13 +100,13 @@ func decodeComponent(c *cyclonedx.Component) *pkg.Package { return p } -func decodeLocations(vals map[string]string) source.LocationSet { - v := common.Decode(reflect.TypeOf([]source.Location{}), vals, "syft:location", CycloneDXFields) - out, ok := v.([]source.Location) +func decodeLocations(vals map[string]string) file.LocationSet { + v := common.Decode(reflect.TypeOf([]file.Location{}), vals, "syft:location", CycloneDXFields) + out, ok := v.([]file.Location) if !ok { out = nil } - return source.NewLocationSet(out...) + return file.NewLocationSet(out...) 
} func decodePackageMetadata(vals map[string]string, c *cyclonedx.Component, typ pkg.MetadataType) interface{} { diff --git a/syft/formats/common/cyclonedxhelpers/component_test.go b/syft/formats/common/cyclonedxhelpers/component_test.go index 4ee69aa0c21..ed217fa3422 100644 --- a/syft/formats/common/cyclonedxhelpers/component_test.go +++ b/syft/formats/common/cyclonedxhelpers/component_test.go @@ -8,8 +8,8 @@ import ( "github.com/CycloneDX/cyclonedx-go" "github.com/stretchr/testify/assert" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) func Test_encodeComponentProperties(t *testing.T) { @@ -28,8 +28,8 @@ func Test_encodeComponentProperties(t *testing.T) { name: "from apk", input: pkg.Package{ FoundBy: "cataloger", - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates(source.Coordinates{RealPath: "test"}), + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates(file.Coordinates{RealPath: "test"}), ), Metadata: pkg.ApkMetadata{ Package: "libc-utils", diff --git a/syft/formats/common/spdxhelpers/source_info_test.go b/syft/formats/common/spdxhelpers/source_info_test.go index a56efff9338..a7b2dc14299 100644 --- a/syft/formats/common/spdxhelpers/source_info_test.go +++ b/syft/formats/common/spdxhelpers/source_info_test.go @@ -5,8 +5,8 @@ import ( "github.com/stretchr/testify/assert" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) func Test_SourceInfo(t *testing.T) { @@ -19,9 +19,9 @@ func Test_SourceInfo(t *testing.T) { name: "locations are captured", input: pkg.Package{ // note: no type given - Locations: source.NewLocationSet( - source.NewVirtualLocation("/a-place", "/b-place"), - source.NewVirtualLocation("/c-place", "/d-place"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/a-place", "/b-place"), + file.NewVirtualLocation("/c-place", "/d-place"), ), }, expected: []string{ diff --git 
a/syft/formats/common/spdxhelpers/to_format_model.go b/syft/formats/common/spdxhelpers/to_format_model.go index 4c39dbe3bee..6b412a25585 100644 --- a/syft/formats/common/spdxhelpers/to_format_model.go +++ b/syft/formats/common/spdxhelpers/to_format_model.go @@ -21,7 +21,6 @@ import ( "github.com/anchore/syft/syft/formats/common/util" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/sbom" - "github.com/anchore/syft/syft/source" ) const ( @@ -137,7 +136,7 @@ func toSPDXID(identifiable artifact.Identifiable) spdx.ElementID { switch it := identifiable.(type) { case pkg.Package: id = SanitizeElementID(fmt.Sprintf("Package-%s-%s-%s", it.Type, it.Name, it.ID())) - case source.Coordinates: + case file.Coordinates: p := "" parts := strings.Split(it.RealPath, "/") for i := len(parts); i > 0; i-- { @@ -437,7 +436,7 @@ func toFiles(s sbom.SBOM) (results []*spdx.File) { artifacts := s.Artifacts for _, coordinates := range s.AllCoordinates() { - var metadata *source.FileMetadata + var metadata *file.Metadata if metadataForLocation, exists := artifacts.FileMetadata[coordinates]; exists { metadata = &metadataForLocation } @@ -500,7 +499,7 @@ func toChecksumAlgorithm(algorithm string) spdx.ChecksumAlgorithm { return spdx.ChecksumAlgorithm(strings.ToUpper(algorithm)) } -func toFileTypes(metadata *source.FileMetadata) (ty []string) { +func toFileTypes(metadata *file.Metadata) (ty []string) { if metadata == nil { return nil } diff --git a/syft/formats/common/spdxhelpers/to_format_model_test.go b/syft/formats/common/spdxhelpers/to_format_model_test.go index 170de95ea8c..411eed81da7 100644 --- a/syft/formats/common/spdxhelpers/to_format_model_test.go +++ b/syft/formats/common/spdxhelpers/to_format_model_test.go @@ -13,7 +13,6 @@ import ( "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/sbom" - "github.com/anchore/syft/syft/source" ) // TODO: Add ToFormatModel tests @@ -115,12 +114,12 @@ func Test_toFileTypes(t 
*testing.T) { tests := []struct { name string - metadata source.FileMetadata + metadata file.Metadata expected []string }{ { name: "application", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "application/vnd.unknown", }, expected: []string{ @@ -129,7 +128,7 @@ func Test_toFileTypes(t *testing.T) { }, { name: "archive", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "application/zip", }, expected: []string{ @@ -139,7 +138,7 @@ func Test_toFileTypes(t *testing.T) { }, { name: "audio", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "audio/ogg", }, expected: []string{ @@ -148,7 +147,7 @@ func Test_toFileTypes(t *testing.T) { }, { name: "video", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "video/3gpp", }, expected: []string{ @@ -157,7 +156,7 @@ func Test_toFileTypes(t *testing.T) { }, { name: "text", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "text/html", }, expected: []string{ @@ -166,7 +165,7 @@ func Test_toFileTypes(t *testing.T) { }, { name: "image", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "image/png", }, expected: []string{ @@ -175,7 +174,7 @@ func Test_toFileTypes(t *testing.T) { }, { name: "binary", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "application/x-sharedlib", }, expected: []string{ @@ -276,7 +275,7 @@ func Test_fileIDsForPackage(t *testing.T) { Name: "bogus", } - c := source.Coordinates{ + c := file.Coordinates{ RealPath: "/path", FileSystemID: "nowhere", } @@ -505,14 +504,14 @@ func Test_toSPDXID(t *testing.T) { }{ { name: "short filename", - it: source.Coordinates{ + it: file.Coordinates{ RealPath: "/short/path/file.txt", }, expected: "File-short-path-file.txt", }, { name: "long filename", - it: source.Coordinates{ + it: file.Coordinates{ RealPath: "/some/long/path/with/a/lot/of-text/that-contains-a/file.txt", }, expected: "File-...a-lot-of-text-that-contains-a-file.txt", 
diff --git a/syft/formats/common/spdxhelpers/to_syft_model.go b/syft/formats/common/spdxhelpers/to_syft_model.go index a31cee81218..fd34541df99 100644 --- a/syft/formats/common/spdxhelpers/to_syft_model.go +++ b/syft/formats/common/spdxhelpers/to_syft_model.go @@ -35,8 +35,8 @@ func ToSyftModel(doc *spdx.Document) (*sbom.SBOM, error) { Source: src, Artifacts: sbom.Artifacts{ Packages: pkg.NewCollection(), - FileMetadata: map[source.Coordinates]source.FileMetadata{}, - FileDigests: map[source.Coordinates][]file.Digest{}, + FileMetadata: map[file.Coordinates]file.Metadata{}, + FileDigests: map[file.Coordinates][]file.Digest{}, LinuxDistribution: findLinuxReleaseByPURL(doc), }, } @@ -135,7 +135,7 @@ func toFileDigests(f *spdx.File) (digests []file.Digest) { return digests } -func toFileMetadata(f *spdx.File) (meta source.FileMetadata) { +func toFileMetadata(f *spdx.File) (meta file.Metadata) { // FIXME Syft is currently lossy due to the SPDX 2.2.1 spec not supporting arbitrary mimetypes for _, typ := range f.FileTypes { switch FileType(typ) { @@ -169,7 +169,7 @@ func toSyftRelationships(spdxIDMap map[string]interface{}, doc *spdx.Document) [ b := spdxIDMap[string(r.RefB.ElementRefID)] from, fromOk := a.(*pkg.Package) toPackage, toPackageOk := b.(*pkg.Package) - toLocation, toLocationOk := b.(*source.Location) + toLocation, toLocationOk := b.(*file.Location) if !fromOk || !(toPackageOk || toLocationOk) { log.Debugf("unable to find valid relationship mapping from SPDX 2.2 JSON, ignoring: (from: %+v) (to: %+v)", a, b) continue @@ -212,7 +212,7 @@ func toSyftRelationships(spdxIDMap map[string]interface{}, doc *spdx.Document) [ return out } -func toSyftCoordinates(f *spdx.File) source.Coordinates { +func toSyftCoordinates(f *spdx.File) file.Coordinates { const layerIDPrefix = "layerID: " var fileSystemID string if strings.Index(f.FileComment, layerIDPrefix) == 0 { @@ -221,14 +221,14 @@ func toSyftCoordinates(f *spdx.File) source.Coordinates { if 
strings.Index(string(f.FileSPDXIdentifier), layerIDPrefix) == 0 { fileSystemID = strings.TrimPrefix(string(f.FileSPDXIdentifier), layerIDPrefix) } - return source.Coordinates{ + return file.Coordinates{ RealPath: f.FileName, FileSystemID: fileSystemID, } } -func toSyftLocation(f *spdx.File) *source.Location { - l := source.NewVirtualLocationFromCoordinates(toSyftCoordinates(f), f.FileName) +func toSyftLocation(f *spdx.File) *file.Location { + l := file.NewVirtualLocationFromCoordinates(toSyftCoordinates(f), f.FileName) return &l } diff --git a/syft/formats/common/spdxhelpers/to_syft_model_test.go b/syft/formats/common/spdxhelpers/to_syft_model_test.go index a4b5c1e81d9..e4a98f5abd2 100644 --- a/syft/formats/common/spdxhelpers/to_syft_model_test.go +++ b/syft/formats/common/spdxhelpers/to_syft_model_test.go @@ -9,6 +9,7 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/source" ) @@ -336,7 +337,7 @@ func Test_toSyftRelationships(t *testing.T) { } pkg3.SetID() - loc1 := source.NewLocationFromCoordinates(source.Coordinates{ + loc1 := file.NewLocationFromCoordinates(file.Coordinates{ RealPath: "/somewhere/real", FileSystemID: "abc", }) diff --git a/syft/formats/github/encoder_test.go b/syft/formats/github/encoder_test.go index ba405dad63c..a0770f2520e 100644 --- a/syft/formats/github/encoder_test.go +++ b/syft/formats/github/encoder_test.go @@ -7,6 +7,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/sbom" @@ -35,8 +36,8 @@ func Test_toGithubModel(t *testing.T) { { Name: "pkg-1", Version: "1.0.1", - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates(source.Coordinates{ + Locations: file.NewLocationSet( + 
file.NewLocationFromCoordinates(file.Coordinates{ RealPath: "/usr/lib", FileSystemID: "fsid-1", }), @@ -45,8 +46,8 @@ func Test_toGithubModel(t *testing.T) { { Name: "pkg-2", Version: "2.0.2", - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates(source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates(file.Coordinates{ RealPath: "/usr/lib", FileSystemID: "fsid-1", }), @@ -55,8 +56,8 @@ func Test_toGithubModel(t *testing.T) { { Name: "pkg-3", Version: "3.0.3", - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates(source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates(file.Coordinates{ RealPath: "/etc", FileSystemID: "fsid-1", }), diff --git a/syft/formats/internal/testutils/utils.go b/syft/formats/internal/testutils/utils.go index 7ddf942173b..f9f4941d4e0 100644 --- a/syft/formats/internal/testutils/utils.go +++ b/syft/formats/internal/testutils/utils.go @@ -17,6 +17,7 @@ import ( "github.com/anchore/stereoscope/pkg/imagetest" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/cpe" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/sbom" @@ -155,8 +156,8 @@ func populateImageCatalog(catalog *pkg.Collection, img *image.Image) { catalog.Add(pkg.Package{ Name: "package-1", Version: "1.0.1", - Locations: source.NewLocationSet( - source.NewLocationFromImage(string(ref1.RealPath), *ref1.Reference, img), + Locations: file.NewLocationSet( + file.NewLocationFromImage(string(ref1.RealPath), *ref1.Reference, img), ), Type: pkg.PythonPkg, FoundBy: "the-cataloger-1", @@ -177,8 +178,8 @@ func populateImageCatalog(catalog *pkg.Collection, img *image.Image) { catalog.Add(pkg.Package{ Name: "package-2", Version: "2.0.1", - Locations: source.NewLocationSet( - source.NewLocationFromImage(string(ref2.RealPath), *ref2.Reference, img), + Locations: file.NewLocationSet( + 
file.NewLocationFromImage(string(ref2.RealPath), *ref2.Reference, img), ), Type: pkg.DebPkg, FoundBy: "the-cataloger-2", @@ -265,8 +266,8 @@ func newDirectoryCatalog() *pkg.Collection { Version: "1.0.1", Type: pkg.PythonPkg, FoundBy: "the-cataloger-1", - Locations: source.NewLocationSet( - source.NewLocation("/some/path/pkg1"), + Locations: file.NewLocationSet( + file.NewLocation("/some/path/pkg1"), ), Language: pkg.Python, MetadataType: pkg.PythonPackageMetadataType, @@ -292,8 +293,8 @@ func newDirectoryCatalog() *pkg.Collection { Version: "2.0.1", Type: pkg.DebPkg, FoundBy: "the-cataloger-2", - Locations: source.NewLocationSet( - source.NewLocation("/some/path/pkg1"), + Locations: file.NewLocationSet( + file.NewLocation("/some/path/pkg1"), ), MetadataType: pkg.DpkgMetadataType, Metadata: pkg.DpkgMetadata{ @@ -318,8 +319,8 @@ func newDirectoryCatalogWithAuthorField() *pkg.Collection { Version: "1.0.1", Type: pkg.PythonPkg, FoundBy: "the-cataloger-1", - Locations: source.NewLocationSet( - source.NewLocation("/some/path/pkg1"), + Locations: file.NewLocationSet( + file.NewLocation("/some/path/pkg1"), ), Language: pkg.Python, MetadataType: pkg.PythonPackageMetadataType, @@ -346,8 +347,8 @@ func newDirectoryCatalogWithAuthorField() *pkg.Collection { Version: "2.0.1", Type: pkg.DebPkg, FoundBy: "the-cataloger-2", - Locations: source.NewLocationSet( - source.NewLocation("/some/path/pkg1"), + Locations: file.NewLocationSet( + file.NewLocation("/some/path/pkg1"), ), MetadataType: pkg.DpkgMetadataType, Metadata: pkg.DpkgMetadata{ @@ -366,15 +367,15 @@ func newDirectoryCatalogWithAuthorField() *pkg.Collection { //nolint:gosec func AddSampleFileRelationships(s *sbom.SBOM) { catalog := s.Artifacts.Packages.Sorted() - s.Artifacts.FileMetadata = map[source.Coordinates]source.FileMetadata{} + s.Artifacts.FileMetadata = map[file.Coordinates]file.Metadata{} files := []string{"/f1", "/f2", "/d1/f3", "/d2/f4", "/z1/f5", "/a1/f6"} rnd := rand.New(rand.NewSource(time.Now().UnixNano())) 
rnd.Shuffle(len(files), func(i, j int) { files[i], files[j] = files[j], files[i] }) for _, f := range files { - meta := source.FileMetadata{} - coords := source.Coordinates{RealPath: f} + meta := file.Metadata{} + coords := file.Coordinates{RealPath: f} s.Artifacts.FileMetadata[coords] = meta s.Relationships = append(s.Relationships, artifact.Relationship{ diff --git a/syft/formats/syftjson/encoder_test.go b/syft/formats/syftjson/encoder_test.go index c42cc75c410..231333bb869 100644 --- a/syft/formats/syftjson/encoder_test.go +++ b/syft/formats/syftjson/encoder_test.go @@ -52,8 +52,8 @@ func TestEncodeFullJSONDocument(t *testing.T) { p1 := pkg.Package{ Name: "package-1", Version: "1.0.1", - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates(source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates(file.Coordinates{ RealPath: "/a/place/a", }), ), @@ -76,8 +76,8 @@ func TestEncodeFullJSONDocument(t *testing.T) { p2 := pkg.Package{ Name: "package-2", Version: "2.0.1", - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates(source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates(file.Coordinates{ RealPath: "/b/place/b", }), ), @@ -101,49 +101,61 @@ func TestEncodeFullJSONDocument(t *testing.T) { s := sbom.SBOM{ Artifacts: sbom.Artifacts{ Packages: catalog, - FileMetadata: map[source.Coordinates]source.FileMetadata{ - source.NewLocation("/a/place").Coordinates: { - Mode: 0775, + FileMetadata: map[file.Coordinates]file.Metadata{ + file.NewLocation("/a/place").Coordinates: { + FileInfo: stereoFile.ManualInfo{ + NameValue: "/a/place", + ModeValue: 0775, + }, Type: stereoFile.TypeDirectory, UserID: 0, GroupID: 0, }, - source.NewLocation("/a/place/a").Coordinates: { - Mode: 0775, + file.NewLocation("/a/place/a").Coordinates: { + FileInfo: stereoFile.ManualInfo{ + NameValue: "/a/place/a", + ModeValue: 0775, + }, Type: stereoFile.TypeRegular, UserID: 0, GroupID: 0, }, - 
source.NewLocation("/b").Coordinates: { - Mode: 0775, + file.NewLocation("/b").Coordinates: { + FileInfo: stereoFile.ManualInfo{ + NameValue: "/b", + ModeValue: 0775, + }, Type: stereoFile.TypeSymLink, LinkDestination: "/c", UserID: 0, GroupID: 0, }, - source.NewLocation("/b/place/b").Coordinates: { - Mode: 0644, + file.NewLocation("/b/place/b").Coordinates: { + FileInfo: stereoFile.ManualInfo{ + NameValue: "/b/place/b", + ModeValue: 0644, + }, Type: stereoFile.TypeRegular, UserID: 1, GroupID: 2, }, }, - FileDigests: map[source.Coordinates][]file.Digest{ - source.NewLocation("/a/place/a").Coordinates: { + FileDigests: map[file.Coordinates][]file.Digest{ + file.NewLocation("/a/place/a").Coordinates: { { Algorithm: "sha256", Value: "366a3f5653e34673b875891b021647440d0127c2ef041e3b1a22da2a7d4f3703", }, }, - source.NewLocation("/b/place/b").Coordinates: { + file.NewLocation("/b/place/b").Coordinates: { { Algorithm: "sha256", Value: "1b3722da2a7d90d033b87581a2a3f12021647445653e34666ef041e3b4f3707c", }, }, }, - FileContents: map[source.Coordinates]string{ - source.NewLocation("/a/place/a").Coordinates: "the-contents", + FileContents: map[file.Coordinates]string{ + file.NewLocation("/a/place/a").Coordinates: "the-contents", }, LinuxDistribution: &linux.Release{ ID: "redhat", diff --git a/syft/formats/syftjson/model/file.go b/syft/formats/syftjson/model/file.go index 796cecebf1c..757a293154a 100644 --- a/syft/formats/syftjson/model/file.go +++ b/syft/formats/syftjson/model/file.go @@ -2,12 +2,11 @@ package model import ( "github.com/anchore/syft/syft/file" - "github.com/anchore/syft/syft/source" ) type File struct { ID string `json:"id"` - Location source.Coordinates `json:"location"` + Location file.Coordinates `json:"location"` Metadata *FileMetadataEntry `json:"metadata,omitempty"` Contents string `json:"contents,omitempty"` Digests []file.Digest `json:"digests,omitempty"` diff --git a/syft/formats/syftjson/model/package.go b/syft/formats/syftjson/model/package.go index 
c4fc9580467..fccf04c0bda 100644 --- a/syft/formats/syftjson/model/package.go +++ b/syft/formats/syftjson/model/package.go @@ -7,9 +7,9 @@ import ( "reflect" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/license" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) var errUnknownMetadataType = errors.New("unknown metadata type") @@ -22,26 +22,26 @@ type Package struct { // PackageBasicData contains non-ambiguous values (type-wise) from pkg.Package. type PackageBasicData struct { - ID string `json:"id"` - Name string `json:"name"` - Version string `json:"version"` - Type pkg.Type `json:"type"` - FoundBy string `json:"foundBy"` - Locations []source.Location `json:"locations"` - Licenses licenses `json:"licenses"` - Language pkg.Language `json:"language"` - CPEs []string `json:"cpes"` - PURL string `json:"purl"` + ID string `json:"id"` + Name string `json:"name"` + Version string `json:"version"` + Type pkg.Type `json:"type"` + FoundBy string `json:"foundBy"` + Locations []file.Location `json:"locations"` + Licenses licenses `json:"licenses"` + Language pkg.Language `json:"language"` + CPEs []string `json:"cpes"` + PURL string `json:"purl"` } type licenses []License type License struct { - Value string `json:"value"` - SPDXExpression string `json:"spdxExpression"` - Type license.Type `json:"type"` - URLs []string `json:"urls"` - Locations []source.Location `json:"locations"` + Value string `json:"value"` + SPDXExpression string `json:"spdxExpression"` + Type license.Type `json:"type"` + URLs []string `json:"urls"` + Locations []file.Location `json:"locations"` } func newModelLicensesFromValues(licenses []string) (ml []License) { diff --git a/syft/formats/syftjson/model/secrets.go b/syft/formats/syftjson/model/secrets.go index c5f4685765d..5562b76bb2c 100644 --- a/syft/formats/syftjson/model/secrets.go +++ b/syft/formats/syftjson/model/secrets.go @@ -2,10 +2,9 @@ package model import ( 
"github.com/anchore/syft/syft/file" - "github.com/anchore/syft/syft/source" ) type Secrets struct { - Location source.Coordinates `json:"location"` + Location file.Coordinates `json:"location"` Secrets []file.SearchResult `json:"secrets"` } diff --git a/syft/formats/syftjson/to_format_model.go b/syft/formats/syftjson/to_format_model.go index 718237b99b5..7b3688ced69 100644 --- a/syft/formats/syftjson/to_format_model.go +++ b/syft/formats/syftjson/to_format_model.go @@ -74,7 +74,7 @@ func toDescriptor(d sbom.Descriptor) model.Descriptor { } } -func toSecrets(data map[source.Coordinates][]file.SearchResult) []model.Secrets { +func toSecrets(data map[file.Coordinates][]file.SearchResult) []model.Secrets { results := make([]model.Secrets, 0) for coordinates, secrets := range data { results = append(results, model.Secrets{ @@ -95,7 +95,7 @@ func toFile(s sbom.SBOM) []model.File { artifacts := s.Artifacts for _, coordinates := range s.AllCoordinates() { - var metadata *source.FileMetadata + var metadata *file.Metadata if metadataForLocation, exists := artifacts.FileMetadata[coordinates]; exists { metadata = &metadataForLocation } @@ -126,15 +126,23 @@ func toFile(s sbom.SBOM) []model.File { return results } -func toFileMetadataEntry(coordinates source.Coordinates, metadata *source.FileMetadata) *model.FileMetadataEntry { +func toFileMetadataEntry(coordinates file.Coordinates, metadata *file.Metadata) *model.FileMetadataEntry { if metadata == nil { return nil } - mode, err := strconv.Atoi(fmt.Sprintf("%o", metadata.Mode)) - if err != nil { - log.Warnf("invalid mode found in file catalog @ location=%+v mode=%q: %+v", coordinates, metadata.Mode, err) - mode = 0 + var mode int + var size int64 + if metadata != nil && metadata.FileInfo != nil { + var err error + + mode, err = strconv.Atoi(fmt.Sprintf("%o", metadata.Mode())) + if err != nil { + log.Warnf("invalid mode found in file catalog @ location=%+v mode=%q: %+v", coordinates, metadata.Mode, err) + mode = 0 + } + + size = 
metadata.Size() } return &model.FileMetadataEntry{ @@ -144,7 +152,7 @@ func toFileMetadataEntry(coordinates source.Coordinates, metadata *source.FileMe UserID: metadata.UserID, GroupID: metadata.GroupID, MIMEType: metadata.MIMEType, - Size: metadata.Size, + Size: size, } } @@ -187,7 +195,7 @@ func toPackageModels(catalog *pkg.Collection) []model.Package { func toLicenseModel(pkgLicenses []pkg.License) (modelLicenses []model.License) { for _, l := range pkgLicenses { // guarantee collection - locations := make([]source.Location, 0) + locations := make([]file.Location, 0) if v := l.Locations.ToSlice(); v != nil { locations = v } diff --git a/syft/formats/syftjson/to_format_model_test.go b/syft/formats/syftjson/to_format_model_test.go index a99e66a3692..98f03c7b08f 100644 --- a/syft/formats/syftjson/to_format_model_test.go +++ b/syft/formats/syftjson/to_format_model_test.go @@ -7,7 +7,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/formats/syftjson/model" "github.com/anchore/syft/syft/source" ) @@ -94,46 +95,46 @@ func Test_toSourceModel(t *testing.T) { func Test_toFileType(t *testing.T) { - badType := file.Type(0x1337) - var allTypesTested []file.Type + badType := stereoscopeFile.Type(0x1337) + var allTypesTested []stereoscopeFile.Type tests := []struct { - ty file.Type + ty stereoscopeFile.Type name string }{ { - ty: file.TypeRegular, + ty: stereoscopeFile.TypeRegular, name: "RegularFile", }, { - ty: file.TypeDirectory, + ty: stereoscopeFile.TypeDirectory, name: "Directory", }, { - ty: file.TypeSymLink, + ty: stereoscopeFile.TypeSymLink, name: "SymbolicLink", }, { - ty: file.TypeHardLink, + ty: stereoscopeFile.TypeHardLink, name: "HardLink", }, { - ty: file.TypeSocket, + ty: stereoscopeFile.TypeSocket, name: "Socket", }, { - ty: file.TypeCharacterDevice, + 
ty: stereoscopeFile.TypeCharacterDevice, name: "CharacterDevice", }, { - ty: file.TypeBlockDevice, + ty: stereoscopeFile.TypeBlockDevice, name: "BlockDevice", }, { - ty: file.TypeFIFO, + ty: stereoscopeFile.TypeFIFO, name: "FIFONode", }, { - ty: file.TypeIrregular, + ty: stereoscopeFile.TypeIrregular, name: "IrregularFile", }, { @@ -150,5 +151,47 @@ func Test_toFileType(t *testing.T) { }) } - assert.ElementsMatch(t, allTypesTested, file.AllTypes(), "not all file.Types are under test") + assert.ElementsMatch(t, allTypesTested, stereoscopeFile.AllTypes(), "not all file.Types are under test") +} + +func Test_toFileMetadataEntry(t *testing.T) { + coords := file.Coordinates{ + RealPath: "/path", + FileSystemID: "x", + } + tests := []struct { + name string + metadata *file.Metadata + want *model.FileMetadataEntry + }{ + { + name: "no metadata", + }, + { + name: "no file info", + metadata: &file.Metadata{ + FileInfo: nil, + }, + want: &model.FileMetadataEntry{ + Type: stereoscopeFile.TypeRegular.String(), + }, + }, + { + name: "with file info", + metadata: &file.Metadata{ + FileInfo: &stereoscopeFile.ManualInfo{ + ModeValue: 1, + }, + }, + want: &model.FileMetadataEntry{ + Mode: 1, + Type: stereoscopeFile.TypeRegular.String(), + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, toFileMetadataEntry(coords, tt.metadata)) + }) + } } diff --git a/syft/formats/syftjson/to_syft_model.go b/syft/formats/syftjson/to_syft_model.go index 0d02ef6f974..aeb0c24f165 100644 --- a/syft/formats/syftjson/to_syft_model.go +++ b/syft/formats/syftjson/to_syft_model.go @@ -3,6 +3,7 @@ package syftjson import ( "fmt" "os" + "path" "strconv" "strings" @@ -63,8 +64,8 @@ func deduplicateErrors(errors []error) []string { func toSyftFiles(files []model.File) sbom.Artifacts { ret := sbom.Artifacts{ - FileMetadata: make(map[source.Coordinates]source.FileMetadata), - FileDigests: make(map[source.Coordinates][]file.Digest), + FileMetadata: 
make(map[file.Coordinates]file.Metadata), + FileDigests: make(map[file.Coordinates][]file.Digest), } for _, f := range files { @@ -78,15 +79,17 @@ func toSyftFiles(files []model.File) sbom.Artifacts { fm := os.FileMode(mode) - ret.FileMetadata[coord] = source.FileMetadata{ + ret.FileMetadata[coord] = file.Metadata{ + FileInfo: stereoscopeFile.ManualInfo{ + NameValue: path.Base(coord.RealPath), + SizeValue: f.Metadata.Size, + ModeValue: fm, + }, Path: coord.RealPath, LinkDestination: f.Metadata.LinkDestination, - Size: f.Metadata.Size, UserID: f.Metadata.UserID, GroupID: f.Metadata.GroupID, Type: toSyftFileType(f.Metadata.Type), - IsDir: fm.IsDir(), - Mode: fm, MIMEType: f.Metadata.MIMEType, } } @@ -109,7 +112,7 @@ func toSyftLicenses(m []model.License) (p []pkg.License) { SPDXExpression: l.SPDXExpression, Type: l.Type, URLs: internal.NewStringSet(l.URLs...), - Locations: source.NewLocationSet(l.Locations...), + Locations: file.NewLocationSet(l.Locations...), }) } return @@ -317,7 +320,7 @@ func toSyftPackage(p model.Package, idAliases map[string]string) pkg.Package { Name: p.Name, Version: p.Version, FoundBy: p.FoundBy, - Locations: source.NewLocationSet(p.Locations...), + Locations: file.NewLocationSet(p.Locations...), Licenses: pkg.NewLicenseSet(toSyftLicenses(p.Licenses)...), Language: p.Language, Type: p.Type, diff --git a/syft/formats/syftjson/to_syft_model_test.go b/syft/formats/syftjson/to_syft_model_test.go index de96667f14c..dabc33f3841 100644 --- a/syft/formats/syftjson/to_syft_model_test.go +++ b/syft/formats/syftjson/to_syft_model_test.go @@ -131,7 +131,7 @@ func Test_idsHaveChanged(t *testing.T) { } func Test_toSyftFiles(t *testing.T) { - coord := source.Coordinates{ + coord := file.Coordinates{ RealPath: "/somerwhere/place", FileSystemID: "abc", } @@ -145,8 +145,8 @@ func Test_toSyftFiles(t *testing.T) { name: "empty", files: []model.File{}, want: sbom.Artifacts{ - FileMetadata: map[source.Coordinates]source.FileMetadata{}, - FileDigests: 
map[source.Coordinates][]file.Digest{}, + FileMetadata: map[file.Coordinates]file.Metadata{}, + FileDigests: map[file.Coordinates][]file.Digest{}, }, }, { @@ -165,8 +165,8 @@ func Test_toSyftFiles(t *testing.T) { }, }, want: sbom.Artifacts{ - FileMetadata: map[source.Coordinates]source.FileMetadata{}, - FileDigests: map[source.Coordinates][]file.Digest{ + FileMetadata: map[file.Coordinates]file.Metadata{}, + FileDigests: map[file.Coordinates][]file.Digest{ coord: { { Algorithm: "sha256", @@ -200,20 +200,22 @@ func Test_toSyftFiles(t *testing.T) { }, }, want: sbom.Artifacts{ - FileMetadata: map[source.Coordinates]source.FileMetadata{ + FileMetadata: map[file.Coordinates]file.Metadata{ coord: { + FileInfo: stereoFile.ManualInfo{ + NameValue: "place", + SizeValue: 92, + ModeValue: 511, // 777 octal = 511 decimal + }, Path: coord.RealPath, LinkDestination: "", - Size: 92, UserID: 42, GroupID: 32, Type: stereoFile.TypeRegular, - IsDir: false, - Mode: 511, // 777 octal = 511 decimal MIMEType: "text/plain", }, }, - FileDigests: map[source.Coordinates][]file.Digest{ + FileDigests: map[file.Coordinates][]file.Digest{ coord: { { Algorithm: "sha256", diff --git a/syft/source/image_all_layers_resolver.go b/syft/internal/fileresolver/container_image_all_layers.go similarity index 65% rename from syft/source/image_all_layers_resolver.go rename to syft/internal/fileresolver/container_image_all_layers.go index ca40b12718c..e66c92aaf1b 100644 --- a/syft/source/image_all_layers_resolver.go +++ b/syft/internal/fileresolver/container_image_all_layers.go @@ -1,25 +1,26 @@ -package source +package fileresolver import ( "fmt" "io" - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" "github.com/anchore/stereoscope/pkg/filetree" "github.com/anchore/stereoscope/pkg/image" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" ) -var _ FileResolver = (*imageAllLayersResolver)(nil) +var _ file.Resolver = 
(*ContainerImageAllLayers)(nil) -// imageAllLayersResolver implements path and content access for the AllLayers source option for container image data sources. -type imageAllLayersResolver struct { +// ContainerImageAllLayers implements path and content access for the AllLayers source option for container image data sources. +type ContainerImageAllLayers struct { img *image.Image layers []int } -// newAllLayersResolver returns a new resolver from the perspective of all image layers for the given image. -func newAllLayersResolver(img *image.Image) (*imageAllLayersResolver, error) { +// NewFromContainerImageAllLayers returns a new resolver from the perspective of all image layers for the given image. +func NewFromContainerImageAllLayers(img *image.Image) (*ContainerImageAllLayers, error) { if len(img.Layers) == 0 { return nil, fmt.Errorf("the image does not contain any layers") } @@ -28,15 +29,15 @@ func newAllLayersResolver(img *image.Image) (*imageAllLayersResolver, error) { for idx := range img.Layers { layers = append(layers, idx) } - return &imageAllLayersResolver{ + return &ContainerImageAllLayers{ img: img, layers: layers, }, nil } // HasPath indicates if the given path exists in the underlying source. 
-func (r *imageAllLayersResolver) HasPath(path string) bool { - p := file.Path(path) +func (r *ContainerImageAllLayers) HasPath(path string) bool { + p := stereoscopeFile.Path(path) for _, layerIdx := range r.layers { tree := r.img.Layers[layerIdx].Tree if tree.HasPath(p) { @@ -46,8 +47,8 @@ func (r *imageAllLayersResolver) HasPath(path string) bool { return false } -func (r *imageAllLayersResolver) fileByRef(ref file.Reference, uniqueFileIDs file.ReferenceSet, layerIdx int) ([]file.Reference, error) { - uniqueFiles := make([]file.Reference, 0) +func (r *ContainerImageAllLayers) fileByRef(ref stereoscopeFile.Reference, uniqueFileIDs stereoscopeFile.ReferenceSet, layerIdx int) ([]stereoscopeFile.Reference, error) { + uniqueFiles := make([]stereoscopeFile.Reference, 0) // since there is potentially considerable work for each symlink/hardlink that needs to be resolved, let's check to see if this is a symlink/hardlink first entry, err := r.img.FileCatalog.Get(ref) @@ -55,7 +56,7 @@ func (r *imageAllLayersResolver) fileByRef(ref file.Reference, uniqueFileIDs fil return nil, fmt.Errorf("unable to fetch metadata (ref=%+v): %w", ref, err) } - if entry.Metadata.Type == file.TypeHardLink || entry.Metadata.Type == file.TypeSymLink { + if entry.Metadata.Type == stereoscopeFile.TypeHardLink || entry.Metadata.Type == stereoscopeFile.TypeSymLink { // a link may resolve in this layer or higher, assuming a squashed tree is used to search // we should search all possible resolutions within the valid source for _, subLayerIdx := range r.layers[layerIdx:] { @@ -77,9 +78,9 @@ func (r *imageAllLayersResolver) fileByRef(ref file.Reference, uniqueFileIDs fil } // FilesByPath returns all file.References that match the given paths from any layer in the image. 
-func (r *imageAllLayersResolver) FilesByPath(paths ...string) ([]Location, error) { - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) +func (r *ContainerImageAllLayers) FilesByPath(paths ...string) ([]file.Location, error) { + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) for _, path := range paths { for idx, layerIdx := range r.layers { @@ -100,7 +101,7 @@ func (r *imageAllLayersResolver) FilesByPath(paths ...string) ([]Location, error if err != nil { return nil, fmt.Errorf("unable to get file metadata for path=%q: %w", ref.RealPath, err) } - if metadata.Metadata.IsDir { + if metadata.Metadata.IsDir() { continue } } @@ -110,7 +111,7 @@ func (r *imageAllLayersResolver) FilesByPath(paths ...string) ([]Location, error return nil, err } for _, result := range results { - uniqueLocations = append(uniqueLocations, NewLocationFromImage(path, result, r.img)) + uniqueLocations = append(uniqueLocations, file.NewLocationFromImage(path, result, r.img)) } } } @@ -119,9 +120,9 @@ func (r *imageAllLayersResolver) FilesByPath(paths ...string) ([]Location, error // FilesByGlob returns all file.References that match the given path glob pattern from any layer in the image. 
// nolint:gocognit -func (r *imageAllLayersResolver) FilesByGlob(patterns ...string) ([]Location, error) { - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) +func (r *ContainerImageAllLayers) FilesByGlob(patterns ...string) ([]file.Location, error) { + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) for _, pattern := range patterns { for idx, layerIdx := range r.layers { @@ -143,7 +144,7 @@ func (r *imageAllLayersResolver) FilesByGlob(patterns ...string) ([]Location, er return nil, fmt.Errorf("unable to get file metadata for path=%q: %w", result.RequestPath, err) } // don't consider directories - if metadata.Metadata.IsDir { + if metadata.Metadata.IsDir() { continue } } @@ -153,7 +154,7 @@ func (r *imageAllLayersResolver) FilesByGlob(patterns ...string) ([]Location, er return nil, err } for _, refResult := range refResults { - uniqueLocations = append(uniqueLocations, NewLocationFromImage(string(result.RequestPath), refResult, r.img)) + uniqueLocations = append(uniqueLocations, file.NewLocationFromImage(string(result.RequestPath), refResult, r.img)) } } } @@ -164,10 +165,10 @@ func (r *imageAllLayersResolver) FilesByGlob(patterns ...string) ([]Location, er // RelativeFileByPath fetches a single file at the given path relative to the layer squash of the given reference. // This is helpful when attempting to find a file that is in the same layer or lower as another file. 
-func (r *imageAllLayersResolver) RelativeFileByPath(location Location, path string) *Location { - layer := r.img.FileCatalog.Layer(location.ref) +func (r *ContainerImageAllLayers) RelativeFileByPath(location file.Location, path string) *file.Location { + layer := r.img.FileCatalog.Layer(location.Reference()) - exists, relativeRef, err := layer.SquashedTree.File(file.Path(path), filetree.FollowBasenameLinks) + exists, relativeRef, err := layer.SquashedTree.File(stereoscopeFile.Path(path), filetree.FollowBasenameLinks) if err != nil { log.Errorf("failed to find path=%q in squash: %+w", path, err) return nil @@ -176,21 +177,21 @@ func (r *imageAllLayersResolver) RelativeFileByPath(location Location, path stri return nil } - relativeLocation := NewLocationFromImage(path, *relativeRef.Reference, r.img) + relativeLocation := file.NewLocationFromImage(path, *relativeRef.Reference, r.img) return &relativeLocation } // FileContentsByLocation fetches file contents for a single file reference, irregardless of the source layer. // If the path does not exist an error is returned. 
-func (r *imageAllLayersResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) { - entry, err := r.img.FileCatalog.Get(location.ref) +func (r *ContainerImageAllLayers) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { + entry, err := r.img.FileCatalog.Get(location.Reference()) if err != nil { return nil, fmt.Errorf("unable to get metadata for path=%q from file catalog: %w", location.RealPath, err) } switch entry.Metadata.Type { - case file.TypeSymLink, file.TypeHardLink: + case stereoscopeFile.TypeSymLink, stereoscopeFile.TypeHardLink: // the location we are searching may be a symlink, we should always work with the resolved file newLocation := r.RelativeFileByPath(location, location.VirtualPath) if newLocation == nil { @@ -198,16 +199,16 @@ func (r *imageAllLayersResolver) FileContentsByLocation(location Location) (io.R return nil, fmt.Errorf("no contents for location=%q", location.VirtualPath) } location = *newLocation - case file.TypeDirectory: - return nil, fmt.Errorf("cannot read contents of non-file %q", location.ref.RealPath) + case stereoscopeFile.TypeDirectory: + return nil, fmt.Errorf("cannot read contents of non-file %q", location.Reference().RealPath) } - return r.img.FileContentsByRef(location.ref) + return r.img.OpenReference(location.Reference()) } -func (r *imageAllLayersResolver) FilesByMIMEType(types ...string) ([]Location, error) { - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) +func (r *ContainerImageAllLayers) FilesByMIMEType(types ...string) ([]file.Location, error) { + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) for idx, layerIdx := range r.layers { refs, err := r.img.Layers[layerIdx].SearchContext.SearchByMIMEType(types...) 
@@ -225,7 +226,7 @@ func (r *imageAllLayersResolver) FilesByMIMEType(types ...string) ([]Location, e return nil, err } for _, refResult := range refResults { - uniqueLocations = append(uniqueLocations, NewLocationFromImage(string(ref.RequestPath), refResult, r.img)) + uniqueLocations = append(uniqueLocations, file.NewLocationFromImage(string(ref.RequestPath), refResult, r.img)) } } } @@ -233,20 +234,20 @@ func (r *imageAllLayersResolver) FilesByMIMEType(types ...string) ([]Location, e return uniqueLocations, nil } -func (r *imageAllLayersResolver) AllLocations() <-chan Location { - results := make(chan Location) +func (r *ContainerImageAllLayers) AllLocations() <-chan file.Location { + results := make(chan file.Location) go func() { defer close(results) for _, layerIdx := range r.layers { tree := r.img.Layers[layerIdx].Tree - for _, ref := range tree.AllFiles(file.AllTypes()...) { - results <- NewLocationFromImage(string(ref.RealPath), ref, r.img) + for _, ref := range tree.AllFiles(stereoscopeFile.AllTypes()...) 
{ + results <- file.NewLocationFromImage(string(ref.RealPath), ref, r.img) } } }() return results } -func (r *imageAllLayersResolver) FileMetadataByLocation(location Location) (FileMetadata, error) { +func (r *ContainerImageAllLayers) FileMetadataByLocation(location file.Location) (file.Metadata, error) { return fileMetadataByLocation(r.img, location) } diff --git a/syft/source/image_all_layers_resolver_test.go b/syft/internal/fileresolver/container_image_all_layers_test.go similarity index 75% rename from syft/source/image_all_layers_resolver_test.go rename to syft/internal/fileresolver/container_image_all_layers_test.go index 86892b1884f..7fb04d56b78 100644 --- a/syft/source/image_all_layers_resolver_test.go +++ b/syft/internal/fileresolver/container_image_all_layers_test.go @@ -1,4 +1,4 @@ -package source +package fileresolver import ( "fmt" @@ -13,6 +13,7 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/stereoscope/pkg/imagetest" + "github.com/anchore/syft/syft/file" ) type resolution struct { @@ -93,7 +94,7 @@ func TestAllLayersResolver_FilesByPath(t *testing.T) { t.Run(c.name, func(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newAllLayersResolver(img) + resolver, err := NewFromContainerImageAllLayers(img) if err != nil { t.Fatalf("could not create resolver: %+v", err) } @@ -121,15 +122,15 @@ func TestAllLayersResolver_FilesByPath(t *testing.T) { for idx, actual := range refs { expected := c.resolutions[idx] - if string(actual.ref.RealPath) != expected.path { - t.Errorf("bad resolve path: '%s'!='%s'", string(actual.ref.RealPath), expected.path) + if string(actual.Reference().RealPath) != expected.path { + t.Errorf("bad resolve path: '%s'!='%s'", string(actual.Reference().RealPath), expected.path) } - if expected.path != "" && string(actual.ref.RealPath) != actual.RealPath { + if expected.path != "" && string(actual.Reference().RealPath) != actual.RealPath { t.Errorf("we should 
always prefer real paths over ones with links") } - layer := img.FileCatalog.Layer(actual.ref) + layer := img.FileCatalog.Layer(actual.Reference()) if layer.Metadata.Index != expected.layer { t.Errorf("bad resolve layer: '%d'!='%d'", layer.Metadata.Index, expected.layer) } @@ -207,7 +208,7 @@ func TestAllLayersResolver_FilesByGlob(t *testing.T) { t.Run(c.name, func(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newAllLayersResolver(img) + resolver, err := NewFromContainerImageAllLayers(img) if err != nil { t.Fatalf("could not create resolver: %+v", err) } @@ -224,15 +225,15 @@ func TestAllLayersResolver_FilesByGlob(t *testing.T) { for idx, actual := range refs { expected := c.resolutions[idx] - if string(actual.ref.RealPath) != expected.path { - t.Errorf("bad resolve path: '%s'!='%s'", string(actual.ref.RealPath), expected.path) + if string(actual.Reference().RealPath) != expected.path { + t.Errorf("bad resolve path: '%s'!='%s'", string(actual.Reference().RealPath), expected.path) } - if expected.path != "" && string(actual.ref.RealPath) != actual.RealPath { + if expected.path != "" && string(actual.Reference().RealPath) != actual.RealPath { t.Errorf("we should always prefer real paths over ones with links") } - layer := img.FileCatalog.Layer(actual.ref) + layer := img.FileCatalog.Layer(actual.Reference()) if layer.Metadata.Index != expected.layer { t.Errorf("bad resolve layer: '%d'!='%d'", layer.Metadata.Index, expected.layer) @@ -259,7 +260,7 @@ func Test_imageAllLayersResolver_FilesByMIMEType(t *testing.T) { t.Run(test.fixtureName, func(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", test.fixtureName) - resolver, err := newAllLayersResolver(img) + resolver, err := NewFromContainerImageAllLayers(img) assert.NoError(t, err) locations, err := resolver.FilesByMIMEType(test.mimeType) @@ -276,7 +277,7 @@ func Test_imageAllLayersResolver_FilesByMIMEType(t *testing.T) { func 
Test_imageAllLayersResolver_hasFilesystemIDInLocation(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-duplicate-path") - resolver, err := newAllLayersResolver(img) + resolver, err := NewFromContainerImageAllLayers(img) assert.NoError(t, err) locations, err := resolver.FilesByMIMEType("text/plain") @@ -336,7 +337,7 @@ func TestAllLayersImageResolver_FilesContents(t *testing.T) { t.Run(test.name, func(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newAllLayersResolver(img) + resolver, err := NewFromContainerImageAllLayers(img) assert.NoError(t, err) refs, err := resolver.FilesByPath(test.fixture) @@ -363,14 +364,14 @@ func TestAllLayersImageResolver_FilesContents_errorOnDirRequest(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newAllLayersResolver(img) + resolver, err := NewFromContainerImageAllLayers(img) assert.NoError(t, err) - var dirLoc *Location + var dirLoc *file.Location for loc := range resolver.AllLocations() { - entry, err := resolver.img.FileCatalog.Get(loc.ref) + entry, err := resolver.img.FileCatalog.Get(loc.Reference()) require.NoError(t, err) - if entry.Metadata.IsDir { + if entry.Metadata.IsDir() { dirLoc = &loc break } @@ -386,119 +387,119 @@ func TestAllLayersImageResolver_FilesContents_errorOnDirRequest(t *testing.T) { func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) { tests := []struct { name string - runner func(FileResolver) []Location - expected []Location + runner func(file.Resolver) []file.Location + expected []file.Location }{ { name: "by mimetype", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links should not show up when searching mimetype actualLocations, err := resolver.FilesByMIMEType("text/plain") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("/etc/group", 
"/etc/group"), - NewVirtualLocation("/etc/passwd", "/etc/passwd"), - NewVirtualLocation("/etc/shadow", "/etc/shadow"), - NewVirtualLocation("/file-1.txt", "/file-1.txt"), - NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1 + expected: []file.Location{ + file.NewVirtualLocation("/etc/group", "/etc/group"), + file.NewVirtualLocation("/etc/passwd", "/etc/passwd"), + file.NewVirtualLocation("/etc/shadow", "/etc/shadow"), + file.NewVirtualLocation("/file-1.txt", "/file-1.txt"), + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1 // note: we're de-duping the redundant access to file-3.txt // ... (there would usually be two copies) - NewVirtualLocation("/file-3.txt", "/file-3.txt"), - NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2 - NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // copy 1 - NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // copy 2 + file.NewVirtualLocation("/file-3.txt", "/file-3.txt"), + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2 + file.NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // copy 1 + file.NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // copy 2 }, }, { name: "by glob to links", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("*ink-*") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("/file-1.txt", "/link-1"), - NewVirtualLocation("/file-2.txt", "/link-2"), // copy 1 - NewVirtualLocation("/file-2.txt", "/link-2"), // copy 2 - NewVirtualLocation("/file-3.txt", "/link-within"), + expected: []file.Location{ + file.NewVirtualLocation("/file-1.txt", "/link-1"), + file.NewVirtualLocation("/file-2.txt", "/link-2"), // copy 1 + file.NewVirtualLocation("/file-2.txt", "/link-2"), // copy 2 + file.NewVirtualLocation("/file-3.txt", 
"/link-within"), }, }, { name: "by basename", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("**/file-2.txt") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1 - NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2 + expected: []file.Location{ + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1 + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2 }, }, { name: "by basename glob", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("**/file-?.txt") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("/file-1.txt", "/file-1.txt"), - NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1 - NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2 - NewVirtualLocation("/file-3.txt", "/file-3.txt"), - NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), - NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // when we copy into the link path, the same file-4.txt is copied + expected: []file.Location{ + file.NewVirtualLocation("/file-1.txt", "/file-1.txt"), + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1 + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2 + file.NewVirtualLocation("/file-3.txt", "/file-3.txt"), + file.NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), + file.NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // when we copy into the link path, the same file-4.txt is copied }, }, { name: "by extension", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) 
[]file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("**/*.txt") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("/file-1.txt", "/file-1.txt"), - NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1 - NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2 - NewVirtualLocation("/file-3.txt", "/file-3.txt"), - NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), - NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // when we copy into the link path, the same file-4.txt is copied + expected: []file.Location{ + file.NewVirtualLocation("/file-1.txt", "/file-1.txt"), + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1 + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2 + file.NewVirtualLocation("/file-3.txt", "/file-3.txt"), + file.NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), + file.NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // when we copy into the link path, the same file-4.txt is copied }, }, { name: "by path to degree 1 link", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links resolve to the final file actualLocations, err := resolver.FilesByPath("/link-2") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ // we have multiple copies across layers - NewVirtualLocation("/file-2.txt", "/link-2"), - NewVirtualLocation("/file-2.txt", "/link-2"), + file.NewVirtualLocation("/file-2.txt", "/link-2"), + file.NewVirtualLocation("/file-2.txt", "/link-2"), }, }, { name: "by path to degree 2 link", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // multiple links resolves to the final file actualLocations, err := resolver.FilesByPath("/link-indirect") assert.NoError(t, err) return 
actualLocations }, - expected: []Location{ + expected: []file.Location{ // we have multiple copies across layers - NewVirtualLocation("/file-2.txt", "/link-indirect"), - NewVirtualLocation("/file-2.txt", "/link-indirect"), + file.NewVirtualLocation("/file-2.txt", "/link-indirect"), + file.NewVirtualLocation("/file-2.txt", "/link-indirect"), }, }, } @@ -508,7 +509,7 @@ func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newAllLayersResolver(img) + resolver, err := NewFromContainerImageAllLayers(img) assert.NoError(t, err) actual := test.runner(resolver) @@ -527,7 +528,7 @@ func TestAllLayersResolver_AllLocations(t *testing.T) { arch = "aarch64" } - resolver, err := newAllLayersResolver(img) + resolver, err := NewFromContainerImageAllLayers(img) assert.NoError(t, err) paths := strset.New() diff --git a/syft/source/image_squash_resolver.go b/syft/internal/fileresolver/container_image_squash.go similarity index 65% rename from syft/source/image_squash_resolver.go rename to syft/internal/fileresolver/container_image_squash.go index d62927b309c..92b4a8a13ee 100644 --- a/syft/source/image_squash_resolver.go +++ b/syft/internal/fileresolver/container_image_squash.go @@ -1,41 +1,42 @@ -package source +package fileresolver import ( "fmt" "io" - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" "github.com/anchore/stereoscope/pkg/filetree" "github.com/anchore/stereoscope/pkg/image" + "github.com/anchore/syft/syft/file" ) -var _ FileResolver = (*imageSquashResolver)(nil) +var _ file.Resolver = (*ContainerImageSquash)(nil) -// imageSquashResolver implements path and content access for the Squashed source option for container image data sources. -type imageSquashResolver struct { +// ContainerImageSquash implements path and content access for the Squashed source option for container image data sources. 
+type ContainerImageSquash struct { img *image.Image } -// newImageSquashResolver returns a new resolver from the perspective of the squashed representation for the given image. -func newImageSquashResolver(img *image.Image) (*imageSquashResolver, error) { +// NewFromContainerImageSquash returns a new resolver from the perspective of the squashed representation for the given image. +func NewFromContainerImageSquash(img *image.Image) (*ContainerImageSquash, error) { if img.SquashedTree() == nil { return nil, fmt.Errorf("the image does not have have a squashed tree") } - return &imageSquashResolver{ + return &ContainerImageSquash{ img: img, }, nil } // HasPath indicates if the given path exists in the underlying source. -func (r *imageSquashResolver) HasPath(path string) bool { - return r.img.SquashedTree().HasPath(file.Path(path)) +func (r *ContainerImageSquash) HasPath(path string) bool { + return r.img.SquashedTree().HasPath(stereoscopeFile.Path(path)) } // FilesByPath returns all file.References that match the given paths within the squashed representation of the image. 
-func (r *imageSquashResolver) FilesByPath(paths ...string) ([]Location, error) { - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) +func (r *ContainerImageSquash) FilesByPath(paths ...string) ([]file.Location, error) { + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) for _, path := range paths { ref, err := r.img.SquashedSearchContext.SearchByPath(path, filetree.FollowBasenameLinks) @@ -56,7 +57,7 @@ func (r *imageSquashResolver) FilesByPath(paths ...string) ([]Location, error) { return nil, fmt.Errorf("unable to get file metadata for path=%q: %w", ref.RealPath, err) } // don't consider directories - if metadata.Metadata.IsDir { + if metadata.Metadata.IsDir() { continue } } @@ -69,7 +70,7 @@ func (r *imageSquashResolver) FilesByPath(paths ...string) ([]Location, error) { if resolvedRef.HasReference() && !uniqueFileIDs.Contains(*resolvedRef.Reference) { uniqueFileIDs.Add(*resolvedRef.Reference) - uniqueLocations = append(uniqueLocations, NewLocationFromImage(path, *resolvedRef.Reference, r.img)) + uniqueLocations = append(uniqueLocations, file.NewLocationFromImage(path, *resolvedRef.Reference, r.img)) } } @@ -78,9 +79,9 @@ func (r *imageSquashResolver) FilesByPath(paths ...string) ([]Location, error) { // FilesByGlob returns all file.References that match the given path glob pattern within the squashed representation of the image. 
// nolint:gocognit -func (r *imageSquashResolver) FilesByGlob(patterns ...string) ([]Location, error) { - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) +func (r *ContainerImageSquash) FilesByGlob(patterns ...string) ([]file.Location, error) { + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) for _, pattern := range patterns { results, err := r.img.SquashedSearchContext.SearchByGlob(pattern, filetree.FollowBasenameLinks) @@ -103,7 +104,7 @@ func (r *imageSquashResolver) FilesByGlob(patterns ...string) ([]Location, error return nil, fmt.Errorf("unable to get file metadata for path=%q: %w", result.RequestPath, err) } // don't consider directories - if metadata.Metadata.IsDir { + if metadata.Metadata.IsDir() { continue } } @@ -113,10 +114,10 @@ func (r *imageSquashResolver) FilesByGlob(patterns ...string) ([]Location, error return nil, fmt.Errorf("failed to find files by path (result=%+v): %w", result, err) } for _, resolvedLocation := range resolvedLocations { - if uniqueFileIDs.Contains(resolvedLocation.ref) { + if uniqueFileIDs.Contains(resolvedLocation.Reference()) { continue } - uniqueFileIDs.Add(resolvedLocation.ref) + uniqueFileIDs.Add(resolvedLocation.Reference()) uniqueLocations = append(uniqueLocations, resolvedLocation) } } @@ -127,8 +128,8 @@ func (r *imageSquashResolver) FilesByGlob(patterns ...string) ([]Location, error // RelativeFileByPath fetches a single file at the given path relative to the layer squash of the given reference. // This is helpful when attempting to find a file that is in the same layer or lower as another file. For the -// imageSquashResolver, this is a simple path lookup. -func (r *imageSquashResolver) RelativeFileByPath(_ Location, path string) *Location { +// ContainerImageSquash, this is a simple path lookup. 
+func (r *ContainerImageSquash) RelativeFileByPath(_ file.Location, path string) *file.Location { paths, err := r.FilesByPath(path) if err != nil { return nil @@ -142,14 +143,14 @@ func (r *imageSquashResolver) RelativeFileByPath(_ Location, path string) *Locat // FileContentsByLocation fetches file contents for a single file reference, regardless of the source layer. // If the path does not exist an error is returned. -func (r *imageSquashResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) { - entry, err := r.img.FileCatalog.Get(location.ref) +func (r *ContainerImageSquash) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { + entry, err := r.img.FileCatalog.Get(location.Reference()) if err != nil { return nil, fmt.Errorf("unable to get metadata for path=%q from file catalog: %w", location.RealPath, err) } switch entry.Metadata.Type { - case file.TypeSymLink, file.TypeHardLink: + case stereoscopeFile.TypeSymLink, stereoscopeFile.TypeHardLink: // the location we are searching may be a symlink, we should always work with the resolved file locations, err := r.FilesByPath(location.RealPath) if err != nil { @@ -164,39 +165,39 @@ func (r *imageSquashResolver) FileContentsByLocation(location Location) (io.Read default: return nil, fmt.Errorf("link resolution resulted in multiple results while resolving content location: %+v", location) } - case file.TypeDirectory: + case stereoscopeFile.TypeDirectory: return nil, fmt.Errorf("unable to get file contents for directory: %+v", location) } - return r.img.FileContentsByRef(location.ref) + return r.img.OpenReference(location.Reference()) } -func (r *imageSquashResolver) AllLocations() <-chan Location { - results := make(chan Location) +func (r *ContainerImageSquash) AllLocations() <-chan file.Location { + results := make(chan file.Location) go func() { defer close(results) - for _, ref := range r.img.SquashedTree().AllFiles(file.AllTypes()...) 
{ - results <- NewLocationFromImage(string(ref.RealPath), ref, r.img) + for _, ref := range r.img.SquashedTree().AllFiles(stereoscopeFile.AllTypes()...) { + results <- file.NewLocationFromImage(string(ref.RealPath), ref, r.img) } }() return results } -func (r *imageSquashResolver) FilesByMIMEType(types ...string) ([]Location, error) { +func (r *ContainerImageSquash) FilesByMIMEType(types ...string) ([]file.Location, error) { refs, err := r.img.SquashedSearchContext.SearchByMIMEType(types...) if err != nil { return nil, err } - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) for _, ref := range refs { if ref.HasReference() { if uniqueFileIDs.Contains(*ref.Reference) { continue } - location := NewLocationFromImage(string(ref.RequestPath), *ref.Reference, r.img) + location := file.NewLocationFromImage(string(ref.RequestPath), *ref.Reference, r.img) uniqueFileIDs.Add(*ref.Reference) uniqueLocations = append(uniqueLocations, location) @@ -206,6 +207,6 @@ func (r *imageSquashResolver) FilesByMIMEType(types ...string) ([]Location, erro return uniqueLocations, nil } -func (r *imageSquashResolver) FileMetadataByLocation(location Location) (FileMetadata, error) { +func (r *ContainerImageSquash) FileMetadataByLocation(location file.Location) (file.Metadata, error) { return fileMetadataByLocation(r.img, location) } diff --git a/syft/source/image_squash_resolver_test.go b/syft/internal/fileresolver/container_image_squash_test.go similarity index 71% rename from syft/source/image_squash_resolver_test.go rename to syft/internal/fileresolver/container_image_squash_test.go index 106de9a5e22..d65d0bccc88 100644 --- a/syft/source/image_squash_resolver_test.go +++ b/syft/internal/fileresolver/container_image_squash_test.go @@ -1,4 +1,4 @@ -package source +package fileresolver import ( "io" @@ -6,13 +6,12 @@ import ( "testing" 
"github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" "github.com/scylladb/go-set/strset" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/anchore/stereoscope/pkg/file" "github.com/anchore/stereoscope/pkg/imagetest" + "github.com/anchore/syft/syft/file" ) func TestImageSquashResolver_FilesByPath(t *testing.T) { @@ -73,7 +72,7 @@ func TestImageSquashResolver_FilesByPath(t *testing.T) { t.Run(c.name, func(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newImageSquashResolver(img) + resolver, err := NewFromContainerImageSquash(img) if err != nil { t.Fatalf("could not create resolver: %+v", err) } @@ -110,15 +109,15 @@ func TestImageSquashResolver_FilesByPath(t *testing.T) { actual := refs[0] - if string(actual.ref.RealPath) != c.resolvePath { - t.Errorf("bad resolve path: '%s'!='%s'", string(actual.ref.RealPath), c.resolvePath) + if string(actual.Reference().RealPath) != c.resolvePath { + t.Errorf("bad resolve path: '%s'!='%s'", string(actual.Reference().RealPath), c.resolvePath) } - if c.resolvePath != "" && string(actual.ref.RealPath) != actual.RealPath { + if c.resolvePath != "" && string(actual.Reference().RealPath) != actual.RealPath { t.Errorf("we should always prefer real paths over ones with links") } - layer := img.FileCatalog.Layer(actual.ref) + layer := img.FileCatalog.Layer(actual.Reference()) if layer.Metadata.Index != c.resolveLayer { t.Errorf("bad resolve layer: '%d'!='%d'", layer.Metadata.Index, c.resolveLayer) @@ -186,7 +185,7 @@ func TestImageSquashResolver_FilesByGlob(t *testing.T) { t.Run(c.name, func(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newImageSquashResolver(img) + resolver, err := NewFromContainerImageSquash(img) if err != nil { t.Fatalf("could not create resolver: %+v", err) } @@ -212,15 +211,15 @@ func TestImageSquashResolver_FilesByGlob(t *testing.T) { actual 
:= refs[0] - if string(actual.ref.RealPath) != c.resolvePath { - t.Errorf("bad resolve path: '%s'!='%s'", string(actual.ref.RealPath), c.resolvePath) + if string(actual.Reference().RealPath) != c.resolvePath { + t.Errorf("bad resolve path: '%s'!='%s'", string(actual.Reference().RealPath), c.resolvePath) } - if c.resolvePath != "" && string(actual.ref.RealPath) != actual.RealPath { + if c.resolvePath != "" && string(actual.Reference().RealPath) != actual.RealPath { t.Errorf("we should always prefer real paths over ones with links") } - layer := img.FileCatalog.Layer(actual.ref) + layer := img.FileCatalog.Layer(actual.Reference()) if layer.Metadata.Index != c.resolveLayer { t.Errorf("bad resolve layer: '%d'!='%d'", layer.Metadata.Index, c.resolveLayer) @@ -247,7 +246,7 @@ func Test_imageSquashResolver_FilesByMIMEType(t *testing.T) { t.Run(test.fixtureName, func(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", test.fixtureName) - resolver, err := newImageSquashResolver(img) + resolver, err := NewFromContainerImageSquash(img) assert.NoError(t, err) locations, err := resolver.FilesByMIMEType(test.mimeType) @@ -264,7 +263,7 @@ func Test_imageSquashResolver_FilesByMIMEType(t *testing.T) { func Test_imageSquashResolver_hasFilesystemIDInLocation(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-duplicate-path") - resolver, err := newImageSquashResolver(img) + resolver, err := NewFromContainerImageSquash(img) assert.NoError(t, err) locations, err := resolver.FilesByMIMEType("text/plain") @@ -322,7 +321,7 @@ func TestSquashImageResolver_FilesContents(t *testing.T) { t.Run(test.name, func(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newImageSquashResolver(img) + resolver, err := NewFromContainerImageSquash(img) assert.NoError(t, err) refs, err := resolver.FilesByPath(test.path) @@ -347,14 +346,14 @@ func TestSquashImageResolver_FilesContents_errorOnDirRequest(t 
*testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newImageSquashResolver(img) + resolver, err := NewFromContainerImageSquash(img) assert.NoError(t, err) - var dirLoc *Location + var dirLoc *file.Location for loc := range resolver.AllLocations() { - entry, err := resolver.img.FileCatalog.Get(loc.ref) + entry, err := resolver.img.FileCatalog.Get(loc.Reference()) require.NoError(t, err) - if entry.Metadata.IsDir { + if entry.Metadata.IsDir() { dirLoc = &loc break } @@ -370,162 +369,130 @@ func TestSquashImageResolver_FilesContents_errorOnDirRequest(t *testing.T) { func Test_imageSquashResolver_resolvesLinks(t *testing.T) { tests := []struct { name string - runner func(FileResolver) []Location - expected []Location + runner func(file.Resolver) []file.Location + expected []file.Location }{ { name: "by mimetype", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links should not show up when searching mimetype actualLocations, err := resolver.FilesByMIMEType("text/plain") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("/etc/group", "/etc/group"), - NewVirtualLocation("/etc/passwd", "/etc/passwd"), - NewVirtualLocation("/etc/shadow", "/etc/shadow"), - NewVirtualLocation("/file-1.txt", "/file-1.txt"), - NewVirtualLocation("/file-3.txt", "/file-3.txt"), - NewVirtualLocation("/file-2.txt", "/file-2.txt"), - NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), + expected: []file.Location{ + file.NewVirtualLocation("/etc/group", "/etc/group"), + file.NewVirtualLocation("/etc/passwd", "/etc/passwd"), + file.NewVirtualLocation("/etc/shadow", "/etc/shadow"), + file.NewVirtualLocation("/file-1.txt", "/file-1.txt"), + file.NewVirtualLocation("/file-3.txt", "/file-3.txt"), + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), + file.NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), }, }, { name: 
"by glob to links", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("*ink-*") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("/file-1.txt", "/link-1"), - NewVirtualLocation("/file-2.txt", "/link-2"), + expected: []file.Location{ + file.NewVirtualLocation("/file-1.txt", "/link-1"), + file.NewVirtualLocation("/file-2.txt", "/link-2"), // though this is a link, and it matches to the file, the resolver de-duplicates files // by the real path, so it is not included in the results - //NewVirtualLocation("/file-2.txt", "/link-indirect"), + //file.NewVirtualLocation("/file-2.txt", "/link-indirect"), - NewVirtualLocation("/file-3.txt", "/link-within"), + file.NewVirtualLocation("/file-3.txt", "/link-within"), }, }, { name: "by basename", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("**/file-2.txt") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ // this has two copies in the base image, which overwrites the same location - NewVirtualLocation("/file-2.txt", "/file-2.txt"), + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), }, }, { name: "by basename glob", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("**/file-?.txt") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("/file-1.txt", "/file-1.txt"), - NewVirtualLocation("/file-2.txt", "/file-2.txt"), - NewVirtualLocation("/file-3.txt", "/file-3.txt"), - NewVirtualLocation("/parent/file-4.txt", 
"/parent/file-4.txt"), + expected: []file.Location{ + file.NewVirtualLocation("/file-1.txt", "/file-1.txt"), + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), + file.NewVirtualLocation("/file-3.txt", "/file-3.txt"), + file.NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), }, }, { name: "by basename glob to links", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { actualLocations, err := resolver.FilesByGlob("**/link-*") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - - { - LocationData: LocationData{ - Coordinates: Coordinates{ - RealPath: "/file-1.txt", - }, - VirtualPath: "/link-1", - ref: file.Reference{RealPath: "/file-1.txt"}, - }, - }, - { - LocationData: LocationData{ - - Coordinates: Coordinates{ - RealPath: "/file-2.txt", - }, - VirtualPath: "/link-2", - ref: file.Reference{RealPath: "/file-2.txt"}, - }, - }, + expected: []file.Location{ + file.NewVirtualLocation("/file-1.txt", "/link-1"), + file.NewVirtualLocation("/file-2.txt", "/link-2"), + // we already have this real file path via another link, so only one is returned - //{ - // LocationData: LocationData{ - // Coordinates: Coordinates{ - // RealPath: "/file-2.txt", - // }, - // VirtualPath: "/link-indirect", - // ref: file.Reference{RealPath: "/file-2.txt"}, - // }, - //}, - { - LocationData: LocationData{ - Coordinates: Coordinates{ - RealPath: "/file-3.txt", - }, - VirtualPath: "/link-within", - ref: file.Reference{RealPath: "/file-3.txt"}, - }, - }, + // file.NewVirtualLocation("/file-2.txt", "/link-indirect"), + + file.NewVirtualLocation("/file-3.txt", "/link-within"), }, }, { name: "by extension", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("**/*.txt") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - 
NewVirtualLocation("/file-1.txt", "/file-1.txt"), - NewVirtualLocation("/file-2.txt", "/file-2.txt"), - NewVirtualLocation("/file-3.txt", "/file-3.txt"), - NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), + expected: []file.Location{ + file.NewVirtualLocation("/file-1.txt", "/file-1.txt"), + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), + file.NewVirtualLocation("/file-3.txt", "/file-3.txt"), + file.NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), }, }, { name: "by path to degree 1 link", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links resolve to the final file actualLocations, err := resolver.FilesByPath("/link-2") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ // we have multiple copies across layers - NewVirtualLocation("/file-2.txt", "/link-2"), + file.NewVirtualLocation("/file-2.txt", "/link-2"), }, }, { name: "by path to degree 2 link", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // multiple links resolves to the final file actualLocations, err := resolver.FilesByPath("/link-indirect") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ // we have multiple copies across layers - NewVirtualLocation("/file-2.txt", "/link-indirect"), + file.NewVirtualLocation("/file-2.txt", "/link-indirect"), }, }, } @@ -535,7 +502,7 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newImageSquashResolver(img) + resolver, err := NewFromContainerImageSquash(img) assert.NoError(t, err) actual := test.runner(resolver) @@ -546,30 +513,10 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) { } -func compareLocations(t *testing.T, expected, actual []Location) { - t.Helper() - ignoreUnexported := 
cmpopts.IgnoreFields(LocationData{}, "ref") - ignoreMetadata := cmpopts.IgnoreFields(LocationMetadata{}, "Annotations") - ignoreFS := cmpopts.IgnoreFields(Coordinates{}, "FileSystemID") - - sort.Sort(Locations(expected)) - sort.Sort(Locations(actual)) - - if d := cmp.Diff(expected, actual, - ignoreUnexported, - ignoreFS, - ignoreMetadata, - ); d != "" { - - t.Errorf("unexpected locations (-want +got):\n%s", d) - } - -} - func TestSquashResolver_AllLocations(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-files-deleted") - resolver, err := newImageSquashResolver(img) + resolver, err := NewFromContainerImageSquash(img) assert.NoError(t, err) paths := strset.New() diff --git a/syft/internal/fileresolver/deferred.go b/syft/internal/fileresolver/deferred.go new file mode 100644 index 00000000000..55dbbb1628b --- /dev/null +++ b/syft/internal/fileresolver/deferred.go @@ -0,0 +1,98 @@ +package fileresolver + +import ( + "io" + + "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" +) + +var _ file.Resolver = (*Deferred)(nil) + +func NewDeferred(creator func() (file.Resolver, error)) *Deferred { + return &Deferred{ + creator: creator, + } +} + +type Deferred struct { + creator func() (file.Resolver, error) + resolver file.Resolver +} + +func (d *Deferred) getResolver() (file.Resolver, error) { + if d.resolver == nil { + resolver, err := d.creator() + if err != nil { + return nil, err + } + d.resolver = resolver + } + return d.resolver, nil +} + +func (d *Deferred) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { + r, err := d.getResolver() + if err != nil { + return nil, err + } + return r.FileContentsByLocation(location) +} + +func (d *Deferred) HasPath(s string) bool { + r, err := d.getResolver() + if err != nil { + log.Debug("unable to get resolver: %v", err) + return false + } + return r.HasPath(s) +} + +func (d *Deferred) FilesByPath(paths ...string) ([]file.Location, error) { + r, err := 
d.getResolver() + if err != nil { + return nil, err + } + return r.FilesByPath(paths...) +} + +func (d *Deferred) FilesByGlob(patterns ...string) ([]file.Location, error) { + r, err := d.getResolver() + if err != nil { + return nil, err + } + return r.FilesByGlob(patterns...) +} + +func (d *Deferred) FilesByMIMEType(types ...string) ([]file.Location, error) { + r, err := d.getResolver() + if err != nil { + return nil, err + } + return r.FilesByMIMEType(types...) +} + +func (d *Deferred) RelativeFileByPath(location file.Location, path string) *file.Location { + r, err := d.getResolver() + if err != nil { + return nil + } + return r.RelativeFileByPath(location, path) +} + +func (d *Deferred) AllLocations() <-chan file.Location { + r, err := d.getResolver() + if err != nil { + log.Debug("unable to get resolver: %v", err) + return nil + } + return r.AllLocations() +} + +func (d *Deferred) FileMetadataByLocation(location file.Location) (file.Metadata, error) { + r, err := d.getResolver() + if err != nil { + return file.Metadata{}, err + } + return r.FileMetadataByLocation(location) +} diff --git a/syft/source/deferred_resolver_test.go b/syft/internal/fileresolver/deferred_test.go similarity index 68% rename from syft/source/deferred_resolver_test.go rename to syft/internal/fileresolver/deferred_test.go index c7cd166c305..61f592387be 100644 --- a/syft/source/deferred_resolver_test.go +++ b/syft/internal/fileresolver/deferred_test.go @@ -1,17 +1,19 @@ -package source +package fileresolver import ( "testing" "github.com/stretchr/testify/require" + + "github.com/anchore/syft/syft/file" ) func Test_NewDeferredResolver(t *testing.T) { creatorCalled := false - deferredResolver := NewDeferredResolver(func() (FileResolver, error) { + deferredResolver := NewDeferred(func() (file.Resolver, error) { creatorCalled = true - return NewMockResolverForPaths(), nil + return file.NewMockResolverForPaths(), nil }) require.False(t, creatorCalled) diff --git 
a/syft/source/directory_resolver.go b/syft/internal/fileresolver/directory.go similarity index 73% rename from syft/source/directory_resolver.go rename to syft/internal/fileresolver/directory.go index b68ce890344..2d634cf1eed 100644 --- a/syft/source/directory_resolver.go +++ b/syft/internal/fileresolver/directory.go @@ -1,4 +1,4 @@ -package source +package fileresolver import ( "errors" @@ -10,9 +10,10 @@ import ( "runtime" "strings" - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" "github.com/anchore/stereoscope/pkg/filetree" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" ) const WindowsOS = "windows" @@ -23,12 +24,12 @@ var unixSystemRuntimePrefixes = []string{ "/sys", } -var errSkipPath = errors.New("skip path") +var ErrSkipPath = errors.New("skip path") -var _ FileResolver = (*directoryResolver)(nil) +var _ file.Resolver = (*Directory)(nil) -// directoryResolver implements path and content access for the directory data source. -type directoryResolver struct { +// Directory implements path and content access for the directory data source. +type Directory struct { path string base string currentWdRelativeToRoot string @@ -39,8 +40,8 @@ type directoryResolver struct { indexer *directoryIndexer } -func newDirectoryResolver(root string, base string, pathFilters ...pathIndexVisitor) (*directoryResolver, error) { - r, err := newDirectoryResolverWithoutIndex(root, base, pathFilters...) +func NewFromDirectory(root string, base string, pathFilters ...PathIndexVisitor) (*Directory, error) { + r, err := newFromDirectoryWithoutIndex(root, base, pathFilters...) 
if err != nil { return nil, err } @@ -48,17 +49,11 @@ func newDirectoryResolver(root string, base string, pathFilters ...pathIndexVisi return r, r.buildIndex() } -func newDirectoryResolverWithoutIndex(root string, base string, pathFilters ...pathIndexVisitor) (*directoryResolver, error) { +func newFromDirectoryWithoutIndex(root string, base string, pathFilters ...PathIndexVisitor) (*Directory, error) { currentWD, err := os.Getwd() if err != nil { return nil, fmt.Errorf("could not get CWD: %w", err) } - // we have to account for the root being accessed through a symlink path and always resolve the real path. Otherwise - // we will not be able to normalize given paths that fall under the resolver - cleanCWD, err := filepath.EvalSymlinks(currentWD) - if err != nil { - return nil, fmt.Errorf("could not evaluate CWD symlinks: %w", err) - } cleanRoot, err := filepath.EvalSymlinks(root) if err != nil { @@ -79,7 +74,7 @@ func newDirectoryResolverWithoutIndex(root string, base string, pathFilters ...p var currentWdRelRoot string if path.IsAbs(cleanRoot) { - currentWdRelRoot, err = filepath.Rel(cleanCWD, cleanRoot) + currentWdRelRoot, err = filepath.Rel(currentWD, cleanRoot) if err != nil { return nil, fmt.Errorf("could not determine given root path to CWD: %w", err) } @@ -87,10 +82,10 @@ func newDirectoryResolverWithoutIndex(root string, base string, pathFilters ...p currentWdRelRoot = filepath.Clean(cleanRoot) } - return &directoryResolver{ + return &Directory{ path: cleanRoot, base: cleanBase, - currentWd: cleanCWD, + currentWd: currentWD, currentWdRelativeToRoot: currentWdRelRoot, tree: filetree.New(), index: filetree.NewIndex(), @@ -98,7 +93,7 @@ func newDirectoryResolverWithoutIndex(root string, base string, pathFilters ...p }, nil } -func (r *directoryResolver) buildIndex() error { +func (r *Directory) buildIndex() error { if r.indexer == nil { return fmt.Errorf("no directory indexer configured") } @@ -114,7 +109,7 @@ func (r *directoryResolver) buildIndex() error { 
return nil } -func (r directoryResolver) requestPath(userPath string) (string, error) { +func (r Directory) requestPath(userPath string) (string, error) { if filepath.IsAbs(userPath) { // don't allow input to potentially hop above root path userPath = path.Join(r.path, userPath) @@ -131,7 +126,8 @@ func (r directoryResolver) requestPath(userPath string) (string, error) { return userPath, nil } -func (r directoryResolver) responsePath(path string) string { +// responsePath takes a path from the underlying fs domain and converts it to a path that is relative to the root of the directory resolver. +func (r Directory) responsePath(path string) string { // check to see if we need to encode back to Windows from posix if runtime.GOOS == WindowsOS { path = posixToWindows(path) @@ -154,22 +150,22 @@ func (r directoryResolver) responsePath(path string) string { } // HasPath indicates if the given path exists in the underlying source. -func (r *directoryResolver) HasPath(userPath string) bool { +func (r *Directory) HasPath(userPath string) bool { requestPath, err := r.requestPath(userPath) if err != nil { return false } - return r.tree.HasPath(file.Path(requestPath)) + return r.tree.HasPath(stereoscopeFile.Path(requestPath)) } // Stringer to represent a directory path data source -func (r directoryResolver) String() string { +func (r Directory) String() string { return fmt.Sprintf("dir:%s", r.path) } // FilesByPath returns all file.References that match the given paths from the directory. 
-func (r directoryResolver) FilesByPath(userPaths ...string) ([]Location, error) { - var references = make([]Location, 0) +func (r Directory) FilesByPath(userPaths ...string) ([]file.Location, error) { + var references = make([]file.Location, 0) for _, userPath := range userPaths { userStrPath, err := r.requestPath(userPath) @@ -196,7 +192,7 @@ func (r directoryResolver) FilesByPath(userPaths ...string) ([]Location, error) } // don't consider directories - if entry.Metadata.IsDir { + if entry.Metadata.IsDir() { continue } @@ -206,7 +202,7 @@ func (r directoryResolver) FilesByPath(userPaths ...string) ([]Location, error) if ref.HasReference() { references = append(references, - NewVirtualLocationFromDirectory( + file.NewVirtualLocationFromDirectory( r.responsePath(string(ref.RealPath)), // the actual path relative to the resolver root r.responsePath(userStrPath), // the path used to access this file, relative to the resolver root *ref.Reference, @@ -219,9 +215,9 @@ func (r directoryResolver) FilesByPath(userPaths ...string) ([]Location, error) } // FilesByGlob returns all file.References that match the given path glob pattern from any layer in the image. 
-func (r directoryResolver) FilesByGlob(patterns ...string) ([]Location, error) { - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) +func (r Directory) FilesByGlob(patterns ...string) ([]file.Location, error) { + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) for _, pattern := range patterns { refVias, err := r.searchContext.SearchByGlob(pattern, filetree.FollowBasenameLinks) @@ -238,11 +234,11 @@ func (r directoryResolver) FilesByGlob(patterns ...string) ([]Location, error) { } // don't consider directories - if entry.Metadata.IsDir { + if entry.Metadata.IsDir() { continue } - loc := NewVirtualLocationFromDirectory( + loc := file.NewVirtualLocationFromDirectory( r.responsePath(string(refVia.Reference.RealPath)), // the actual path relative to the resolver root r.responsePath(string(refVia.RequestPath)), // the path used to access this file, relative to the resolver root *refVia.Reference, @@ -257,8 +253,8 @@ func (r directoryResolver) FilesByGlob(patterns ...string) ([]Location, error) { // RelativeFileByPath fetches a single file at the given path relative to the layer squash of the given reference. // This is helpful when attempting to find a file that is in the same layer or lower as another file. For the -// directoryResolver, this is a simple path lookup. -func (r *directoryResolver) RelativeFileByPath(_ Location, path string) *Location { +// Directory, this is a simple path lookup. +func (r *Directory) RelativeFileByPath(_ file.Location, path string) *file.Location { paths, err := r.FilesByPath(path) if err != nil { return nil @@ -272,54 +268,54 @@ func (r *directoryResolver) RelativeFileByPath(_ Location, path string) *Locatio // FileContentsByLocation fetches file contents for a single file reference relative to a directory. // If the path does not exist an error is returned. 
-func (r directoryResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) { - if location.ref.RealPath == "" { +func (r Directory) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { + if location.RealPath == "" { return nil, errors.New("empty path given") } - entry, err := r.index.Get(location.ref) + entry, err := r.index.Get(location.Reference()) if err != nil { return nil, err } // don't consider directories - if entry.Type == file.TypeDirectory { - return nil, fmt.Errorf("cannot read contents of non-file %q", location.ref.RealPath) + if entry.Type == stereoscopeFile.TypeDirectory { + return nil, fmt.Errorf("cannot read contents of non-file %q", location.Reference().RealPath) } // RealPath is posix so for windows directory resolver we need to translate // to its true on disk path. - filePath := string(location.ref.RealPath) + filePath := string(location.Reference().RealPath) if runtime.GOOS == WindowsOS { filePath = posixToWindows(filePath) } - return file.NewLazyReadCloser(filePath), nil + return stereoscopeFile.NewLazyReadCloser(filePath), nil } -func (r *directoryResolver) AllLocations() <-chan Location { - results := make(chan Location) +func (r *Directory) AllLocations() <-chan file.Location { + results := make(chan file.Location) go func() { defer close(results) - for _, ref := range r.tree.AllFiles(file.AllTypes()...) { - results <- NewLocationFromDirectory(r.responsePath(string(ref.RealPath)), ref) + for _, ref := range r.tree.AllFiles(stereoscopeFile.AllTypes()...) 
{ + results <- file.NewLocationFromDirectory(r.responsePath(string(ref.RealPath)), ref) } }() return results } -func (r *directoryResolver) FileMetadataByLocation(location Location) (FileMetadata, error) { - entry, err := r.index.Get(location.ref) +func (r *Directory) FileMetadataByLocation(location file.Location) (file.Metadata, error) { + entry, err := r.index.Get(location.Reference()) if err != nil { - return FileMetadata{}, fmt.Errorf("location: %+v : %w", location, os.ErrNotExist) + return file.Metadata{}, fmt.Errorf("location: %+v : %w", location, os.ErrNotExist) } return entry.Metadata, nil } -func (r *directoryResolver) FilesByMIMEType(types ...string) ([]Location, error) { - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) +func (r *Directory) FilesByMIMEType(types ...string) ([]file.Location, error) { + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) refVias, err := r.searchContext.SearchByMIMEType(types...) 
if err != nil { @@ -332,7 +328,7 @@ func (r *directoryResolver) FilesByMIMEType(types ...string) ([]Location, error) if uniqueFileIDs.Contains(*refVia.Reference) { continue } - location := NewLocationFromDirectory( + location := file.NewLocationFromDirectory( r.responsePath(string(refVia.Reference.RealPath)), *refVia.Reference, ) diff --git a/syft/source/directory_indexer.go b/syft/internal/fileresolver/directory_indexer.go similarity index 77% rename from syft/source/directory_indexer.go rename to syft/internal/fileresolver/directory_indexer.go index e840a9ddf69..b4383d75d02 100644 --- a/syft/source/directory_indexer.go +++ b/syft/internal/fileresolver/directory_indexer.go @@ -1,4 +1,4 @@ -package source +package fileresolver import ( "errors" @@ -8,6 +8,7 @@ import ( "path" "path/filepath" "runtime" + "strings" "github.com/wagoodman/go-partybus" "github.com/wagoodman/go-progress" @@ -20,30 +21,30 @@ import ( "github.com/anchore/syft/syft/event" ) -type pathIndexVisitor func(string, os.FileInfo, error) error +type PathIndexVisitor func(string, os.FileInfo, error) error type directoryIndexer struct { path string base string - pathIndexVisitors []pathIndexVisitor + pathIndexVisitors []PathIndexVisitor errPaths map[string]error tree filetree.ReadWriter index filetree.Index } -func newDirectoryIndexer(path, base string, visitors ...pathIndexVisitor) *directoryIndexer { +func newDirectoryIndexer(path, base string, visitors ...PathIndexVisitor) *directoryIndexer { i := &directoryIndexer{ path: path, base: base, tree: filetree.New(), index: filetree.NewIndex(), - pathIndexVisitors: append([]pathIndexVisitor{requireFileInfo, disallowByFileType, disallowUnixSystemRuntimePath}, visitors...), + pathIndexVisitors: append([]PathIndexVisitor{requireFileInfo, disallowByFileType, disallowUnixSystemRuntimePath}, visitors...), errPaths: make(map[string]error), } // these additional stateful visitors should be the first thing considered when walking / indexing i.pathIndexVisitors = 
append( - []pathIndexVisitor{ + []PathIndexVisitor{ i.disallowRevisitingVisitor, i.disallowFileAccessErr, }, @@ -119,6 +120,22 @@ func (r *directoryIndexer) indexTree(root string, stager *progress.Stage) ([]str return roots, nil } + shouldIndexFullTree, err := isRealPath(root) + if err != nil { + return nil, err + } + + if !shouldIndexFullTree { + newRoots, err := r.indexBranch(root, stager) + if err != nil { + return nil, fmt.Errorf("unable to index branch=%q: %w", root, err) + } + + roots = append(roots, newRoots...) + + return roots, nil + } + err = filepath.Walk(root, func(path string, info os.FileInfo, err error) error { stager.Current = path @@ -143,6 +160,85 @@ func (r *directoryIndexer) indexTree(root string, stager *progress.Stage) ([]str return roots, nil } +func isRealPath(root string) (bool, error) { + rootParent := filepath.Clean(filepath.Dir(root)) + + realRootParent, err := filepath.EvalSymlinks(rootParent) + if err != nil { + return false, err + } + + realRootParent = filepath.Clean(realRootParent) + + return rootParent == realRootParent, nil +} + +func (r *directoryIndexer) indexBranch(root string, stager *progress.Stage) ([]string, error) { + rootRealPath, err := filepath.EvalSymlinks(root) + if err != nil { + return nil, err + } + + // there is a symlink within the path to the root, we need to index the real root parent first + // then capture the symlinks to the root path + roots, err := r.indexTree(rootRealPath, stager) + if err != nil { + return nil, fmt.Errorf("unable to index real root=%q: %w", rootRealPath, err) + } + + // walk down all ancestor paths and shallow-add non-existing elements to the tree + for idx, p := range allContainedPaths(root) { + var targetPath string + if idx != 0 { + parent := path.Dir(p) + cleanParent, err := filepath.EvalSymlinks(parent) + if err != nil { + return nil, fmt.Errorf("unable to evaluate symlink for contained path parent=%q: %w", parent, err) + } + targetPath = filepath.Join(cleanParent, filepath.Base(p)) 
+ } else { + targetPath = p + } + + stager.Current = targetPath + + lstat, err := os.Lstat(targetPath) + newRoot, err := r.indexPath(targetPath, lstat, err) + if err != nil && !errors.Is(err, ErrSkipPath) && !errors.Is(err, fs.SkipDir) { + return nil, fmt.Errorf("unable to index ancestor path=%q: %w", targetPath, err) + } + if newRoot != "" { + roots = append(roots, newRoot) + } + } + + return roots, nil +} + +func allContainedPaths(p string) []string { + var all []string + var currentPath string + + cleanPath := strings.TrimSpace(p) + + if cleanPath == "" { + return nil + } + + // iterate through all parts of the path, replacing path elements with link resolutions where possible. + for idx, part := range strings.Split(filepath.Clean(cleanPath), file.DirSeparator) { + if idx == 0 && part == "" { + currentPath = file.DirSeparator + continue + } + + // cumulatively gather where we are currently at and provide a rich object + currentPath = path.Join(currentPath, part) + all = append(all, currentPath) + } + return all +} + func (r *directoryIndexer) indexPath(path string, info os.FileInfo, err error) (string, error) { // ignore any path which a filter function returns true for _, filterFn := range r.pathIndexVisitors { @@ -181,7 +277,7 @@ func (r *directoryIndexer) indexPath(path string, info os.FileInfo, err error) ( func (r *directoryIndexer) disallowFileAccessErr(path string, _ os.FileInfo, err error) error { if r.isFileAccessErr(path, err) { - return errSkipPath + return ErrSkipPath } return nil } @@ -307,11 +403,11 @@ func (r *directoryIndexer) disallowRevisitingVisitor(path string, _ os.FileInfo, // - link destinations twice, once for the real file and another through the virtual path // - infinite link cycles if indexed, metadata := r.hasBeenIndexed(path); indexed { - if metadata.IsDir { + if metadata.IsDir() { // signal to walk() that we should skip this directory entirely return fs.SkipDir } - return errSkipPath + return ErrSkipPath } return nil } @@ -330,7 
+426,7 @@ func disallowByFileType(_ string, info os.FileInfo, _ error) error { } switch file.TypeFromMode(info.Mode()) { case file.TypeCharacterDevice, file.TypeSocket, file.TypeBlockDevice, file.TypeFIFO, file.TypeIrregular: - return errSkipPath + return ErrSkipPath // note: symlinks that point to these files may still get by. // We handle this later in processing to help prevent against infinite links traversal. } @@ -340,7 +436,7 @@ func disallowByFileType(_ string, info os.FileInfo, _ error) error { func requireFileInfo(_ string, info os.FileInfo, _ error) error { if info == nil { - return errSkipPath + return ErrSkipPath } return nil } diff --git a/syft/source/directory_indexer_test.go b/syft/internal/fileresolver/directory_indexer_test.go similarity index 85% rename from syft/source/directory_indexer_test.go rename to syft/internal/fileresolver/directory_indexer_test.go index b6403559d16..3e5c128cde3 100644 --- a/syft/source/directory_indexer_test.go +++ b/syft/internal/fileresolver/directory_indexer_test.go @@ -1,4 +1,4 @@ -package source +package fileresolver import ( "io/fs" @@ -172,7 +172,7 @@ func TestDirectoryIndexer_indexPath_skipsNilFileInfo(t *testing.T) { } func TestDirectoryIndexer_index(t *testing.T) { - // note: this test is testing the effects from newDirectoryResolver, indexTree, and addPathToIndex + // note: this test is testing the effects from NewFromDirectory, indexTree, and addPathToIndex indexer := newDirectoryIndexer("test-fixtures/system_paths/target", "") tree, index, err := indexer.build() require.NoError(t, err) @@ -226,8 +226,8 @@ func TestDirectoryIndexer_SkipsAlreadyVisitedLinkDestinations(t *testing.T) { var observedPaths []string pathObserver := func(p string, _ os.FileInfo, _ error) error { fields := strings.Split(p, "test-fixtures/symlinks-prune-indexing") - if len(fields) != 2 { - t.Fatalf("unable to parse path: %s", p) + if len(fields) < 2 { + return nil } clean := strings.TrimLeft(fields[1], "/") if clean != "" { @@ -237,7 
+237,7 @@ func TestDirectoryIndexer_SkipsAlreadyVisitedLinkDestinations(t *testing.T) { } resolver := newDirectoryIndexer("./test-fixtures/symlinks-prune-indexing", "") // we want to cut ahead of any possible filters to see what paths are considered for indexing (closest to walking) - resolver.pathIndexVisitors = append([]pathIndexVisitor{pathObserver}, resolver.pathIndexVisitors...) + resolver.pathIndexVisitors = append([]PathIndexVisitor{pathObserver}, resolver.pathIndexVisitors...) // note: this test is NOT about the effects left on the tree or the index, but rather the WHICH paths that are // considered for indexing and HOW traversal prunes paths that have already been visited @@ -261,9 +261,11 @@ func TestDirectoryIndexer_SkipsAlreadyVisitedLinkDestinations(t *testing.T) { "path/5/6/7/8/dont-index-me-twice-either.txt", "path/file.txt", // everything below is after the original tree is indexed, and we are now indexing additional roots from symlinks - "path", // considered from symlink before-path, but pruned - "before-path/file.txt", // considered from symlink c-file.txt, but pruned - "before-path", // considered from symlink c-path, but pruned + "path", // considered from symlink before-path, but pruned + "path/file.txt", // leaf + "before-path", // considered from symlink c-path, but pruned + "path/file.txt", // leaf + "before-path", // considered from symlink c-path, but pruned } assert.Equal(t, expected, observedPaths, "visited paths differ \n %s", cmp.Diff(expected, observedPaths)) @@ -282,7 +284,7 @@ func TestDirectoryIndexer_IndexesAllTypes(t *testing.T) { for _, ref := range allRefs { fields := strings.Split(string(ref.RealPath), "test-fixtures/symlinks-prune-indexing") if len(fields) != 2 { - t.Fatalf("unable to parse path: %s", ref.RealPath) + continue } clean := strings.TrimLeft(fields[1], "/") if clean == "" { @@ -326,3 +328,58 @@ func TestDirectoryIndexer_IndexesAllTypes(t *testing.T) { } } + +func Test_allContainedPaths(t *testing.T) { + + tests 
:= []struct { + name string + path string + want []string + }{ + { + name: "empty", + path: "", + want: nil, + }, + { + name: "single relative", + path: "a", + want: []string{"a"}, + }, + { + name: "single absolute", + path: "/a", + want: []string{"/a"}, + }, + { + name: "multiple relative", + path: "a/b/c", + want: []string{"a", "a/b", "a/b/c"}, + }, + { + name: "multiple absolute", + path: "/a/b/c", + want: []string{"/a", "/a/b", "/a/b/c"}, + }, + { + name: "multiple absolute with extra slashs", + path: "///a/b//c/", + want: []string{"/a", "/a/b", "/a/b/c"}, + }, + { + name: "relative with single dot", + path: "a/./b", + want: []string{"a", "a/b"}, + }, + { + name: "relative with double single dot", + path: "a/../b", + want: []string{"b"}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, allContainedPaths(tt.path)) + }) + } +} diff --git a/syft/internal/fileresolver/directory_test.go b/syft/internal/fileresolver/directory_test.go new file mode 100644 index 00000000000..09d135f9c2b --- /dev/null +++ b/syft/internal/fileresolver/directory_test.go @@ -0,0 +1,1448 @@ +//go:build !windows +// +build !windows + +package fileresolver + +import ( + "io" + "io/fs" + "os" + "path/filepath" + "sort" + "strings" + "testing" + "time" + + "github.com/google/go-cmp/cmp" + "github.com/scylladb/go-set/strset" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" + "github.com/anchore/syft/syft/file" +) + +func TestDirectoryResolver_FilesByPath_request_response(t *testing.T) { + // / + // somewhere/ + // outside.txt + // root-link -> ./ + // path/ + // to/ + // abs-inside.txt -> /path/to/the/file.txt # absolute link to somewhere inside of the root + // rel-inside.txt -> ./the/file.txt # relative link to somewhere inside of the root + // the/ + // file.txt + // abs-outside.txt -> /somewhere/outside.txt # absolute link to outside of the root + 
// rel-outside -> ../../../somewhere/outside.txt # relative link to outside of the root + // + + testDir, err := os.Getwd() + require.NoError(t, err) + relative := filepath.Join("test-fixtures", "req-resp") + absolute := filepath.Join(testDir, relative) + + absInsidePath := filepath.Join(absolute, "path", "to", "abs-inside.txt") + absOutsidePath := filepath.Join(absolute, "path", "to", "the", "abs-outside.txt") + + relativeViaLink := filepath.Join(relative, "root-link") + absoluteViaLink := filepath.Join(absolute, "root-link") + + relativeViaDoubleLink := filepath.Join(relative, "root-link", "root-link") + absoluteViaDoubleLink := filepath.Join(absolute, "root-link", "root-link") + + cleanup := func() { + _ = os.Remove(absInsidePath) + _ = os.Remove(absOutsidePath) + } + + // ensure the absolute symlinks are cleaned up from any previous runs + cleanup() + + require.NoError(t, os.Symlink(filepath.Join(absolute, "path", "to", "the", "file.txt"), absInsidePath)) + require.NoError(t, os.Symlink(filepath.Join(absolute, "somewhere", "outside.txt"), absOutsidePath)) + + t.Cleanup(cleanup) + + cases := []struct { + name string + cwd string + root string + base string + input string + expectedRealPath string + expectedVirtualPath string + }{ + { + name: "relative root, relative request, direct", + root: relative, + input: "path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "abs root, relative request, direct", + root: absolute, + input: "path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "relative root, abs request, direct", + root: relative, + input: "/path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "abs root, abs request, direct", + root: absolute, + input: "/path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + // cwd within root... 
+ { + name: "relative root, relative request, direct, cwd within root", + cwd: filepath.Join(relative, "path/to"), + root: "../../", + input: "path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "abs root, relative request, direct, cwd within root", + cwd: filepath.Join(relative, "path/to"), + root: absolute, + input: "path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "relative root, abs request, direct, cwd within root", + cwd: filepath.Join(relative, "path/to"), + root: "../../", + input: "/path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "abs root, abs request, direct, cwd within root", + cwd: filepath.Join(relative, "path/to"), + + root: absolute, + input: "/path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + // cwd within symlink root... + { + name: "relative root, relative request, direct, cwd within symlink root", + cwd: relativeViaLink, + root: "./", + input: "path/to/the/file.txt", + // note: why not expect "path/to/the/file.txt" here? + // this is because we don't know that the path used to access this path (which is a link within + // the root) resides within the root. Without this information it appears as if this file resides + // outside the root. + expectedRealPath: filepath.Join(absolute, "path/to/the/file.txt"), + //expectedRealPath: "path/to/the/file.txt", + expectedVirtualPath: "path/to/the/file.txt", + }, + { + name: "abs root, relative request, direct, cwd within symlink root", + cwd: relativeViaLink, + root: absoluteViaLink, + input: "path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "relative root, abs request, direct, cwd within symlink root", + cwd: relativeViaLink, + root: "./", + input: "/path/to/the/file.txt", + // note: why not expect "path/to/the/file.txt" here? 
+ // this is because we don't know that the path used to access this path (which is a link within + // the root) resides within the root. Without this information it appears as if this file resides + // outside the root. + expectedRealPath: filepath.Join(absolute, "path/to/the/file.txt"), + //expectedRealPath: "path/to/the/file.txt", + expectedVirtualPath: "path/to/the/file.txt", + }, + { + name: "abs root, abs request, direct, cwd within symlink root", + cwd: relativeViaLink, + root: absoluteViaLink, + input: "/path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + // cwd within symlink root, request nested within... + { + name: "relative root, relative nested request, direct, cwd within symlink root", + cwd: relativeViaLink, + root: "./path", + input: "to/the/file.txt", + // note: why not expect "to/the/file.txt" here? + // this is because we don't know that the path used to access this path (which is a link within + // the root) resides within the root. Without this information it appears as if this file resides + // outside the root. + expectedRealPath: filepath.Join(absolute, "path/to/the/file.txt"), + //expectedRealPath: "to/the/file.txt", + expectedVirtualPath: "to/the/file.txt", + }, + { + name: "abs root, relative nested request, direct, cwd within symlink root", + cwd: relativeViaLink, + root: filepath.Join(absoluteViaLink, "path"), + input: "to/the/file.txt", + expectedRealPath: "to/the/file.txt", + }, + { + name: "relative root, abs nested request, direct, cwd within symlink root", + cwd: relativeViaLink, + root: "./path", + input: "/to/the/file.txt", + // note: why not expect "to/the/file.txt" here? + // this is because we don't know that the path used to access this path (which is a link within + // the root) resides within the root. Without this information it appears as if this file resides + // outside the root. 
+ expectedRealPath: filepath.Join(absolute, "path/to/the/file.txt"), + //expectedRealPath: "to/the/file.txt", + expectedVirtualPath: "to/the/file.txt", + }, + { + name: "abs root, abs nested request, direct, cwd within symlink root", + cwd: relativeViaLink, + root: filepath.Join(absoluteViaLink, "path"), + input: "/to/the/file.txt", + expectedRealPath: "to/the/file.txt", + }, + // cwd within DOUBLE symlink root... + { + name: "relative root, relative request, direct, cwd within (double) symlink root", + cwd: relativeViaDoubleLink, + root: "./", + input: "path/to/the/file.txt", + // note: why not expect "path/to/the/file.txt" here? + // this is because we don't know that the path used to access this path (which is a link within + // the root) resides within the root. Without this information it appears as if this file resides + // outside the root. + expectedRealPath: filepath.Join(absolute, "path/to/the/file.txt"), + //expectedRealPath: "path/to/the/file.txt", + expectedVirtualPath: "path/to/the/file.txt", + }, + { + name: "abs root, relative request, direct, cwd within (double) symlink root", + cwd: relativeViaDoubleLink, + root: absoluteViaDoubleLink, + input: "path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "relative root, abs request, direct, cwd within (double) symlink root", + cwd: relativeViaDoubleLink, + root: "./", + input: "/path/to/the/file.txt", + // note: why not expect "path/to/the/file.txt" here? + // this is because we don't know that the path used to access this path (which is a link within + // the root) resides within the root. Without this information it appears as if this file resides + // outside the root. 
+ expectedRealPath: filepath.Join(absolute, "path/to/the/file.txt"), + //expectedRealPath: "path/to/the/file.txt", + expectedVirtualPath: "path/to/the/file.txt", + }, + { + name: "abs root, abs request, direct, cwd within (double) symlink root", + cwd: relativeViaDoubleLink, + root: absoluteViaDoubleLink, + input: "/path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + // cwd within DOUBLE symlink root, request nested within... + { + name: "relative root, relative nested request, direct, cwd within (double) symlink root", + cwd: relativeViaDoubleLink, + root: "./path", + input: "to/the/file.txt", + // note: why not expect "path/to/the/file.txt" here? + // this is because we don't know that the path used to access this path (which is a link within + // the root) resides within the root. Without this information it appears as if this file resides + // outside the root. + expectedRealPath: filepath.Join(absolute, "path/to/the/file.txt"), + //expectedRealPath: "to/the/file.txt", + expectedVirtualPath: "to/the/file.txt", + }, + { + name: "abs root, relative nested request, direct, cwd within (double) symlink root", + cwd: relativeViaDoubleLink, + root: filepath.Join(absoluteViaDoubleLink, "path"), + input: "to/the/file.txt", + expectedRealPath: "to/the/file.txt", + }, + { + name: "relative root, abs nested request, direct, cwd within (double) symlink root", + cwd: relativeViaDoubleLink, + root: "./path", + input: "/to/the/file.txt", + // note: why not expect "path/to/the/file.txt" here? + // this is because we don't know that the path used to access this path (which is a link within + // the root) resides within the root. Without this information it appears as if this file resides + // outside the root. 
+ expectedRealPath: filepath.Join(absolute, "path/to/the/file.txt"), + //expectedRealPath: "to/the/file.txt", + expectedVirtualPath: "to/the/file.txt", + }, + { + name: "abs root, abs nested request, direct, cwd within (double) symlink root", + cwd: relativeViaDoubleLink, + root: filepath.Join(absoluteViaDoubleLink, "path"), + input: "/to/the/file.txt", + expectedRealPath: "to/the/file.txt", + }, + // cwd within DOUBLE symlink root, request nested DEEP within... + { + name: "relative root, relative nested request, direct, cwd deep within (double) symlink root", + cwd: filepath.Join(relativeViaDoubleLink, "path", "to"), + root: "../", + input: "to/the/file.txt", + // note: why not expect "path/to/the/file.txt" here? + // this is because we don't know that the path used to access this path (which is a link within + // the root) resides within the root. Without this information it appears as if this file resides + // outside the root. + expectedRealPath: filepath.Join(absolute, "path/to/the/file.txt"), + //expectedRealPath: "to/the/file.txt", + expectedVirtualPath: "to/the/file.txt", + }, + { + name: "abs root, relative nested request, direct, cwd deep within (double) symlink root", + cwd: filepath.Join(relativeViaDoubleLink, "path", "to"), + root: filepath.Join(absoluteViaDoubleLink, "path"), + input: "to/the/file.txt", + expectedRealPath: "to/the/file.txt", + }, + { + name: "relative root, abs nested request, direct, cwd deep within (double) symlink root", + cwd: filepath.Join(relativeViaDoubleLink, "path", "to"), + root: "../", + input: "/to/the/file.txt", + // note: why not expect "path/to/the/file.txt" here? + // this is because we don't know that the path used to access this path (which is a link within + // the root) resides within the root. Without this information it appears as if this file resides + // outside the root. 
+ expectedRealPath: filepath.Join(absolute, "path/to/the/file.txt"), + //expectedRealPath: "to/the/file.txt", + expectedVirtualPath: "to/the/file.txt", + }, + { + name: "abs root, abs nested request, direct, cwd deep within (double) symlink root", + cwd: filepath.Join(relativeViaDoubleLink, "path", "to"), + root: filepath.Join(absoluteViaDoubleLink, "path"), + input: "/to/the/file.txt", + expectedRealPath: "to/the/file.txt", + }, + // link to outside of root cases... + { + name: "relative root, relative request, abs indirect (outside of root)", + root: filepath.Join(relative, "path"), + input: "to/the/abs-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/abs-outside.txt", + }, + { + name: "abs root, relative request, abs indirect (outside of root)", + root: filepath.Join(absolute, "path"), + input: "to/the/abs-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/abs-outside.txt", + }, + { + name: "relative root, abs request, abs indirect (outside of root)", + root: filepath.Join(relative, "path"), + input: "/to/the/abs-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/abs-outside.txt", + }, + { + name: "abs root, abs request, abs indirect (outside of root)", + root: filepath.Join(absolute, "path"), + input: "/to/the/abs-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/abs-outside.txt", + }, + { + name: "relative root, relative request, relative indirect (outside of root)", + root: filepath.Join(relative, "path"), + input: "to/the/rel-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "abs root, relative request, relative indirect (outside of root)", + root: filepath.Join(absolute, "path"), + input: 
"to/the/rel-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "relative root, abs request, relative indirect (outside of root)", + root: filepath.Join(relative, "path"), + input: "/to/the/rel-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "abs root, abs request, relative indirect (outside of root)", + root: filepath.Join(absolute, "path"), + input: "/to/the/rel-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/rel-outside.txt", + }, + // link to outside of root cases... cwd within symlink root + { + name: "relative root, relative request, abs indirect (outside of root), cwd within symlink root", + cwd: relativeViaLink, + root: "path", + input: "to/the/abs-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/abs-outside.txt", + }, + { + name: "abs root, relative request, abs indirect (outside of root), cwd within symlink root", + cwd: relativeViaLink, + root: filepath.Join(absolute, "path"), + input: "to/the/abs-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/abs-outside.txt", + }, + { + name: "relative root, abs request, abs indirect (outside of root), cwd within symlink root", + cwd: relativeViaLink, + root: "path", + input: "/to/the/abs-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/abs-outside.txt", + }, + { + name: "abs root, abs request, abs indirect (outside of root), cwd within symlink root", + cwd: relativeViaLink, + root: filepath.Join(absolute, "path"), + input: "/to/the/abs-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: 
"to/the/abs-outside.txt", + }, + { + name: "relative root, relative request, relative indirect (outside of root), cwd within symlink root", + cwd: relativeViaLink, + root: "path", + input: "to/the/rel-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "abs root, relative request, relative indirect (outside of root), cwd within symlink root", + cwd: relativeViaLink, + root: filepath.Join(absolute, "path"), + input: "to/the/rel-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "relative root, abs request, relative indirect (outside of root), cwd within symlink root", + cwd: relativeViaLink, + root: "path", + input: "/to/the/rel-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "abs root, abs request, relative indirect (outside of root), cwd within symlink root", + cwd: relativeViaLink, + root: filepath.Join(absolute, "path"), + input: "/to/the/rel-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "relative root, relative request, relative indirect (outside of root), cwd within DOUBLE symlink root", + cwd: relativeViaDoubleLink, + root: "path", + input: "to/the/rel-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "abs root, relative request, relative indirect (outside of root), cwd within DOUBLE symlink root", + cwd: relativeViaDoubleLink, + root: filepath.Join(absolute, "path"), + input: "to/the/rel-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "relative root, abs 
request, relative indirect (outside of root), cwd within DOUBLE symlink root", + cwd: relativeViaDoubleLink, + root: "path", + input: "/to/the/rel-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "abs root, abs request, relative indirect (outside of root), cwd within DOUBLE symlink root", + cwd: relativeViaDoubleLink, + root: filepath.Join(absolute, "path"), + input: "/to/the/rel-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/rel-outside.txt", + }, + } + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + + // we need to mimic a shell, otherwise we won't get a path within a symlink + targetPath := filepath.Join(testDir, c.cwd) + t.Setenv("PWD", filepath.Clean(targetPath)) + + require.NoError(t, err) + require.NoError(t, os.Chdir(targetPath)) + t.Cleanup(func() { + require.NoError(t, os.Chdir(testDir)) + }) + + resolver, err := NewFromDirectory(c.root, c.base) + require.NoError(t, err) + require.NotNil(t, resolver) + + refs, err := resolver.FilesByPath(c.input) + require.NoError(t, err) + if c.expectedRealPath == "" { + require.Empty(t, refs) + return + } + require.Len(t, refs, 1) + assert.Equal(t, c.expectedRealPath, refs[0].RealPath, "real path different") + assert.Equal(t, c.expectedVirtualPath, refs[0].VirtualPath, "virtual path different") + }) + } +} + +func TestDirectoryResolver_FilesByPath_relativeRoot(t *testing.T) { + cases := []struct { + name string + relativeRoot string + input string + expected []string + }{ + { + name: "should find a file from an absolute input", + relativeRoot: "./test-fixtures/", + input: "/image-symlinks/file-1.txt", + expected: []string{ + "image-symlinks/file-1.txt", + }, + }, + { + name: "should find a file from a relative path", + relativeRoot: "./test-fixtures/", + input: "image-symlinks/file-1.txt", + expected: []string{ + 
"image-symlinks/file-1.txt", + }, + }, + { + name: "should find a file from a relative path (root above cwd)", + // TODO: refactor me! this test depends on the structure of the source dir not changing, which isn't great + relativeRoot: "../", + input: "fileresolver/directory.go", + expected: []string{ + "fileresolver/directory.go", + }, + }, + } + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + resolver, err := NewFromDirectory(c.relativeRoot, "") + assert.NoError(t, err) + + refs, err := resolver.FilesByPath(c.input) + require.NoError(t, err) + assert.Len(t, refs, len(c.expected)) + s := strset.New() + for _, actual := range refs { + s.Add(actual.RealPath) + } + assert.ElementsMatch(t, c.expected, s.List()) + }) + } +} + +func TestDirectoryResolver_FilesByPath_absoluteRoot(t *testing.T) { + cases := []struct { + name string + relativeRoot string + input string + expected []string + }{ + { + name: "should find a file from an absolute input", + relativeRoot: "./test-fixtures/", + input: "/image-symlinks/file-1.txt", + expected: []string{ + "image-symlinks/file-1.txt", + }, + }, + { + name: "should find a file from a relative path", + relativeRoot: "./test-fixtures/", + input: "image-symlinks/file-1.txt", + expected: []string{ + "image-symlinks/file-1.txt", + }, + }, + { + name: "should find a file from a relative path (root above cwd)", + // TODO: refactor me! 
this test depends on the structure of the source dir not changing, which isn't great + relativeRoot: "../", + input: "fileresolver/directory.go", + expected: []string{ + "fileresolver/directory.go", + }, + }, + } + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + // note: this test is all about asserting correct functionality when the given analysis path + // is an absolute path + absRoot, err := filepath.Abs(c.relativeRoot) + require.NoError(t, err) + + resolver, err := NewFromDirectory(absRoot, "") + assert.NoError(t, err) + + refs, err := resolver.FilesByPath(c.input) + require.NoError(t, err) + assert.Len(t, refs, len(c.expected)) + s := strset.New() + for _, actual := range refs { + s.Add(actual.RealPath) + } + assert.ElementsMatch(t, c.expected, s.List()) + }) + } +} + +func TestDirectoryResolver_FilesByPath(t *testing.T) { + cases := []struct { + name string + root string + input string + expected string + refCount int + forcePositiveHasPath bool + }{ + { + name: "finds a file (relative)", + root: "./test-fixtures/", + input: "image-symlinks/file-1.txt", + expected: "image-symlinks/file-1.txt", + refCount: 1, + }, + { + name: "finds a file with relative indirection", + root: "./test-fixtures/../test-fixtures", + input: "image-symlinks/file-1.txt", + expected: "image-symlinks/file-1.txt", + refCount: 1, + }, + { + name: "managed non-existing files (relative)", + root: "./test-fixtures/", + input: "test-fixtures/image-symlinks/bogus.txt", + refCount: 0, + }, + { + name: "finds a file (absolute)", + root: "./test-fixtures/", + input: "/image-symlinks/file-1.txt", + expected: "image-symlinks/file-1.txt", + refCount: 1, + }, + { + name: "directories ignored", + root: "./test-fixtures/", + input: "/image-symlinks", + refCount: 0, + forcePositiveHasPath: true, + }, + } + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + resolver, err := NewFromDirectory(c.root, "") + assert.NoError(t, err) + + hasPath := resolver.HasPath(c.input) + if 
!c.forcePositiveHasPath { + if c.refCount != 0 && !hasPath { + t.Errorf("expected HasPath() to indicate existence, but did not") + } else if c.refCount == 0 && hasPath { + t.Errorf("expected HasPath() to NOT indicate existence, but does") + } + } else if !hasPath { + t.Errorf("expected HasPath() to indicate existence, but did not (force path)") + } + + refs, err := resolver.FilesByPath(c.input) + require.NoError(t, err) + assert.Len(t, refs, c.refCount) + for _, actual := range refs { + assert.Equal(t, c.expected, actual.RealPath) + } + }) + } +} + +func TestDirectoryResolver_MultipleFilesByPath(t *testing.T) { + cases := []struct { + name string + input []string + refCount int + }{ + { + name: "finds multiple files", + input: []string{"image-symlinks/file-1.txt", "image-symlinks/file-2.txt"}, + refCount: 2, + }, + { + name: "skips non-existing files", + input: []string{"image-symlinks/bogus.txt", "image-symlinks/file-1.txt"}, + refCount: 1, + }, + { + name: "does not return anything for non-existing directories", + input: []string{"non-existing/bogus.txt", "non-existing/file-1.txt"}, + refCount: 0, + }, + } + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + resolver, err := NewFromDirectory("./test-fixtures", "") + assert.NoError(t, err) + refs, err := resolver.FilesByPath(c.input...) 
+ assert.NoError(t, err) + + if len(refs) != c.refCount { + t.Errorf("unexpected number of refs: %d != %d", len(refs), c.refCount) + } + }) + } +} + +func TestDirectoryResolver_FilesByGlobMultiple(t *testing.T) { + resolver, err := NewFromDirectory("./test-fixtures", "") + assert.NoError(t, err) + refs, err := resolver.FilesByGlob("**/image-symlinks/file*") + assert.NoError(t, err) + + assert.Len(t, refs, 2) +} + +func TestDirectoryResolver_FilesByGlobRecursive(t *testing.T) { + resolver, err := NewFromDirectory("./test-fixtures/image-symlinks", "") + assert.NoError(t, err) + refs, err := resolver.FilesByGlob("**/*.txt") + assert.NoError(t, err) + assert.Len(t, refs, 6) +} + +func TestDirectoryResolver_FilesByGlobSingle(t *testing.T) { + resolver, err := NewFromDirectory("./test-fixtures", "") + assert.NoError(t, err) + refs, err := resolver.FilesByGlob("**/image-symlinks/*1.txt") + assert.NoError(t, err) + + assert.Len(t, refs, 1) + assert.Equal(t, "image-symlinks/file-1.txt", refs[0].RealPath) +} + +func TestDirectoryResolver_FilesByPath_ResolvesSymlinks(t *testing.T) { + + tests := []struct { + name string + fixture string + }{ + { + name: "one degree", + fixture: "link_to_new_readme", + }, + { + name: "two degrees", + fixture: "link_to_link_to_new_readme", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + resolver, err := NewFromDirectory("./test-fixtures/symlinks-simple", "") + assert.NoError(t, err) + + refs, err := resolver.FilesByPath(test.fixture) + require.NoError(t, err) + assert.Len(t, refs, 1) + + reader, err := resolver.FileContentsByLocation(refs[0]) + require.NoError(t, err) + + actual, err := io.ReadAll(reader) + require.NoError(t, err) + + expected, err := os.ReadFile("test-fixtures/symlinks-simple/readme") + require.NoError(t, err) + + assert.Equal(t, string(expected), string(actual)) + }) + } +} + +func TestDirectoryResolverDoesNotIgnoreRelativeSystemPaths(t *testing.T) { + // let's make certain that "dev/place" 
is not ignored, since it is not "/dev/place" + resolver, err := NewFromDirectory("test-fixtures/system_paths/target", "") + assert.NoError(t, err) + + // all paths should be found (non filtering matches a path) + locations, err := resolver.FilesByGlob("**/place") + assert.NoError(t, err) + // 4: within target/ + // 1: target/link --> relative path to "place" // NOTE: this is filtered out since it not unique relative to outside_root/link_target/place + // 1: outside_root/link_target/place + assert.Len(t, locations, 5) + + // ensure that symlink indexing outside of root worked + testLocation := "test-fixtures/system_paths/outside_root/link_target/place" + ok := false + for _, location := range locations { + if strings.HasSuffix(location.RealPath, testLocation) { + ok = true + } + } + + if !ok { + t.Fatalf("could not find test location=%q", testLocation) + } +} + +var _ fs.FileInfo = (*testFileInfo)(nil) + +type testFileInfo struct { + mode os.FileMode +} + +func (t testFileInfo) Name() string { + panic("implement me") +} + +func (t testFileInfo) Size() int64 { + panic("implement me") +} + +func (t testFileInfo) Mode() fs.FileMode { + return t.mode +} + +func (t testFileInfo) ModTime() time.Time { + panic("implement me") +} + +func (t testFileInfo) IsDir() bool { + panic("implement me") +} + +func (t testFileInfo) Sys() interface{} { + panic("implement me") +} + +func Test_isUnallowableFileType(t *testing.T) { + tests := []struct { + name string + info os.FileInfo + expected error + }{ + { + name: "regular file", + info: testFileInfo{ + mode: 0, + }, + }, + { + name: "dir", + info: testFileInfo{ + mode: os.ModeDir, + }, + }, + { + name: "symlink", + info: testFileInfo{ + mode: os.ModeSymlink, + }, + }, + { + name: "socket", + info: testFileInfo{ + mode: os.ModeSocket, + }, + expected: ErrSkipPath, + }, + { + name: "named pipe", + info: testFileInfo{ + mode: os.ModeNamedPipe, + }, + expected: ErrSkipPath, + }, + { + name: "char device", + info: testFileInfo{ + mode: 
os.ModeCharDevice, + }, + expected: ErrSkipPath, + }, + { + name: "block device", + info: testFileInfo{ + mode: os.ModeDevice, + }, + expected: ErrSkipPath, + }, + { + name: "irregular", + info: testFileInfo{ + mode: os.ModeIrregular, + }, + expected: ErrSkipPath, + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + assert.Equal(t, test.expected, disallowByFileType("dont/care", test.info, nil)) + }) + } +} + +func Test_directoryResolver_FilesByMIMEType(t *testing.T) { + tests := []struct { + fixturePath string + mimeType string + expectedPaths *strset.Set + }{ + { + fixturePath: "./test-fixtures/image-simple", + mimeType: "text/plain", + expectedPaths: strset.New("file-1.txt", "file-2.txt", "target/really/nested/file-3.txt", "Dockerfile"), + }, + } + for _, test := range tests { + t.Run(test.fixturePath, func(t *testing.T) { + resolver, err := NewFromDirectory(test.fixturePath, "") + assert.NoError(t, err) + locations, err := resolver.FilesByMIMEType(test.mimeType) + assert.NoError(t, err) + assert.Equal(t, test.expectedPaths.Size(), len(locations)) + for _, l := range locations { + assert.True(t, test.expectedPaths.Has(l.RealPath), "does not have path %q", l.RealPath) + } + }) + } +} + +func Test_IndexingNestedSymLinks(t *testing.T) { + resolver, err := NewFromDirectory("./test-fixtures/symlinks-simple", "") + require.NoError(t, err) + + // check that we can get the real path + locations, err := resolver.FilesByPath("./readme") + require.NoError(t, err) + assert.Len(t, locations, 1) + + // check that we can access the same file via 1 symlink + locations, err = resolver.FilesByPath("./link_to_new_readme") + require.NoError(t, err) + require.Len(t, locations, 1) + assert.Equal(t, "readme", locations[0].RealPath) + assert.Equal(t, "link_to_new_readme", locations[0].VirtualPath) + + // check that we can access the same file via 2 symlinks + locations, err = resolver.FilesByPath("./link_to_link_to_new_readme") + require.NoError(t, err) + 
require.Len(t, locations, 1) + assert.Equal(t, "readme", locations[0].RealPath) + assert.Equal(t, "link_to_link_to_new_readme", locations[0].VirtualPath) + + // check that we can access the same file via 2 symlinks + locations, err = resolver.FilesByGlob("**/link_*") + require.NoError(t, err) + require.Len(t, locations, 1) // you would think this is 2, however, they point to the same file, and glob only returns unique files + + // returned locations can be in any order + expectedVirtualPaths := []string{ + "link_to_link_to_new_readme", + //"link_to_new_readme", // we filter out this one because the first symlink resolves to the same file + } + + expectedRealPaths := []string{ + "readme", + } + + actualRealPaths := strset.New() + actualVirtualPaths := strset.New() + for _, a := range locations { + actualVirtualPaths.Add(a.VirtualPath) + actualRealPaths.Add(a.RealPath) + } + + assert.ElementsMatch(t, expectedVirtualPaths, actualVirtualPaths.List()) + assert.ElementsMatch(t, expectedRealPaths, actualRealPaths.List()) +} + +func Test_IndexingNestedSymLinks_ignoredIndexes(t *testing.T) { + filterFn := func(path string, _ os.FileInfo, _ error) error { + if strings.HasSuffix(path, string(filepath.Separator)+"readme") { + return ErrSkipPath + } + return nil + } + + resolver, err := NewFromDirectory("./test-fixtures/symlinks-simple", "", filterFn) + require.NoError(t, err) + + // the path to the real file is PRUNED from the index, so we should NOT expect a location returned + locations, err := resolver.FilesByPath("./readme") + require.NoError(t, err) + assert.Empty(t, locations) + + // check that we cannot access the file even via symlink + locations, err = resolver.FilesByPath("./link_to_new_readme") + require.NoError(t, err) + assert.Empty(t, locations) + + // check that we still cannot access the same file via 2 symlinks + locations, err = resolver.FilesByPath("./link_to_link_to_new_readme") + require.NoError(t, err) + assert.Empty(t, locations) +} + +func 
Test_IndexingNestedSymLinksOutsideOfRoot(t *testing.T) { + resolver, err := NewFromDirectory("./test-fixtures/symlinks-multiple-roots/root", "") + require.NoError(t, err) + + // check that we can get the real path + locations, err := resolver.FilesByPath("./readme") + require.NoError(t, err) + assert.Len(t, locations, 1) + + // check that we can access the same file via 2 symlinks (link_to_link_to_readme -> link_to_readme -> readme) + locations, err = resolver.FilesByPath("./link_to_link_to_readme") + require.NoError(t, err) + assert.Len(t, locations, 1) +} + +func Test_RootViaSymlink(t *testing.T) { + resolver, err := NewFromDirectory("./test-fixtures/symlinked-root/nested/link-root", "") + require.NoError(t, err) + + locations, err := resolver.FilesByPath("./file1.txt") + require.NoError(t, err) + assert.Len(t, locations, 1) + + locations, err = resolver.FilesByPath("./nested/file2.txt") + require.NoError(t, err) + assert.Len(t, locations, 1) + + locations, err = resolver.FilesByPath("./nested/linked-file1.txt") + require.NoError(t, err) + assert.Len(t, locations, 1) +} + +func Test_directoryResolver_FileContentsByLocation(t *testing.T) { + cwd, err := os.Getwd() + require.NoError(t, err) + + r, err := NewFromDirectory(".", "") + require.NoError(t, err) + + exists, existingPath, err := r.tree.File(stereoscopeFile.Path(filepath.Join(cwd, "test-fixtures/image-simple/file-1.txt"))) + require.True(t, exists) + require.NoError(t, err) + require.True(t, existingPath.HasReference()) + + tests := []struct { + name string + location file.Location + expects string + err bool + }{ + { + name: "use file reference for content requests", + location: file.NewLocationFromDirectory("some/place", *existingPath.Reference), + expects: "this file has contents", + }, + { + name: "error on empty file reference", + location: file.NewLocationFromDirectory("doesn't matter", stereoscopeFile.Reference{}), + err: true, + }, + } + for _, test :=
range tests { + t.Run(test.name, func(t *testing.T) { + + actual, err := r.FileContentsByLocation(test.location) + if test.err { + require.Error(t, err) + return + } + + require.NoError(t, err) + if test.expects != "" { + b, err := io.ReadAll(actual) + require.NoError(t, err) + assert.Equal(t, test.expects, string(b)) + } + }) + } +} + +func Test_isUnixSystemRuntimePath(t *testing.T) { + tests := []struct { + path string + expected error + }{ + { + path: "proc/place", + }, + { + path: "/proc/place", + expected: fs.SkipDir, + }, + { + path: "/proc", + expected: fs.SkipDir, + }, + { + path: "/pro/c", + }, + { + path: "/pro", + }, + { + path: "/dev", + expected: fs.SkipDir, + }, + { + path: "/sys", + expected: fs.SkipDir, + }, + { + path: "/something/sys", + }, + } + for _, test := range tests { + t.Run(test.path, func(t *testing.T) { + assert.Equal(t, test.expected, disallowUnixSystemRuntimePath(test.path, nil, nil)) + }) + } +} + +func Test_SymlinkLoopWithGlobsShouldResolve(t *testing.T) { + test := func(t *testing.T) { + resolver, err := NewFromDirectory("./test-fixtures/symlinks-loop", "") + require.NoError(t, err) + + locations, err := resolver.FilesByGlob("**/file.target") + require.NoError(t, err) + + require.Len(t, locations, 1) + assert.Equal(t, "devices/loop0/file.target", locations[0].RealPath) + } + + testWithTimeout(t, 5*time.Second, test) +} + +func TestDirectoryResolver_FilesByPath_baseRoot(t *testing.T) { + cases := []struct { + name string + root string + input string + expected []string + }{ + { + name: "should find the base file", + root: "./test-fixtures/symlinks-base/", + input: "./base", + expected: []string{ + "/base", + }, + }, + { + name: "should follow a link with a pivoted root", + root: "./test-fixtures/symlinks-base/", + input: "./foo", + expected: []string{ + "/base", + }, + }, + { + name: "should follow a relative link with extra parents", + root: "./test-fixtures/symlinks-base/", + input: "./bar", + expected: []string{ + "/base", + }, + 
}, + { + name: "should follow an absolute link with extra parents", + root: "./test-fixtures/symlinks-base/", + input: "./baz", + expected: []string{ + "/base", + }, + }, + { + name: "should follow an absolute link with extra parents", + root: "./test-fixtures/symlinks-base/", + input: "./sub/link", + expected: []string{ + "/sub/item", + }, + }, + { + name: "should follow chained pivoted link", + root: "./test-fixtures/symlinks-base/", + input: "./chain", + expected: []string{ + "/base", + }, + }, + } + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + resolver, err := NewFromDirectory(c.root, c.root) + assert.NoError(t, err) + + refs, err := resolver.FilesByPath(c.input) + require.NoError(t, err) + assert.Len(t, refs, len(c.expected)) + s := strset.New() + for _, actual := range refs { + s.Add(actual.RealPath) + } + assert.ElementsMatch(t, c.expected, s.List()) + }) + } + +} + +func Test_directoryResolver_resolvesLinks(t *testing.T) { + tests := []struct { + name string + runner func(file.Resolver) []file.Location + expected []file.Location + }{ + { + name: "by mimetype", + runner: func(resolver file.Resolver) []file.Location { + // links should not show up when searching mimetype + actualLocations, err := resolver.FilesByMIMEType("text/plain") + assert.NoError(t, err) + return actualLocations + }, + expected: []file.Location{ + file.NewLocation("file-1.txt"), // note: missing virtual path "file-1.txt" + file.NewLocation("file-3.txt"), // note: missing virtual path "file-3.txt" + file.NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt" + file.NewLocation("parent/file-4.txt"), // note: missing virtual path "file-4.txt" + }, + }, + { + name: "by glob to links", + runner: func(resolver file.Resolver) []file.Location { + // links are searched, but resolve to the real files + // for that reason we need to place **/ in front (which is not the same for other resolvers) + actualLocations, err := resolver.FilesByGlob("**/*ink-*") + 
assert.NoError(t, err) + return actualLocations + }, + expected: []file.Location{ + file.NewVirtualLocation("file-1.txt", "link-1"), + file.NewVirtualLocation("file-2.txt", "link-2"), + // we already have this real file path via another link, so only one is returned + //file.NewVirtualLocation("file-2.txt", "link-indirect"), + file.NewVirtualLocation("file-3.txt", "link-within"), + }, + }, + { + name: "by basename", + runner: func(resolver file.Resolver) []file.Location { + // links are searched, but resolve to the real files + actualLocations, err := resolver.FilesByGlob("**/file-2.txt") + assert.NoError(t, err) + return actualLocations + }, + expected: []file.Location{ + // this has two copies in the base image, which overwrites the same location + file.NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt", + }, + }, + { + name: "by basename glob", + runner: func(resolver file.Resolver) []file.Location { + // links are searched, but resolve to the real files + actualLocations, err := resolver.FilesByGlob("**/file-?.txt") + assert.NoError(t, err) + return actualLocations + }, + expected: []file.Location{ + file.NewLocation("file-1.txt"), // note: missing virtual path "file-1.txt" + file.NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt" + file.NewLocation("file-3.txt"), // note: missing virtual path "file-3.txt" + file.NewLocation("parent/file-4.txt"), // note: missing virtual path "parent/file-4.txt" + }, + }, + { + name: "by basename glob to links", + runner: func(resolver file.Resolver) []file.Location { + actualLocations, err := resolver.FilesByGlob("**/link-*") + assert.NoError(t, err) + return actualLocations + }, + expected: []file.Location{ + file.NewVirtualLocation("file-1.txt", "link-1"), + file.NewVirtualLocation("file-2.txt", "link-2"), + + // we already have this real file path via another link, so only one is returned + //file.NewVirtualLocation("file-2.txt", "link-indirect"), + + 
file.NewVirtualLocation("file-3.txt", "link-within"), + }, + }, + { + name: "by extension", + runner: func(resolver file.Resolver) []file.Location { + // links are searched, but resolve to the real files + actualLocations, err := resolver.FilesByGlob("**/*.txt") + assert.NoError(t, err) + return actualLocations + }, + expected: []file.Location{ + file.NewLocation("file-1.txt"), // note: missing virtual path "file-1.txt" + file.NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt" + file.NewLocation("file-3.txt"), // note: missing virtual path "file-3.txt" + file.NewLocation("parent/file-4.txt"), // note: missing virtual path "parent/file-4.txt" + }, + }, + { + name: "by path to degree 1 link", + runner: func(resolver file.Resolver) []file.Location { + // links resolve to the final file + actualLocations, err := resolver.FilesByPath("/link-2") + assert.NoError(t, err) + return actualLocations + }, + expected: []file.Location{ + // we have multiple copies across layers + file.NewVirtualLocation("file-2.txt", "link-2"), + }, + }, + { + name: "by path to degree 2 link", + runner: func(resolver file.Resolver) []file.Location { + // multiple links resolves to the final file + actualLocations, err := resolver.FilesByPath("/link-indirect") + assert.NoError(t, err) + return actualLocations + }, + expected: []file.Location{ + // we have multiple copies across layers + file.NewVirtualLocation("file-2.txt", "link-indirect"), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + resolver, err := NewFromDirectory("./test-fixtures/symlinks-from-image-symlinks-fixture", "") + require.NoError(t, err) + assert.NoError(t, err) + + actual := test.runner(resolver) + + compareLocations(t, test.expected, actual) + }) + } +} + +func TestDirectoryResolver_DoNotAddVirtualPathsToTree(t *testing.T) { + resolver, err := NewFromDirectory("./test-fixtures/symlinks-prune-indexing", "") + require.NoError(t, err) + + var allRealPaths 
[]stereoscopeFile.Path + for l := range resolver.AllLocations() { + allRealPaths = append(allRealPaths, stereoscopeFile.Path(l.RealPath)) + } + pathSet := stereoscopeFile.NewPathSet(allRealPaths...) + + assert.False(t, + pathSet.Contains("before-path/file.txt"), + "symlink destinations should only be indexed at their real path, not through their virtual (symlinked) path", + ) + + assert.False(t, + pathSet.Contains("a-path/file.txt"), + "symlink destinations should only be indexed at their real path, not through their virtual (symlinked) path", + ) + +} + +func TestDirectoryResolver_FilesContents_errorOnDirRequest(t *testing.T) { + resolver, err := NewFromDirectory("./test-fixtures/system_paths", "") + assert.NoError(t, err) + + var dirLoc *file.Location + for loc := range resolver.AllLocations() { + entry, err := resolver.index.Get(loc.Reference()) + require.NoError(t, err) + if entry.Metadata.IsDir() { + dirLoc = &loc + break + } + } + + require.NotNil(t, dirLoc) + + reader, err := resolver.FileContentsByLocation(*dirLoc) + require.Error(t, err) + require.Nil(t, reader) +} + +func TestDirectoryResolver_AllLocations(t *testing.T) { + resolver, err := NewFromDirectory("./test-fixtures/symlinks-from-image-symlinks-fixture", "") + assert.NoError(t, err) + + paths := strset.New() + for loc := range resolver.AllLocations() { + if strings.HasPrefix(loc.RealPath, "/") { + // ignore outside the fixture root for now + continue + } + paths.Add(loc.RealPath) + } + expected := []string{ + "file-1.txt", + "file-2.txt", + "file-3.txt", + "link-1", + "link-2", + "link-dead", + "link-indirect", + "link-within", + "parent", + "parent-link", + "parent/file-4.txt", + } + + pathsList := paths.List() + sort.Strings(pathsList) + + assert.ElementsMatchf(t, expected, pathsList, "expected all paths to be indexed, but found different paths: \n%s", cmp.Diff(expected, paths.List())) +} diff --git a/syft/source/directory_resolver_windows_test.go 
b/syft/internal/fileresolver/directory_windows_test.go similarity index 99% rename from syft/source/directory_resolver_windows_test.go rename to syft/internal/fileresolver/directory_windows_test.go index 18cbb7856c4..115fb30a3b1 100644 --- a/syft/source/directory_resolver_windows_test.go +++ b/syft/internal/fileresolver/directory_windows_test.go @@ -1,4 +1,4 @@ -package source +package fileresolver import "testing" diff --git a/syft/internal/fileresolver/empty.go b/syft/internal/fileresolver/empty.go new file mode 100644 index 00000000000..3b08f395c87 --- /dev/null +++ b/syft/internal/fileresolver/empty.go @@ -0,0 +1,47 @@ +package fileresolver + +import ( + "io" + + "github.com/anchore/syft/syft/file" +) + +var _ file.WritableResolver = (*Empty)(nil) + +type Empty struct{} + +func (e Empty) FileContentsByLocation(_ file.Location) (io.ReadCloser, error) { + return nil, nil +} + +func (e Empty) HasPath(_ string) bool { + return false +} + +func (e Empty) FilesByPath(_ ...string) ([]file.Location, error) { + return nil, nil +} + +func (e Empty) FilesByGlob(_ ...string) ([]file.Location, error) { + return nil, nil +} + +func (e Empty) FilesByMIMEType(_ ...string) ([]file.Location, error) { + return nil, nil +} + +func (e Empty) RelativeFileByPath(_ file.Location, _ string) *file.Location { + return nil +} + +func (e Empty) AllLocations() <-chan file.Location { + return nil +} + +func (e Empty) FileMetadataByLocation(_ file.Location) (file.Metadata, error) { + return file.Metadata{}, nil +} + +func (e Empty) Write(_ file.Location, _ io.Reader) error { + return nil +} diff --git a/syft/source/excluding_file_resolver.go b/syft/internal/fileresolver/excluding_file.go similarity index 55% rename from syft/source/excluding_file_resolver.go rename to syft/internal/fileresolver/excluding_file.go index 50969116a81..81caa49c765 100644 --- a/syft/source/excluding_file_resolver.go +++ b/syft/internal/fileresolver/excluding_file.go @@ -1,65 +1,67 @@ -package source +package 
fileresolver import ( "fmt" "io" + + "github.com/anchore/syft/syft/file" ) type excludeFn func(string) bool -// excludingResolver decorates a resolver with an exclusion function that is used to +// excluding decorates a resolver with an exclusion function that is used to // filter out entries in the delegate resolver -type excludingResolver struct { - delegate FileResolver +type excluding struct { + delegate file.Resolver excludeFn excludeFn } -// NewExcludingResolver create a new resolver which wraps the provided delegate and excludes +// NewExcluding create a new resolver which wraps the provided delegate and excludes // entries based on a provided path exclusion function -func NewExcludingResolver(delegate FileResolver, excludeFn excludeFn) FileResolver { - return &excludingResolver{ +func NewExcluding(delegate file.Resolver, excludeFn excludeFn) file.Resolver { + return &excluding{ delegate, excludeFn, } } -func (r *excludingResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) { +func (r *excluding) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { if locationMatches(&location, r.excludeFn) { return nil, fmt.Errorf("no such location: %+v", location.RealPath) } return r.delegate.FileContentsByLocation(location) } -func (r *excludingResolver) FileMetadataByLocation(location Location) (FileMetadata, error) { +func (r *excluding) FileMetadataByLocation(location file.Location) (file.Metadata, error) { if locationMatches(&location, r.excludeFn) { - return FileMetadata{}, fmt.Errorf("no such location: %+v", location.RealPath) + return file.Metadata{}, fmt.Errorf("no such location: %+v", location.RealPath) } return r.delegate.FileMetadataByLocation(location) } -func (r *excludingResolver) HasPath(path string) bool { +func (r *excluding) HasPath(path string) bool { if r.excludeFn(path) { return false } return r.delegate.HasPath(path) } -func (r *excludingResolver) FilesByPath(paths ...string) ([]Location, error) { +func (r 
*excluding) FilesByPath(paths ...string) ([]file.Location, error) { locations, err := r.delegate.FilesByPath(paths...) return filterLocations(locations, err, r.excludeFn) } -func (r *excludingResolver) FilesByGlob(patterns ...string) ([]Location, error) { +func (r *excluding) FilesByGlob(patterns ...string) ([]file.Location, error) { locations, err := r.delegate.FilesByGlob(patterns...) return filterLocations(locations, err, r.excludeFn) } -func (r *excludingResolver) FilesByMIMEType(types ...string) ([]Location, error) { +func (r *excluding) FilesByMIMEType(types ...string) ([]file.Location, error) { locations, err := r.delegate.FilesByMIMEType(types...) return filterLocations(locations, err, r.excludeFn) } -func (r *excludingResolver) RelativeFileByPath(location Location, path string) *Location { +func (r *excluding) RelativeFileByPath(location file.Location, path string) *file.Location { l := r.delegate.RelativeFileByPath(location, path) if l != nil && locationMatches(l, r.excludeFn) { return nil @@ -67,8 +69,8 @@ func (r *excludingResolver) RelativeFileByPath(location Location, path string) * return l } -func (r *excludingResolver) AllLocations() <-chan Location { - c := make(chan Location) +func (r *excluding) AllLocations() <-chan file.Location { + c := make(chan file.Location) go func() { defer close(c) for location := range r.delegate.AllLocations() { @@ -80,11 +82,11 @@ func (r *excludingResolver) AllLocations() <-chan Location { return c } -func locationMatches(location *Location, exclusionFn excludeFn) bool { +func locationMatches(location *file.Location, exclusionFn excludeFn) bool { return exclusionFn(location.RealPath) || exclusionFn(location.VirtualPath) } -func filterLocations(locations []Location, err error, exclusionFn excludeFn) ([]Location, error) { +func filterLocations(locations []file.Location, err error, exclusionFn excludeFn) ([]file.Location, error) { if err != nil { return nil, err } diff --git 
a/syft/source/excluding_file_resolver_test.go b/syft/internal/fileresolver/excluding_file_test.go similarity index 66% rename from syft/source/excluding_file_resolver_test.go rename to syft/internal/fileresolver/excluding_file_test.go index c448e39210e..2ba51473682 100644 --- a/syft/source/excluding_file_resolver_test.go +++ b/syft/internal/fileresolver/excluding_file_test.go @@ -1,4 +1,4 @@ -package source +package fileresolver import ( "io" @@ -6,6 +6,8 @@ import ( "testing" "github.com/stretchr/testify/assert" + + "github.com/anchore/syft/syft/file" ) func TestExcludingResolver(t *testing.T) { @@ -54,7 +56,7 @@ func TestExcludingResolver(t *testing.T) { resolver := &mockResolver{ locations: test.locations, } - er := NewExcludingResolver(resolver, test.excludeFn) + er := NewExcluding(resolver, test.excludeFn) locations, _ := er.FilesByPath() assert.ElementsMatch(t, locationPaths(locations), test.expected) @@ -65,7 +67,7 @@ func TestExcludingResolver(t *testing.T) { locations, _ = er.FilesByMIMEType() assert.ElementsMatch(t, locationPaths(locations), test.expected) - locations = []Location{} + locations = []file.Location{} channel := er.AllLocations() for location := range channel { @@ -77,25 +79,25 @@ func TestExcludingResolver(t *testing.T) { for _, path := range diff { assert.False(t, er.HasPath(path)) - c, err := er.FileContentsByLocation(NewLocation(path)) + c, err := er.FileContentsByLocation(file.NewLocation(path)) assert.Nil(t, c) assert.Error(t, err) - m, err := er.FileMetadataByLocation(NewLocation(path)) + m, err := er.FileMetadataByLocation(file.NewLocation(path)) assert.Empty(t, m.LinkDestination) assert.Error(t, err) - l := er.RelativeFileByPath(NewLocation(""), path) + l := er.RelativeFileByPath(file.NewLocation(""), path) assert.Nil(t, l) } for _, path := range test.expected { assert.True(t, er.HasPath(path)) - c, err := er.FileContentsByLocation(NewLocation(path)) + c, err := er.FileContentsByLocation(file.NewLocation(path)) assert.NotNil(t, c) 
assert.Nil(t, err) - m, err := er.FileMetadataByLocation(NewLocation(path)) + m, err := er.FileMetadataByLocation(file.NewLocation(path)) assert.NotEmpty(t, m.LinkDestination) assert.Nil(t, err) - l := er.RelativeFileByPath(NewLocation(""), path) + l := er.RelativeFileByPath(file.NewLocation(""), path) assert.NotNil(t, l) } }) @@ -117,7 +119,7 @@ func difference(a, b []string) []string { return diff } -func locationPaths(locations []Location) []string { +func locationPaths(locations []file.Location) []string { paths := []string{} for _, l := range locations { paths = append(paths, l.RealPath) @@ -129,20 +131,20 @@ type mockResolver struct { locations []string } -func (r *mockResolver) getLocations() ([]Location, error) { - out := []Location{} +func (r *mockResolver) getLocations() ([]file.Location, error) { + out := []file.Location{} for _, path := range r.locations { - out = append(out, NewLocation(path)) + out = append(out, file.NewLocation(path)) } return out, nil } -func (r *mockResolver) FileContentsByLocation(_ Location) (io.ReadCloser, error) { +func (r *mockResolver) FileContentsByLocation(_ file.Location) (io.ReadCloser, error) { return io.NopCloser(strings.NewReader("Hello, world!")), nil } -func (r *mockResolver) FileMetadataByLocation(_ Location) (FileMetadata, error) { - return FileMetadata{ +func (r *mockResolver) FileMetadataByLocation(_ file.Location) (file.Metadata, error) { + return file.Metadata{ LinkDestination: "MOCK", }, nil } @@ -151,37 +153,37 @@ func (r *mockResolver) HasPath(_ string) bool { return true } -func (r *mockResolver) FilesByPath(_ ...string) ([]Location, error) { +func (r *mockResolver) FilesByPath(_ ...string) ([]file.Location, error) { return r.getLocations() } -func (r *mockResolver) FilesByGlob(_ ...string) ([]Location, error) { +func (r *mockResolver) FilesByGlob(_ ...string) ([]file.Location, error) { return r.getLocations() } -func (r *mockResolver) FilesByMIMEType(_ ...string) ([]Location, error) { +func (r 
*mockResolver) FilesByMIMEType(_ ...string) ([]file.Location, error) { return r.getLocations() } -func (r *mockResolver) FilesByExtension(_ ...string) ([]Location, error) { +func (r *mockResolver) FilesByExtension(_ ...string) ([]file.Location, error) { return r.getLocations() } -func (r *mockResolver) FilesByBasename(_ ...string) ([]Location, error) { +func (r *mockResolver) FilesByBasename(_ ...string) ([]file.Location, error) { return r.getLocations() } -func (r *mockResolver) FilesByBasenameGlob(_ ...string) ([]Location, error) { +func (r *mockResolver) FilesByBasenameGlob(_ ...string) ([]file.Location, error) { return r.getLocations() } -func (r *mockResolver) RelativeFileByPath(_ Location, path string) *Location { - l := NewLocation(path) +func (r *mockResolver) RelativeFileByPath(_ file.Location, path string) *file.Location { + l := file.NewLocation(path) return &l } -func (r *mockResolver) AllLocations() <-chan Location { - c := make(chan Location) +func (r *mockResolver) AllLocations() <-chan file.Location { + c := make(chan file.Location) go func() { defer close(c) locations, _ := r.getLocations() diff --git a/syft/internal/fileresolver/file_metadata_by_location.go b/syft/internal/fileresolver/file_metadata_by_location.go new file mode 100644 index 00000000000..9d5974d9b05 --- /dev/null +++ b/syft/internal/fileresolver/file_metadata_by_location.go @@ -0,0 +1,15 @@ +package fileresolver + +import ( + "github.com/anchore/stereoscope/pkg/image" + "github.com/anchore/syft/syft/file" +) + +func fileMetadataByLocation(img *image.Image, location file.Location) (file.Metadata, error) { + entry, err := img.FileCatalog.Get(location.Reference()) + if err != nil { + return file.Metadata{}, err + } + + return entry.Metadata, nil +} diff --git a/syft/internal/fileresolver/test-fixtures/generate-tar-fixture-from-source-dir.sh b/syft/internal/fileresolver/test-fixtures/generate-tar-fixture-from-source-dir.sh new file mode 100755 index 00000000000..922941d36fb --- 
/dev/null +++ b/syft/internal/fileresolver/test-fixtures/generate-tar-fixture-from-source-dir.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash +set -eux + +# $1 —— absolute path to destination file, should end with .tar +# $2 —— absolute path to directory from which to add entries to the archive + +pushd "$2" + tar -cvf "$1" . +popd diff --git a/syft/source/test-fixtures/image-duplicate-path/Dockerfile b/syft/internal/fileresolver/test-fixtures/image-duplicate-path/Dockerfile similarity index 100% rename from syft/source/test-fixtures/image-duplicate-path/Dockerfile rename to syft/internal/fileresolver/test-fixtures/image-duplicate-path/Dockerfile diff --git a/syft/source/test-fixtures/image-duplicate-path/file-1.txt b/syft/internal/fileresolver/test-fixtures/image-duplicate-path/file-1.txt similarity index 100% rename from syft/source/test-fixtures/image-duplicate-path/file-1.txt rename to syft/internal/fileresolver/test-fixtures/image-duplicate-path/file-1.txt diff --git a/syft/source/test-fixtures/image-duplicate-path/file-2.txt b/syft/internal/fileresolver/test-fixtures/image-duplicate-path/file-2.txt similarity index 100% rename from syft/source/test-fixtures/image-duplicate-path/file-2.txt rename to syft/internal/fileresolver/test-fixtures/image-duplicate-path/file-2.txt diff --git a/syft/source/test-fixtures/image-files-deleted/Dockerfile b/syft/internal/fileresolver/test-fixtures/image-files-deleted/Dockerfile similarity index 100% rename from syft/source/test-fixtures/image-files-deleted/Dockerfile rename to syft/internal/fileresolver/test-fixtures/image-files-deleted/Dockerfile diff --git a/syft/source/test-fixtures/image-files-deleted/file-1.txt b/syft/internal/fileresolver/test-fixtures/image-files-deleted/file-1.txt similarity index 100% rename from syft/source/test-fixtures/image-files-deleted/file-1.txt rename to syft/internal/fileresolver/test-fixtures/image-files-deleted/file-1.txt diff --git a/syft/source/test-fixtures/image-files-deleted/file-3.txt 
b/syft/internal/fileresolver/test-fixtures/image-files-deleted/file-3.txt similarity index 100% rename from syft/source/test-fixtures/image-files-deleted/file-3.txt rename to syft/internal/fileresolver/test-fixtures/image-files-deleted/file-3.txt diff --git a/syft/source/test-fixtures/image-files-deleted/target/file-2.txt b/syft/internal/fileresolver/test-fixtures/image-files-deleted/target/file-2.txt similarity index 100% rename from syft/source/test-fixtures/image-files-deleted/target/file-2.txt rename to syft/internal/fileresolver/test-fixtures/image-files-deleted/target/file-2.txt diff --git a/syft/internal/fileresolver/test-fixtures/image-simple/Dockerfile b/syft/internal/fileresolver/test-fixtures/image-simple/Dockerfile new file mode 100644 index 00000000000..62fb151e497 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/image-simple/Dockerfile @@ -0,0 +1,6 @@ +# Note: changes to this file will result in updating several test values. Consider making a new image fixture instead of editing this one. +FROM scratch +ADD file-1.txt /somefile-1.txt +ADD file-2.txt /somefile-2.txt +# note: adding a directory will behave differently on docker engine v18 vs v19 +ADD target / diff --git a/syft/internal/fileresolver/test-fixtures/image-simple/file-1.txt b/syft/internal/fileresolver/test-fixtures/image-simple/file-1.txt new file mode 100644 index 00000000000..985d3408e98 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/image-simple/file-1.txt @@ -0,0 +1 @@ +this file has contents \ No newline at end of file diff --git a/syft/internal/fileresolver/test-fixtures/image-simple/file-2.txt b/syft/internal/fileresolver/test-fixtures/image-simple/file-2.txt new file mode 100644 index 00000000000..396d08bbc72 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/image-simple/file-2.txt @@ -0,0 +1 @@ +file-2 contents! 
\ No newline at end of file diff --git a/syft/internal/fileresolver/test-fixtures/image-simple/target/really/nested/file-3.txt b/syft/internal/fileresolver/test-fixtures/image-simple/target/really/nested/file-3.txt new file mode 100644 index 00000000000..f85472c937d --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/image-simple/target/really/nested/file-3.txt @@ -0,0 +1,2 @@ +another file! +with lines... \ No newline at end of file diff --git a/syft/source/test-fixtures/image-symlinks/Dockerfile b/syft/internal/fileresolver/test-fixtures/image-symlinks/Dockerfile similarity index 100% rename from syft/source/test-fixtures/image-symlinks/Dockerfile rename to syft/internal/fileresolver/test-fixtures/image-symlinks/Dockerfile diff --git a/syft/internal/fileresolver/test-fixtures/image-symlinks/file-1.txt b/syft/internal/fileresolver/test-fixtures/image-symlinks/file-1.txt new file mode 100644 index 00000000000..d86db8155c3 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/image-symlinks/file-1.txt @@ -0,0 +1 @@ +file 1! 
\ No newline at end of file diff --git a/syft/source/test-fixtures/image-symlinks/file-2.txt b/syft/internal/fileresolver/test-fixtures/image-symlinks/file-2.txt similarity index 100% rename from syft/source/test-fixtures/image-symlinks/file-2.txt rename to syft/internal/fileresolver/test-fixtures/image-symlinks/file-2.txt diff --git a/syft/source/test-fixtures/image-symlinks/nested/nested/file-3.txt b/syft/internal/fileresolver/test-fixtures/image-symlinks/nested/nested/file-3.txt similarity index 100% rename from syft/source/test-fixtures/image-symlinks/nested/nested/file-3.txt rename to syft/internal/fileresolver/test-fixtures/image-symlinks/nested/nested/file-3.txt diff --git a/syft/source/test-fixtures/image-symlinks/new-file-2.txt b/syft/internal/fileresolver/test-fixtures/image-symlinks/new-file-2.txt similarity index 100% rename from syft/source/test-fixtures/image-symlinks/new-file-2.txt rename to syft/internal/fileresolver/test-fixtures/image-symlinks/new-file-2.txt diff --git a/syft/source/test-fixtures/image-symlinks/new-file-4.txt b/syft/internal/fileresolver/test-fixtures/image-symlinks/new-file-4.txt similarity index 100% rename from syft/source/test-fixtures/image-symlinks/new-file-4.txt rename to syft/internal/fileresolver/test-fixtures/image-symlinks/new-file-4.txt diff --git a/syft/source/test-fixtures/image-symlinks/parent/file-4.txt b/syft/internal/fileresolver/test-fixtures/image-symlinks/parent/file-4.txt similarity index 100% rename from syft/source/test-fixtures/image-symlinks/parent/file-4.txt rename to syft/internal/fileresolver/test-fixtures/image-symlinks/parent/file-4.txt diff --git a/syft/internal/fileresolver/test-fixtures/path-detected-2/.vimrc b/syft/internal/fileresolver/test-fixtures/path-detected-2/.vimrc new file mode 100644 index 00000000000..7f865a925e7 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/path-detected-2/.vimrc @@ -0,0 +1 @@ +Another .vimrc file \ No newline at end of file diff --git 
a/syft/source/test-fixtures/symlinks-base/sub/item b/syft/internal/fileresolver/test-fixtures/path-detected-2/empty similarity index 100% rename from syft/source/test-fixtures/symlinks-base/sub/item rename to syft/internal/fileresolver/test-fixtures/path-detected-2/empty diff --git a/syft/internal/fileresolver/test-fixtures/path-detected/.vimrc b/syft/internal/fileresolver/test-fixtures/path-detected/.vimrc new file mode 100644 index 00000000000..93b07e21b93 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/path-detected/.vimrc @@ -0,0 +1 @@ +" A .vimrc file diff --git a/syft/internal/fileresolver/test-fixtures/path-detected/empty b/syft/internal/fileresolver/test-fixtures/path-detected/empty new file mode 100644 index 00000000000..e69de29bb2d diff --git a/syft/internal/fileresolver/test-fixtures/req-resp/path/to/rel-inside.txt b/syft/internal/fileresolver/test-fixtures/req-resp/path/to/rel-inside.txt new file mode 120000 index 00000000000..f2bc06e87c4 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/req-resp/path/to/rel-inside.txt @@ -0,0 +1 @@ +./the/file.txt \ No newline at end of file diff --git a/syft/internal/fileresolver/test-fixtures/req-resp/path/to/the/file.txt b/syft/internal/fileresolver/test-fixtures/req-resp/path/to/the/file.txt new file mode 100644 index 00000000000..fbfd79f5e48 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/req-resp/path/to/the/file.txt @@ -0,0 +1 @@ +file-1 diff --git a/syft/internal/fileresolver/test-fixtures/req-resp/path/to/the/rel-outside.txt b/syft/internal/fileresolver/test-fixtures/req-resp/path/to/the/rel-outside.txt new file mode 120000 index 00000000000..6ad08d35758 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/req-resp/path/to/the/rel-outside.txt @@ -0,0 +1 @@ +../../../somewhere/outside.txt \ No newline at end of file diff --git a/syft/internal/fileresolver/test-fixtures/req-resp/root-link b/syft/internal/fileresolver/test-fixtures/req-resp/root-link new file mode 
120000 index 00000000000..6a043149e81 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/req-resp/root-link @@ -0,0 +1 @@ +./ \ No newline at end of file diff --git a/syft/internal/fileresolver/test-fixtures/req-resp/somewhere/outside.txt b/syft/internal/fileresolver/test-fixtures/req-resp/somewhere/outside.txt new file mode 100644 index 00000000000..37ad5611998 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/req-resp/somewhere/outside.txt @@ -0,0 +1 @@ +file-2 diff --git a/syft/source/test-fixtures/symlinked-root/nested/link-root b/syft/internal/fileresolver/test-fixtures/symlinked-root/nested/link-root similarity index 100% rename from syft/source/test-fixtures/symlinked-root/nested/link-root rename to syft/internal/fileresolver/test-fixtures/symlinked-root/nested/link-root diff --git a/syft/source/test-fixtures/symlinked-root/real-root/file1.txt b/syft/internal/fileresolver/test-fixtures/symlinked-root/real-root/file1.txt similarity index 100% rename from syft/source/test-fixtures/symlinked-root/real-root/file1.txt rename to syft/internal/fileresolver/test-fixtures/symlinked-root/real-root/file1.txt diff --git a/syft/source/test-fixtures/symlinked-root/real-root/nested/file2.txt b/syft/internal/fileresolver/test-fixtures/symlinked-root/real-root/nested/file2.txt similarity index 100% rename from syft/source/test-fixtures/symlinked-root/real-root/nested/file2.txt rename to syft/internal/fileresolver/test-fixtures/symlinked-root/real-root/nested/file2.txt diff --git a/syft/source/test-fixtures/symlinked-root/real-root/nested/linked-file1.txt b/syft/internal/fileresolver/test-fixtures/symlinked-root/real-root/nested/linked-file1.txt similarity index 100% rename from syft/source/test-fixtures/symlinked-root/real-root/nested/linked-file1.txt rename to syft/internal/fileresolver/test-fixtures/symlinked-root/real-root/nested/linked-file1.txt diff --git a/syft/source/test-fixtures/symlinks-base/bar 
b/syft/internal/fileresolver/test-fixtures/symlinks-base/bar similarity index 100% rename from syft/source/test-fixtures/symlinks-base/bar rename to syft/internal/fileresolver/test-fixtures/symlinks-base/bar diff --git a/syft/internal/fileresolver/test-fixtures/symlinks-base/base b/syft/internal/fileresolver/test-fixtures/symlinks-base/base new file mode 100644 index 00000000000..e69de29bb2d diff --git a/syft/source/test-fixtures/symlinks-base/baz b/syft/internal/fileresolver/test-fixtures/symlinks-base/baz similarity index 100% rename from syft/source/test-fixtures/symlinks-base/baz rename to syft/internal/fileresolver/test-fixtures/symlinks-base/baz diff --git a/syft/source/test-fixtures/symlinks-base/chain b/syft/internal/fileresolver/test-fixtures/symlinks-base/chain similarity index 100% rename from syft/source/test-fixtures/symlinks-base/chain rename to syft/internal/fileresolver/test-fixtures/symlinks-base/chain diff --git a/syft/source/test-fixtures/symlinks-base/foo b/syft/internal/fileresolver/test-fixtures/symlinks-base/foo similarity index 100% rename from syft/source/test-fixtures/symlinks-base/foo rename to syft/internal/fileresolver/test-fixtures/symlinks-base/foo diff --git a/syft/internal/fileresolver/test-fixtures/symlinks-base/sub/item b/syft/internal/fileresolver/test-fixtures/symlinks-base/sub/item new file mode 100644 index 00000000000..e69de29bb2d diff --git a/syft/source/test-fixtures/symlinks-base/sub/link b/syft/internal/fileresolver/test-fixtures/symlinks-base/sub/link similarity index 100% rename from syft/source/test-fixtures/symlinks-base/sub/link rename to syft/internal/fileresolver/test-fixtures/symlinks-base/sub/link diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/file-1.txt b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/file-1.txt similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/file-1.txt rename to 
syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/file-1.txt diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/file-2.txt b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/file-2.txt similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/file-2.txt rename to syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/file-2.txt diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/file-3.txt b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/file-3.txt similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/file-3.txt rename to syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/file-3.txt diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-1 b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-1 similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-1 rename to syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-1 diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-2 b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-2 similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-2 rename to syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-2 diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-dead b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-dead similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-dead rename to syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-dead diff --git 
a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-indirect b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-indirect similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-indirect rename to syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-indirect diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-within b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-within similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-within rename to syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-within diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/parent-link b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/parent-link similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/parent-link rename to syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/parent-link diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/parent/file-4.txt b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/parent/file-4.txt similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/parent/file-4.txt rename to syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/parent/file-4.txt diff --git a/syft/source/test-fixtures/symlinks-loop/README.md b/syft/internal/fileresolver/test-fixtures/symlinks-loop/README.md similarity index 100% rename from syft/source/test-fixtures/symlinks-loop/README.md rename to syft/internal/fileresolver/test-fixtures/symlinks-loop/README.md diff --git a/syft/source/test-fixtures/symlinks-loop/block/loop0 b/syft/internal/fileresolver/test-fixtures/symlinks-loop/block/loop0 
similarity index 100% rename from syft/source/test-fixtures/symlinks-loop/block/loop0 rename to syft/internal/fileresolver/test-fixtures/symlinks-loop/block/loop0 diff --git a/syft/source/test-fixtures/symlinks-loop/devices/loop0/file.target b/syft/internal/fileresolver/test-fixtures/symlinks-loop/devices/loop0/file.target similarity index 100% rename from syft/source/test-fixtures/symlinks-loop/devices/loop0/file.target rename to syft/internal/fileresolver/test-fixtures/symlinks-loop/devices/loop0/file.target diff --git a/syft/source/test-fixtures/symlinks-loop/devices/loop0/subsystem b/syft/internal/fileresolver/test-fixtures/symlinks-loop/devices/loop0/subsystem similarity index 100% rename from syft/source/test-fixtures/symlinks-loop/devices/loop0/subsystem rename to syft/internal/fileresolver/test-fixtures/symlinks-loop/devices/loop0/subsystem diff --git a/syft/source/test-fixtures/symlinks-multiple-roots/outside/link_to_readme b/syft/internal/fileresolver/test-fixtures/symlinks-multiple-roots/outside/link_to_readme similarity index 100% rename from syft/source/test-fixtures/symlinks-multiple-roots/outside/link_to_readme rename to syft/internal/fileresolver/test-fixtures/symlinks-multiple-roots/outside/link_to_readme diff --git a/syft/source/test-fixtures/symlinks-multiple-roots/root/link_to_link_to_readme b/syft/internal/fileresolver/test-fixtures/symlinks-multiple-roots/root/link_to_link_to_readme similarity index 100% rename from syft/source/test-fixtures/symlinks-multiple-roots/root/link_to_link_to_readme rename to syft/internal/fileresolver/test-fixtures/symlinks-multiple-roots/root/link_to_link_to_readme diff --git a/syft/source/test-fixtures/symlinks-multiple-roots/root/readme b/syft/internal/fileresolver/test-fixtures/symlinks-multiple-roots/root/readme similarity index 100% rename from syft/source/test-fixtures/symlinks-multiple-roots/root/readme rename to syft/internal/fileresolver/test-fixtures/symlinks-multiple-roots/root/readme diff --git 
a/syft/source/test-fixtures/symlinks-prune-indexing/before-path b/syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/before-path similarity index 100% rename from syft/source/test-fixtures/symlinks-prune-indexing/before-path rename to syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/before-path diff --git a/syft/source/test-fixtures/symlinks-prune-indexing/c-file.txt b/syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/c-file.txt similarity index 100% rename from syft/source/test-fixtures/symlinks-prune-indexing/c-file.txt rename to syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/c-file.txt diff --git a/syft/source/test-fixtures/symlinks-prune-indexing/c-path b/syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/c-path similarity index 100% rename from syft/source/test-fixtures/symlinks-prune-indexing/c-path rename to syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/c-path diff --git a/syft/source/test-fixtures/symlinks-prune-indexing/path/1/2/3/4/dont-index-me-twice.txt b/syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/path/1/2/3/4/dont-index-me-twice.txt similarity index 100% rename from syft/source/test-fixtures/symlinks-prune-indexing/path/1/2/3/4/dont-index-me-twice.txt rename to syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/path/1/2/3/4/dont-index-me-twice.txt diff --git a/syft/source/test-fixtures/symlinks-prune-indexing/path/5/6/7/8/dont-index-me-twice-either.txt b/syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/path/5/6/7/8/dont-index-me-twice-either.txt similarity index 100% rename from syft/source/test-fixtures/symlinks-prune-indexing/path/5/6/7/8/dont-index-me-twice-either.txt rename to syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/path/5/6/7/8/dont-index-me-twice-either.txt diff --git a/syft/source/test-fixtures/symlinks-prune-indexing/path/file.txt 
b/syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/path/file.txt similarity index 100% rename from syft/source/test-fixtures/symlinks-prune-indexing/path/file.txt rename to syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/path/file.txt diff --git a/syft/source/test-fixtures/symlinks-simple/link_to_link_to_new_readme b/syft/internal/fileresolver/test-fixtures/symlinks-simple/link_to_link_to_new_readme similarity index 100% rename from syft/source/test-fixtures/symlinks-simple/link_to_link_to_new_readme rename to syft/internal/fileresolver/test-fixtures/symlinks-simple/link_to_link_to_new_readme diff --git a/syft/source/test-fixtures/symlinks-simple/link_to_new_readme b/syft/internal/fileresolver/test-fixtures/symlinks-simple/link_to_new_readme similarity index 100% rename from syft/source/test-fixtures/symlinks-simple/link_to_new_readme rename to syft/internal/fileresolver/test-fixtures/symlinks-simple/link_to_new_readme diff --git a/syft/source/test-fixtures/symlinks-simple/readme b/syft/internal/fileresolver/test-fixtures/symlinks-simple/readme similarity index 100% rename from syft/source/test-fixtures/symlinks-simple/readme rename to syft/internal/fileresolver/test-fixtures/symlinks-simple/readme diff --git a/syft/internal/fileresolver/test-fixtures/system_paths/outside_root/link_target/place b/syft/internal/fileresolver/test-fixtures/system_paths/outside_root/link_target/place new file mode 100644 index 00000000000..476e93d5714 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/system_paths/outside_root/link_target/place @@ -0,0 +1 @@ +good \ No newline at end of file diff --git a/syft/internal/fileresolver/test-fixtures/system_paths/target/dev/place b/syft/internal/fileresolver/test-fixtures/system_paths/target/dev/place new file mode 100644 index 00000000000..44d6628cdc6 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/system_paths/target/dev/place @@ -0,0 +1 @@ +bad \ No newline at end of file diff --git 
a/syft/internal/fileresolver/test-fixtures/system_paths/target/home/place b/syft/internal/fileresolver/test-fixtures/system_paths/target/home/place new file mode 100644 index 00000000000..476e93d5714 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/system_paths/target/home/place @@ -0,0 +1 @@ +good \ No newline at end of file diff --git a/syft/source/test-fixtures/system_paths/target/link/a-symlink b/syft/internal/fileresolver/test-fixtures/system_paths/target/link/a-symlink similarity index 100% rename from syft/source/test-fixtures/system_paths/target/link/a-symlink rename to syft/internal/fileresolver/test-fixtures/system_paths/target/link/a-symlink diff --git a/syft/internal/fileresolver/test-fixtures/system_paths/target/proc/place b/syft/internal/fileresolver/test-fixtures/system_paths/target/proc/place new file mode 100644 index 00000000000..44d6628cdc6 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/system_paths/target/proc/place @@ -0,0 +1 @@ +bad \ No newline at end of file diff --git a/syft/internal/fileresolver/test-fixtures/system_paths/target/sys/place b/syft/internal/fileresolver/test-fixtures/system_paths/target/sys/place new file mode 100644 index 00000000000..44d6628cdc6 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/system_paths/target/sys/place @@ -0,0 +1 @@ +bad \ No newline at end of file diff --git a/syft/source/unindexed_directory_resolver.go b/syft/internal/fileresolver/unindexed_directory.go similarity index 80% rename from syft/source/unindexed_directory_resolver.go rename to syft/internal/fileresolver/unindexed_directory.go index e965fef5c34..ae2300c5a39 100644 --- a/syft/source/unindexed_directory_resolver.go +++ b/syft/internal/fileresolver/unindexed_directory.go @@ -1,4 +1,4 @@ -package source +package fileresolver import ( "fmt" @@ -16,9 +16,13 @@ import ( "golang.org/x/exp/slices" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" ) -type UnindexedDirectoryResolver 
struct { +var _ file.Resolver = (*UnindexedDirectory)(nil) +var _ file.WritableResolver = (*UnindexedDirectory)(nil) + +type UnindexedDirectory struct { ls afero.Lstater lr afero.LinkReader base string @@ -26,15 +30,15 @@ type UnindexedDirectoryResolver struct { fs afero.Fs } -func NewUnindexedDirectoryResolver(dir string) WritableFileResolver { - return NewUnindexedDirectoryResolverFS(afero.NewOsFs(), dir, "") +func NewFromUnindexedDirectory(dir string) file.WritableResolver { + return NewFromUnindexedDirectoryFS(afero.NewOsFs(), dir, "") } -func NewUnindexedDirectoryResolverRooted(dir string, base string) WritableFileResolver { - return NewUnindexedDirectoryResolverFS(afero.NewOsFs(), dir, base) +func NewFromRootedUnindexedDirectory(dir string, base string) file.WritableResolver { + return NewFromUnindexedDirectoryFS(afero.NewOsFs(), dir, base) } -func NewUnindexedDirectoryResolverFS(fs afero.Fs, dir string, base string) WritableFileResolver { +func NewFromUnindexedDirectoryFS(fs afero.Fs, dir string, base string) file.WritableResolver { ls, ok := fs.(afero.Lstater) if !ok { panic(fmt.Sprintf("unable to get afero.Lstater interface from: %+v", fs)) @@ -62,7 +66,7 @@ func NewUnindexedDirectoryResolverFS(fs afero.Fs, dir string, base string) Writa base = path.Clean(path.Join(wd, base)) } } - return UnindexedDirectoryResolver{ + return UnindexedDirectory{ base: base, dir: dir, fs: fs, @@ -71,7 +75,7 @@ func NewUnindexedDirectoryResolverFS(fs afero.Fs, dir string, base string) Writa } } -func (u UnindexedDirectoryResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) { +func (u UnindexedDirectory) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { p := u.absPath(u.scrubInputPath(location.RealPath)) f, err := u.fs.Open(p) if err != nil { @@ -89,29 +93,29 @@ func (u UnindexedDirectoryResolver) FileContentsByLocation(location Location) (i // - full symlink resolution should be performed on all requests // - returns locations for 
any file or directory -func (u UnindexedDirectoryResolver) HasPath(p string) bool { +func (u UnindexedDirectory) HasPath(p string) bool { locs, err := u.filesByPath(true, true, p) return err == nil && len(locs) > 0 } -func (u UnindexedDirectoryResolver) canLstat(p string) bool { +func (u UnindexedDirectory) canLstat(p string) bool { _, _, err := u.ls.LstatIfPossible(u.absPath(p)) return err == nil } -func (u UnindexedDirectoryResolver) isRegularFile(p string) bool { +func (u UnindexedDirectory) isRegularFile(p string) bool { fi, _, err := u.ls.LstatIfPossible(u.absPath(p)) return err == nil && !fi.IsDir() } -func (u UnindexedDirectoryResolver) scrubInputPath(p string) string { +func (u UnindexedDirectory) scrubInputPath(p string) string { if path.IsAbs(p) { p = p[1:] } return path.Clean(p) } -func (u UnindexedDirectoryResolver) scrubResolutionPath(p string) string { +func (u UnindexedDirectory) scrubResolutionPath(p string) string { if u.base != "" { if path.IsAbs(p) { p = p[1:] @@ -123,7 +127,7 @@ func (u UnindexedDirectoryResolver) scrubResolutionPath(p string) string { return path.Clean(p) } -func (u UnindexedDirectoryResolver) absPath(p string) string { +func (u UnindexedDirectory) absPath(p string) string { if u.base != "" { if path.IsAbs(p) { p = p[1:] @@ -142,11 +146,11 @@ func (u UnindexedDirectoryResolver) absPath(p string) string { // - full symlink resolution should be performed on all requests // - only returns locations to files (NOT directories) -func (u UnindexedDirectoryResolver) FilesByPath(paths ...string) (out []Location, _ error) { +func (u UnindexedDirectory) FilesByPath(paths ...string) (out []file.Location, _ error) { return u.filesByPath(true, false, paths...) 
} -func (u UnindexedDirectoryResolver) filesByPath(resolveLinks bool, includeDirs bool, paths ...string) (out []Location, _ error) { +func (u UnindexedDirectory) filesByPath(resolveLinks bool, includeDirs bool, paths ...string) (out []file.Location, _ error) { // sort here for stable output sort.Strings(paths) nextPath: @@ -176,11 +180,11 @@ nextPath: // - full symlink resolution should be performed on all requests // - if multiple paths to the same file are found, the best single match should be returned // - only returns locations to files (NOT directories) -func (u UnindexedDirectoryResolver) FilesByGlob(patterns ...string) (out []Location, _ error) { +func (u UnindexedDirectory) FilesByGlob(patterns ...string) (out []file.Location, _ error) { return u.filesByGlob(true, false, patterns...) } -func (u UnindexedDirectoryResolver) filesByGlob(resolveLinks bool, includeDirs bool, patterns ...string) (out []Location, _ error) { +func (u UnindexedDirectory) filesByGlob(resolveLinks bool, includeDirs bool, patterns ...string) (out []file.Location, _ error) { f := unindexedDirectoryResolverFS{ u: u, } @@ -199,13 +203,13 @@ func (u UnindexedDirectoryResolver) filesByGlob(resolveLinks bool, includeDirs b return u.filesByPath(resolveLinks, includeDirs, paths...) } -func (u UnindexedDirectoryResolver) FilesByMIMEType(_ ...string) ([]Location, error) { +func (u UnindexedDirectory) FilesByMIMEType(_ ...string) ([]file.Location, error) { panic("FilesByMIMEType unsupported") } // RelativeFileByPath fetches a single file at the given path relative to the layer squash of the given reference. // This is helpful when attempting to find a file that is in the same layer or lower as another file. 
-func (u UnindexedDirectoryResolver) RelativeFileByPath(l Location, p string) *Location { +func (u UnindexedDirectory) RelativeFileByPath(l file.Location, p string) *file.Location { p = path.Clean(path.Join(l.RealPath, p)) locs, err := u.filesByPath(true, false, p) if err != nil || len(locs) == 0 { @@ -221,8 +225,8 @@ func (u UnindexedDirectoryResolver) RelativeFileByPath(l Location, p string) *Lo // - NO symlink resolution should be performed on results // - returns locations for any file or directory -func (u UnindexedDirectoryResolver) AllLocations() <-chan Location { - out := make(chan Location) +func (u UnindexedDirectory) AllLocations() <-chan file.Location { + out := make(chan file.Location) go func() { defer close(out) err := afero.Walk(u.fs, u.absPath("."), func(p string, info fs.FileInfo, err error) error { @@ -231,7 +235,7 @@ func (u UnindexedDirectoryResolver) AllLocations() <-chan Location { return nil } p = strings.TrimPrefix(p, "/") - out <- NewLocation(p) + out <- file.NewLocation(p) return nil }) if err != nil { @@ -241,11 +245,11 @@ func (u UnindexedDirectoryResolver) AllLocations() <-chan Location { return out } -func (u UnindexedDirectoryResolver) FileMetadataByLocation(_ Location) (FileMetadata, error) { +func (u UnindexedDirectory) FileMetadataByLocation(_ file.Location) (file.Metadata, error) { panic("FileMetadataByLocation unsupported") } -func (u UnindexedDirectoryResolver) Write(location Location, reader io.Reader) error { +func (u UnindexedDirectory) Write(location file.Location, reader io.Reader) error { filePath := location.RealPath if path.IsAbs(filePath) { filePath = filePath[1:] @@ -254,10 +258,7 @@ func (u UnindexedDirectoryResolver) Write(location Location, reader io.Reader) e return afero.WriteReader(u.fs, absPath, reader) } -var _ FileResolver = (*UnindexedDirectoryResolver)(nil) -var _ WritableFileResolver = (*UnindexedDirectoryResolver)(nil) - -func (u UnindexedDirectoryResolver) newLocation(filePath string, resolveLinks bool) 
*Location { +func (u UnindexedDirectory) newLocation(filePath string, resolveLinks bool) *file.Location { filePath = path.Clean(filePath) virtualPath := "" @@ -277,12 +278,12 @@ func (u UnindexedDirectoryResolver) newLocation(filePath string, resolveLinks bo } } - l := NewVirtualLocation(realPath, virtualPath) + l := file.NewVirtualLocation(realPath, virtualPath) return &l } //nolint:gocognit -func (u UnindexedDirectoryResolver) resolveLinks(filePath string) []string { +func (u UnindexedDirectory) resolveLinks(filePath string) []string { var visited []string out := []string{} @@ -349,15 +350,15 @@ func (u UnindexedDirectoryResolver) resolveLinks(filePath string) []string { return out } -func (u UnindexedDirectoryResolver) isSymlink(fi os.FileInfo) bool { +func (u UnindexedDirectory) isSymlink(fi os.FileInfo) bool { return fi.Mode().Type()&fs.ModeSymlink == fs.ModeSymlink } // ------------------------- fs.FS ------------------------------ -// unindexedDirectoryResolverFS wraps the UnindexedDirectoryResolver as a fs.FS, fs.ReadDirFS, and fs.StatFS +// unindexedDirectoryResolverFS wraps the UnindexedDirectory as a fs.FS, fs.ReadDirFS, and fs.StatFS type unindexedDirectoryResolverFS struct { - u UnindexedDirectoryResolver + u UnindexedDirectory } // resolve takes a virtual path and returns the resolved absolute or relative path and file info @@ -470,7 +471,7 @@ func (f unindexedDirectoryResolverDirEntry) Info() (fs.FileInfo, error) { var _ fs.DirEntry = (*unindexedDirectoryResolverDirEntry)(nil) type unindexedDirectoryResolverFile struct { - u UnindexedDirectoryResolver + u UnindexedDirectory path string } @@ -493,7 +494,7 @@ func (f unindexedDirectoryResolverFile) Close() error { var _ fs.File = (*unindexedDirectoryResolverFile)(nil) type unindexedDirectoryResolverFileInfo struct { - u UnindexedDirectoryResolver + u UnindexedDirectory name string size int64 mode fs.FileMode @@ -502,7 +503,7 @@ type unindexedDirectoryResolverFileInfo struct { sys any } -func 
newFsFileInfo(u UnindexedDirectoryResolver, name string, isDir bool, fi os.FileInfo) unindexedDirectoryResolverFileInfo { +func newFsFileInfo(u UnindexedDirectory, name string, isDir bool, fi os.FileInfo) unindexedDirectoryResolverFileInfo { return unindexedDirectoryResolverFileInfo{ u: u, name: name, diff --git a/syft/internal/fileresolver/unindexed_directory_test.go b/syft/internal/fileresolver/unindexed_directory_test.go new file mode 100644 index 00000000000..3714d8d55eb --- /dev/null +++ b/syft/internal/fileresolver/unindexed_directory_test.go @@ -0,0 +1,1285 @@ +//go:build !windows +// +build !windows + +package fileresolver + +import ( + "io" + "os" + "path" + "path/filepath" + "sort" + "strings" + "testing" + "time" + + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/scylladb/go-set/strset" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" + "github.com/anchore/syft/syft/file" +) + +func Test_UnindexDirectoryResolver_RequestRelativePathWithinSymlink(t *testing.T) { + pwd, err := os.Getwd() + + // we need to mimic a shell, otherwise we won't get a path within a symlink + targetPath := filepath.Join(pwd, "./test-fixtures/symlinked-root/nested/link-root/nested") + t.Setenv("PWD", targetPath) + + require.NoError(t, err) + require.NoError(t, os.Chdir(targetPath)) + t.Cleanup(func() { + require.NoError(t, os.Chdir(pwd)) + }) + + resolver := NewFromUnindexedDirectory("./") + require.NoError(t, err) + + locations, err := resolver.FilesByPath("file2.txt") + require.NoError(t, err) + require.Len(t, locations, 1) + + // TODO: this is technically not correct behavior since this is reporting the symlink path (virtual path) and + // not the real path. 
+ require.False(t, filepath.IsAbs(locations[0].RealPath), "should be relative path") +} + +func Test_UnindexDirectoryResolver_FilesByPath_request_response(t *testing.T) { + // / + // somewhere/ + // outside.txt + // root-link -> ./ + // path/ + // to/ + // abs-inside.txt -> /path/to/the/file.txt # absolute link to somewhere inside of the root + // rel-inside.txt -> ./the/file.txt # relative link to somewhere inside of the root + // the/ + // file.txt + // abs-outside.txt -> /somewhere/outside.txt # absolute link to outside of the root + // rel-outside -> ../../../somewhere/outside.txt # relative link to outside of the root + // + + testDir, err := os.Getwd() + require.NoError(t, err) + relative := filepath.Join("test-fixtures", "req-resp") + absolute := filepath.Join(testDir, relative) + + absInsidePath := filepath.Join(absolute, "path", "to", "abs-inside.txt") + absOutsidePath := filepath.Join(absolute, "path", "to", "the", "abs-outside.txt") + + relativeViaLink := filepath.Join(relative, "root-link") + absoluteViaLink := filepath.Join(absolute, "root-link") + + relativeViaDoubleLink := filepath.Join(relative, "root-link", "root-link") + absoluteViaDoubleLink := filepath.Join(absolute, "root-link", "root-link") + + cleanup := func() { + _ = os.Remove(absInsidePath) + _ = os.Remove(absOutsidePath) + } + + // ensure the absolute symlinks are cleaned up from any previous runs + cleanup() + + require.NoError(t, os.Symlink(filepath.Join(absolute, "path", "to", "the", "file.txt"), absInsidePath)) + require.NoError(t, os.Symlink(filepath.Join(absolute, "somewhere", "outside.txt"), absOutsidePath)) + + t.Cleanup(cleanup) + + cases := []struct { + name string + cwd string + root string + base string + input string + expectedRealPath string + expectedVirtualPath string + }{ + { + name: "relative root, relative request, direct", + root: relative, + input: "path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "abs root, relative request, 
direct", + root: absolute, + input: "path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "relative root, abs request, direct", + root: relative, + input: "/path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "abs root, abs request, direct", + root: absolute, + input: "/path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + // cwd within root... + { + name: "relative root, relative request, direct, cwd within root", + cwd: filepath.Join(relative, "path/to"), + root: "../../", + input: "path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "abs root, relative request, direct, cwd within root", + cwd: filepath.Join(relative, "path/to"), + root: absolute, + input: "path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "relative root, abs request, direct, cwd within root", + cwd: filepath.Join(relative, "path/to"), + root: "../../", + input: "/path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "abs root, abs request, direct, cwd within root", + cwd: filepath.Join(relative, "path/to"), + + root: absolute, + input: "/path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + // cwd within symlink root... + { + name: "relative root, relative request, direct, cwd within symlink root", + cwd: relativeViaLink, + root: "./", + input: "path/to/the/file.txt", + // note: this is inconsistent with the directory resolver. The real path is essentially the virtual path + // in this case for the unindexed resolver, which is not correct. 
+ expectedRealPath: "path/to/the/file.txt", + }, + { + name: "abs root, relative request, direct, cwd within symlink root", + cwd: relativeViaLink, + root: absoluteViaLink, + input: "path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "relative root, abs request, direct, cwd within symlink root", + cwd: relativeViaLink, + root: "./", + input: "/path/to/the/file.txt", + // note: this is inconsistent with the directory resolver. The real path is essentially the virtual path + // in this case for the unindexed resolver, which is not correct. + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "abs root, abs request, direct, cwd within symlink root", + cwd: relativeViaLink, + root: absoluteViaLink, + input: "/path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + // cwd within symlink root, request nested within... + { + name: "relative root, relative nested request, direct, cwd within symlink root", + cwd: relativeViaLink, + root: "./path", + input: "to/the/file.txt", + // note: this is inconsistent with the directory resolver. The real path is essentially the virtual path + // in this case for the unindexed resolver, which is not correct. + expectedRealPath: "to/the/file.txt", + }, + { + name: "abs root, relative nested request, direct, cwd within symlink root", + cwd: relativeViaLink, + root: filepath.Join(absoluteViaLink, "path"), + input: "to/the/file.txt", + expectedRealPath: "to/the/file.txt", + }, + { + name: "relative root, abs nested request, direct, cwd within symlink root", + cwd: relativeViaLink, + root: "./path", + input: "/to/the/file.txt", + // note: this is inconsistent with the directory resolver. The real path is essentially the virtual path + // in this case for the unindexed resolver, which is not correct. 
+ expectedRealPath: "to/the/file.txt", + }, + { + name: "abs root, abs nested request, direct, cwd within symlink root", + cwd: relativeViaLink, + root: filepath.Join(absoluteViaLink, "path"), + input: "/to/the/file.txt", + expectedRealPath: "to/the/file.txt", + }, + // cwd within DOUBLE symlink root... + { + name: "relative root, relative request, direct, cwd within (double) symlink root", + cwd: relativeViaDoubleLink, + root: "./", + input: "path/to/the/file.txt", + // note: this is inconsistent with the directory resolver. The real path is essentially the virtual path + // in this case for the unindexed resolver, which is not correct. + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "abs root, relative request, direct, cwd within (double) symlink root", + cwd: relativeViaDoubleLink, + root: absoluteViaDoubleLink, + input: "path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "relative root, abs request, direct, cwd within (double) symlink root", + cwd: relativeViaDoubleLink, + root: "./", + input: "/path/to/the/file.txt", + // note: this is inconsistent with the directory resolver. The real path is essentially the virtual path + // in this case for the unindexed resolver, which is not correct. + expectedRealPath: "path/to/the/file.txt", + }, + { + name: "abs root, abs request, direct, cwd within (double) symlink root", + cwd: relativeViaDoubleLink, + root: absoluteViaDoubleLink, + input: "/path/to/the/file.txt", + expectedRealPath: "path/to/the/file.txt", + }, + // cwd within DOUBLE symlink root, request nested within... + { + name: "relative root, relative nested request, direct, cwd within (double) symlink root", + cwd: relativeViaDoubleLink, + root: "./path", + input: "to/the/file.txt", + // note: this is inconsistent with the directory resolver. The real path is essentially the virtual path + // in this case for the unindexed resolver, which is not correct. 
+ expectedRealPath: "to/the/file.txt", + }, + { + name: "abs root, relative nested request, direct, cwd within (double) symlink root", + cwd: relativeViaDoubleLink, + root: filepath.Join(absoluteViaDoubleLink, "path"), + input: "to/the/file.txt", + expectedRealPath: "to/the/file.txt", + }, + { + name: "relative root, abs nested request, direct, cwd within (double) symlink root", + cwd: relativeViaDoubleLink, + root: "./path", + input: "/to/the/file.txt", + // note: this is inconsistent with the directory resolver. The real path is essentially the virtual path + // in this case for the unindexed resolver, which is not correct. + expectedRealPath: "to/the/file.txt", + }, + { + name: "abs root, abs nested request, direct, cwd within (double) symlink root", + cwd: relativeViaDoubleLink, + root: filepath.Join(absoluteViaDoubleLink, "path"), + input: "/to/the/file.txt", + expectedRealPath: "to/the/file.txt", + }, + // cwd within DOUBLE symlink root, request nested DEEP within... + { + name: "relative root, relative nested request, direct, cwd deep within (double) symlink root", + cwd: filepath.Join(relativeViaDoubleLink, "path", "to"), + root: "../", + input: "to/the/file.txt", + // note: this is inconsistent with the directory resolver. The real path is essentially the virtual path + // in this case for the unindexed resolver, which is not correct. + expectedRealPath: "to/the/file.txt", + }, + { + name: "abs root, relative nested request, direct, cwd deep within (double) symlink root", + cwd: filepath.Join(relativeViaDoubleLink, "path", "to"), + root: filepath.Join(absoluteViaDoubleLink, "path"), + input: "to/the/file.txt", + expectedRealPath: "to/the/file.txt", + }, + { + name: "relative root, abs nested request, direct, cwd deep within (double) symlink root", + cwd: filepath.Join(relativeViaDoubleLink, "path", "to"), + root: "../", + input: "/to/the/file.txt", + // note: this is inconsistent with the directory resolver. 
The real path is essentially the virtual path + // in this case for the unindexed resolver, which is not correct. + expectedRealPath: "to/the/file.txt", + }, + { + name: "abs root, abs nested request, direct, cwd deep within (double) symlink root", + cwd: filepath.Join(relativeViaDoubleLink, "path", "to"), + root: filepath.Join(absoluteViaDoubleLink, "path"), + input: "/to/the/file.txt", + expectedRealPath: "to/the/file.txt", + }, + // link to outside of root cases... + { + name: "relative root, relative request, abs indirect (outside of root)", + root: filepath.Join(relative, "path"), + input: "to/the/abs-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/abs-outside.txt", + }, + { + name: "abs root, relative request, abs indirect (outside of root)", + root: filepath.Join(absolute, "path"), + input: "to/the/abs-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/abs-outside.txt", + }, + { + name: "relative root, abs request, abs indirect (outside of root)", + root: filepath.Join(relative, "path"), + input: "/to/the/abs-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/abs-outside.txt", + }, + { + name: "abs root, abs request, abs indirect (outside of root)", + root: filepath.Join(absolute, "path"), + input: "/to/the/abs-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/abs-outside.txt", + }, + { + name: "relative root, relative request, relative indirect (outside of root)", + root: filepath.Join(relative, "path"), + input: "to/the/rel-outside.txt", + //expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + // TODO: the real path is not correct + expectedRealPath: "../somewhere/outside.txt", + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "abs root, relative request, relative 
indirect (outside of root)", + root: filepath.Join(absolute, "path"), + input: "to/the/rel-outside.txt", + //expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + // TODO: the real path is not correct + expectedRealPath: "../somewhere/outside.txt", + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "relative root, abs request, relative indirect (outside of root)", + root: filepath.Join(relative, "path"), + input: "/to/the/rel-outside.txt", + //expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + // TODO: the real path is not correct + expectedRealPath: "../somewhere/outside.txt", + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "abs root, abs request, relative indirect (outside of root)", + root: filepath.Join(absolute, "path"), + input: "/to/the/rel-outside.txt", + //expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + // TODO: the real path is not correct + expectedRealPath: "../somewhere/outside.txt", + expectedVirtualPath: "to/the/rel-outside.txt", + }, + // link to outside of root cases... 
cwd within symlink root + { + name: "relative root, relative request, abs indirect (outside of root), cwd within symlink root", + cwd: relativeViaLink, + root: "path", + input: "to/the/abs-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/abs-outside.txt", + }, + { + name: "abs root, relative request, abs indirect (outside of root), cwd within symlink root", + cwd: relativeViaLink, + root: filepath.Join(absolute, "path"), + input: "to/the/abs-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/abs-outside.txt", + }, + { + name: "relative root, abs request, abs indirect (outside of root), cwd within symlink root", + cwd: relativeViaLink, + root: "path", + input: "/to/the/abs-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/abs-outside.txt", + }, + { + name: "abs root, abs request, abs indirect (outside of root), cwd within symlink root", + cwd: relativeViaLink, + root: filepath.Join(absolute, "path"), + input: "/to/the/abs-outside.txt", + expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + expectedVirtualPath: "to/the/abs-outside.txt", + }, + { + name: "relative root, relative request, relative indirect (outside of root), cwd within symlink root", + cwd: relativeViaLink, + root: "path", + input: "to/the/rel-outside.txt", + //expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + // TODO: the real path is not correct + expectedRealPath: "../somewhere/outside.txt", + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "abs root, relative request, relative indirect (outside of root), cwd within symlink root", + cwd: relativeViaLink, + root: filepath.Join(absolute, "path"), + input: "to/the/rel-outside.txt", + //expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + // TODO: the real path is not correct + 
expectedRealPath: "../somewhere/outside.txt", + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "relative root, abs request, relative indirect (outside of root), cwd within symlink root", + cwd: relativeViaLink, + root: "path", + input: "/to/the/rel-outside.txt", + //expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + // TODO: the real path is not correct + expectedRealPath: "../somewhere/outside.txt", + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "abs root, abs request, relative indirect (outside of root), cwd within symlink root", + cwd: relativeViaLink, + root: filepath.Join(absolute, "path"), + input: "/to/the/rel-outside.txt", + //expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + // TODO: the real path is not correct + expectedRealPath: "../somewhere/outside.txt", + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "relative root, relative request, relative indirect (outside of root), cwd within DOUBLE symlink root", + cwd: relativeViaDoubleLink, + root: "path", + input: "to/the/rel-outside.txt", + //expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + // TODO: the real path is not correct + expectedRealPath: "../somewhere/outside.txt", + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "abs root, relative request, relative indirect (outside of root), cwd within DOUBLE symlink root", + cwd: relativeViaDoubleLink, + root: filepath.Join(absolute, "path"), + input: "to/the/rel-outside.txt", + //expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + // TODO: the real path is not correct + expectedRealPath: "../somewhere/outside.txt", + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "relative root, abs request, relative indirect (outside of root), cwd within DOUBLE symlink root", + cwd: relativeViaDoubleLink, + root: "path", + input: "/to/the/rel-outside.txt", + //expectedRealPath: filepath.Join(absolute, 
"/somewhere/outside.txt"), + // TODO: the real path is not correct + expectedRealPath: "../somewhere/outside.txt", + expectedVirtualPath: "to/the/rel-outside.txt", + }, + { + name: "abs root, abs request, relative indirect (outside of root), cwd within DOUBLE symlink root", + cwd: relativeViaDoubleLink, + root: filepath.Join(absolute, "path"), + input: "/to/the/rel-outside.txt", + //expectedRealPath: filepath.Join(absolute, "/somewhere/outside.txt"), + // TODO: the real path is not correct + expectedRealPath: "../somewhere/outside.txt", + expectedVirtualPath: "to/the/rel-outside.txt", + }, + } + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + + // we need to mimic a shell, otherwise we won't get a path within a symlink + targetPath := filepath.Join(testDir, c.cwd) + t.Setenv("PWD", filepath.Clean(targetPath)) + + require.NoError(t, err) + require.NoError(t, os.Chdir(targetPath)) + t.Cleanup(func() { + require.NoError(t, os.Chdir(testDir)) + }) + + resolver := NewFromUnindexedDirectory(c.root) + require.NotNil(t, resolver) + + refs, err := resolver.FilesByPath(c.input) + require.NoError(t, err) + if c.expectedRealPath == "" { + require.Empty(t, refs) + return + } + require.Len(t, refs, 1) + assert.Equal(t, c.expectedRealPath, refs[0].RealPath, "real path different") + assert.Equal(t, c.expectedVirtualPath, refs[0].VirtualPath, "virtual path different") + }) + } +} + +func Test_UnindexedDirectoryResolver_Basic(t *testing.T) { + wd, err := os.Getwd() + require.NoError(t, err) + + r := NewFromUnindexedDirectory(path.Join(wd, "test-fixtures")) + locations, err := r.FilesByGlob("image-symlinks/*") + require.NoError(t, err) + require.Len(t, locations, 5) +} + +func Test_UnindexedDirectoryResolver_FilesByPath_relativeRoot(t *testing.T) { + cases := []struct { + name string + relativeRoot string + input string + expected []string + }{ + { + name: "should find a file from an absolute input", + relativeRoot: "./test-fixtures/", + input: 
"/image-symlinks/file-1.txt", + expected: []string{ + "image-symlinks/file-1.txt", + }, + }, + { + name: "should find a file from a relative path", + relativeRoot: "./test-fixtures/", + input: "image-symlinks/file-1.txt", + expected: []string{ + "image-symlinks/file-1.txt", + }, + }, + { + name: "should find a file from a relative path (root above cwd)", + // TODO: refactor me! this test depends on the structure of the source dir not changing, which isn't great + relativeRoot: "../", + input: "fileresolver/deferred.go", + expected: []string{ + "fileresolver/deferred.go", + }, + }, + } + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + resolver := NewFromUnindexedDirectory(c.relativeRoot) + + refs, err := resolver.FilesByPath(c.input) + require.NoError(t, err) + assert.Len(t, refs, len(c.expected)) + s := strset.New() + for _, actual := range refs { + s.Add(actual.RealPath) + } + assert.ElementsMatch(t, c.expected, s.List()) + }) + } +} + +func Test_UnindexedDirectoryResolver_FilesByPath_absoluteRoot(t *testing.T) { + cases := []struct { + name string + relativeRoot string + input string + expected []string + }{ + { + name: "should find a file from an absolute input", + relativeRoot: "./test-fixtures/", + input: "/image-symlinks/file-1.txt", + expected: []string{ + "image-symlinks/file-1.txt", + }, + }, + { + name: "should find a file from a relative path", + relativeRoot: "./test-fixtures/", + input: "image-symlinks/file-1.txt", + expected: []string{ + "image-symlinks/file-1.txt", + }, + }, + { + name: "should find a file from a relative path (root above cwd)", + // TODO: refactor me! 
this test depends on the structure of the source dir not changing, which isn't great + relativeRoot: "../", + input: "fileresolver/directory.go", + expected: []string{ + "fileresolver/directory.go", + }, + }, + } + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + // note: this test is all about asserting correct functionality when the given analysis path + // is an absolute path + absRoot, err := filepath.Abs(c.relativeRoot) + require.NoError(t, err) + + resolver := NewFromUnindexedDirectory(absRoot) + assert.NoError(t, err) + + refs, err := resolver.FilesByPath(c.input) + require.NoError(t, err) + assert.Len(t, refs, len(c.expected)) + s := strset.New() + for _, actual := range refs { + s.Add(actual.RealPath) + } + assert.ElementsMatch(t, c.expected, s.List()) + }) + } +} + +func Test_UnindexedDirectoryResolver_FilesByPath(t *testing.T) { + cases := []struct { + name string + root string + input string + expected string + refCount int + forcePositiveHasPath bool + }{ + { + name: "finds a file (relative)", + root: "./test-fixtures/", + input: "image-symlinks/file-1.txt", + expected: "image-symlinks/file-1.txt", + refCount: 1, + }, + { + name: "finds a file with relative indirection", + root: "./test-fixtures/../test-fixtures", + input: "image-symlinks/file-1.txt", + expected: "image-symlinks/file-1.txt", + refCount: 1, + }, + { + name: "managed non-existing files (relative)", + root: "./test-fixtures/", + input: "test-fixtures/image-symlinks/bogus.txt", + refCount: 0, + }, + { + name: "finds a file (absolute)", + root: "./test-fixtures/", + input: "/image-symlinks/file-1.txt", + expected: "image-symlinks/file-1.txt", + refCount: 1, + }, + { + name: "directories ignored", + root: "./test-fixtures/", + input: "/image-symlinks", + refCount: 0, + forcePositiveHasPath: true, + }, + } + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + resolver := NewFromUnindexedDirectory(c.root) + + hasPath := resolver.HasPath(c.input) + if 
!c.forcePositiveHasPath { + if c.refCount != 0 && !hasPath { + t.Errorf("expected HasPath() to indicate existence, but did not") + } else if c.refCount == 0 && hasPath { + t.Errorf("expected HasPath() to NOT indicate existence, but does") + } + } else if !hasPath { + t.Errorf("expected HasPath() to indicate existence, but did not (force path)") + } + + refs, err := resolver.FilesByPath(c.input) + require.NoError(t, err) + assert.Len(t, refs, c.refCount) + for _, actual := range refs { + assert.Equal(t, c.expected, actual.RealPath) + } + }) + } +} + +func Test_UnindexedDirectoryResolver_MultipleFilesByPath(t *testing.T) { + cases := []struct { + name string + input []string + refCount int + }{ + { + name: "finds multiple files", + input: []string{"image-symlinks/file-1.txt", "image-symlinks/file-2.txt"}, + refCount: 2, + }, + { + name: "skips non-existing files", + input: []string{"image-symlinks/bogus.txt", "image-symlinks/file-1.txt"}, + refCount: 1, + }, + { + name: "does not return anything for non-existing directories", + input: []string{"non-existing/bogus.txt", "non-existing/file-1.txt"}, + refCount: 0, + }, + } + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + resolver := NewFromUnindexedDirectory("./test-fixtures") + refs, err := resolver.FilesByPath(c.input...) 
+ assert.NoError(t, err) + + if len(refs) != c.refCount { + t.Errorf("unexpected number of refs: %d != %d", len(refs), c.refCount) + } + }) + } +} + +func Test_UnindexedDirectoryResolver_FilesByGlobMultiple(t *testing.T) { + resolver := NewFromUnindexedDirectory("./test-fixtures") + refs, err := resolver.FilesByGlob("**/image-symlinks/file*") + assert.NoError(t, err) + + assert.Len(t, refs, 2) +} + +func Test_UnindexedDirectoryResolver_FilesByGlobRecursive(t *testing.T) { + resolver := NewFromUnindexedDirectory("./test-fixtures/image-symlinks") + refs, err := resolver.FilesByGlob("**/*.txt") + assert.NoError(t, err) + assert.Len(t, refs, 6) +} + +func Test_UnindexedDirectoryResolver_FilesByGlobSingle(t *testing.T) { + resolver := NewFromUnindexedDirectory("./test-fixtures") + refs, err := resolver.FilesByGlob("**/image-symlinks/*1.txt") + assert.NoError(t, err) + + assert.Len(t, refs, 1) + assert.Equal(t, "image-symlinks/file-1.txt", refs[0].RealPath) +} + +func Test_UnindexedDirectoryResolver_FilesByPath_ResolvesSymlinks(t *testing.T) { + + tests := []struct { + name string + fixture string + }{ + { + name: "one degree", + fixture: "link_to_new_readme", + }, + { + name: "two degrees", + fixture: "link_to_link_to_new_readme", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + resolver := NewFromUnindexedDirectory("./test-fixtures/symlinks-simple") + + refs, err := resolver.FilesByPath(test.fixture) + require.NoError(t, err) + require.Len(t, refs, 1) + + reader, err := resolver.FileContentsByLocation(refs[0]) + require.NoError(t, err) + + actual, err := io.ReadAll(reader) + require.NoError(t, err) + + expected, err := os.ReadFile("test-fixtures/symlinks-simple/readme") + require.NoError(t, err) + + require.Equal(t, string(expected), string(actual)) + }) + } +} + +func Test_UnindexedDirectoryResolverDoesNotIgnoreRelativeSystemPaths(t *testing.T) { + // let's make certain that "dev/place" is not ignored, since it is not "/dev/place" + 
resolver := NewFromUnindexedDirectory("test-fixtures/system_paths/target") + + // all paths should be found (non filtering matches a path) + locations, err := resolver.FilesByGlob("**/place") + assert.NoError(t, err) + // 4: within target/ + // 1: target/link --> relative path to "place" // NOTE: this is filtered out since it not unique relative to outside_root/link_target/place + // 1: outside_root/link_target/place + assert.Len(t, locations, 5) + + // ensure that symlink indexing outside of root worked + testLocation := "../outside_root/link_target/place" + ok := false + for _, location := range locations { + if strings.HasSuffix(location.RealPath, testLocation) { + ok = true + } + } + + if !ok { + t.Fatalf("could not find test location=%q", testLocation) + } +} + +func Test_UnindexedDirectoryResover_IndexingNestedSymLinks(t *testing.T) { + resolver := NewFromUnindexedDirectory("./test-fixtures/symlinks-simple") + + // check that we can get the real path + locations, err := resolver.FilesByPath("./readme") + require.NoError(t, err) + assert.Len(t, locations, 1) + + // check that we can access the same file via 1 symlink + locations, err = resolver.FilesByPath("./link_to_new_readme") + require.NoError(t, err) + require.Len(t, locations, 1) + assert.Equal(t, "readme", locations[0].RealPath) + assert.Equal(t, "link_to_new_readme", locations[0].VirtualPath) + + // check that we can access the same file via 2 symlinks + locations, err = resolver.FilesByPath("./link_to_link_to_new_readme") + require.NoError(t, err) + require.Len(t, locations, 1) + assert.Equal(t, "readme", locations[0].RealPath) + assert.Equal(t, "link_to_link_to_new_readme", locations[0].VirtualPath) + + // check that we can access the same file via 2 symlinks + locations, err = resolver.FilesByGlob("**/link_*") + require.NoError(t, err) + require.Len(t, locations, 1) // you would think this is 2, however, they point to the same file, and glob only returns unique files + + // returned locations can be 
in any order + expectedVirtualPaths := []string{ + "link_to_link_to_new_readme", + //"link_to_new_readme", // we filter out this one because the first symlink resolves to the same file + } + + expectedRealPaths := []string{ + "readme", + } + + actualRealPaths := strset.New() + actualVirtualPaths := strset.New() + for _, a := range locations { + actualVirtualPaths.Add(a.VirtualPath) + actualRealPaths.Add(a.RealPath) + } + + assert.ElementsMatch(t, expectedVirtualPaths, actualVirtualPaths.List()) + assert.ElementsMatch(t, expectedRealPaths, actualRealPaths.List()) +} + +func Test_UnindexedDirectoryResover_IndexingNestedSymLinksOutsideOfRoot(t *testing.T) { + resolver := NewFromUnindexedDirectory("./test-fixtures/symlinks-multiple-roots/root") + + // check that we can get the real path + locations, err := resolver.FilesByPath("./readme") + require.NoError(t, err) + assert.Len(t, locations, 1) + + // check that we can access the same file via 2 symlinks (link_to_link_to_readme -> link_to_readme -> readme) + locations, err = resolver.FilesByPath("./link_to_link_to_readme") + require.NoError(t, err) + assert.Len(t, locations, 1) + + // something looks wrong here + t.Failed() +} + +func Test_UnindexedDirectoryResover_RootViaSymlink(t *testing.T) { + resolver := NewFromUnindexedDirectory("./test-fixtures/symlinked-root/nested/link-root") + + locations, err := resolver.FilesByPath("./file1.txt") + require.NoError(t, err) + assert.Len(t, locations, 1) + + locations, err = resolver.FilesByPath("./nested/file2.txt") + require.NoError(t, err) + assert.Len(t, locations, 1) + + locations, err = resolver.FilesByPath("./nested/linked-file1.txt") + require.NoError(t, err) + assert.Len(t, locations, 1) +} + +func Test_UnindexedDirectoryResolver_FileContentsByLocation(t *testing.T) { + cwd, err := os.Getwd() + require.NoError(t, err) + + r := NewFromUnindexedDirectory(path.Join(cwd, "test-fixtures/image-simple")) + require.NoError(t, err) + + tests := []struct { + name string + 
location file.Location + expects string + err bool + }{ + { + name: "use file reference for content requests", + location: file.NewLocation("file-1.txt"), + expects: "this file has contents", + }, + { + name: "error on empty file reference", + location: file.NewLocationFromDirectory("doesn't matter", stereoscopeFile.Reference{}), + err: true, + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + + actual, err := r.FileContentsByLocation(test.location) + if test.err { + require.Error(t, err) + return + } + + require.NoError(t, err) + if test.expects != "" { + b, err := io.ReadAll(actual) + require.NoError(t, err) + assert.Equal(t, test.expects, string(b)) + } + }) + } +} + +func Test_UnindexedDirectoryResover_SymlinkLoopWithGlobsShouldResolve(t *testing.T) { + test := func(t *testing.T) { + resolver := NewFromUnindexedDirectory("./test-fixtures/symlinks-loop") + + locations, err := resolver.FilesByGlob("**/file.target") + require.NoError(t, err) + + require.Len(t, locations, 1) + assert.Equal(t, "devices/loop0/file.target", locations[0].RealPath) + } + + testWithTimeout(t, 5*time.Second, test) +} + +func Test_UnindexedDirectoryResolver_FilesByPath_baseRoot(t *testing.T) { + cases := []struct { + name string + root string + input string + expected []string + }{ + { + name: "should find the base file", + root: "./test-fixtures/symlinks-base/", + input: "./base", + expected: []string{ + "base", + }, + }, + { + name: "should follow a link with a pivoted root", + root: "./test-fixtures/symlinks-base/", + input: "./foo", + expected: []string{ + "base", + }, + }, + { + name: "should follow a relative link with extra parents", + root: "./test-fixtures/symlinks-base/", + input: "./bar", + expected: []string{ + "base", + }, + }, + { + name: "should follow an absolute link with extra parents", + root: "./test-fixtures/symlinks-base/", + input: "./baz", + expected: []string{ + "base", + }, + }, + { + name: "should follow an absolute link with extra 
parents", + root: "./test-fixtures/symlinks-base/", + input: "./sub/link", + expected: []string{ + "sub/item", + }, + }, + { + name: "should follow chained pivoted link", + root: "./test-fixtures/symlinks-base/", + input: "./chain", + expected: []string{ + "base", + }, + }, + } + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + resolver := NewFromRootedUnindexedDirectory(c.root, c.root) + + refs, err := resolver.FilesByPath(c.input) + require.NoError(t, err) + assert.Len(t, refs, len(c.expected)) + s := strset.New() + for _, actual := range refs { + s.Add(actual.RealPath) + } + assert.ElementsMatch(t, c.expected, s.List()) + }) + } + +} + +func Test_UnindexedDirectoryResolver_resolvesLinks(t *testing.T) { + tests := []struct { + name string + runner func(file.Resolver) []file.Location + expected []file.Location + }{ + { + name: "by glob to links", + runner: func(resolver file.Resolver) []file.Location { + // links are searched, but resolve to the real files + // for that reason we need to place **/ in front (which is not the same for other resolvers) + actualLocations, err := resolver.FilesByGlob("**/*ink-*") + assert.NoError(t, err) + return actualLocations + }, + expected: []file.Location{ + file.NewVirtualLocation("file-1.txt", "link-1"), + file.NewVirtualLocation("file-2.txt", "link-2"), + // we already have this real file path via another link, so only one is returned + // file.NewVirtualLocation("file-2.txt", "link-indirect"), + file.NewVirtualLocation("file-3.txt", "link-within"), + }, + }, + { + name: "by basename", + runner: func(resolver file.Resolver) []file.Location { + // links are searched, but resolve to the real files + actualLocations, err := resolver.FilesByGlob("**/file-2.txt") + assert.NoError(t, err) + return actualLocations + }, + expected: []file.Location{ + // this has two copies in the base image, which overwrites the same location + file.NewLocation("file-2.txt"), + }, + }, + { + name: "by basename glob", + runner: 
func(resolver file.Resolver) []file.Location { + // links are searched, but resolve to the real files + actualLocations, err := resolver.FilesByGlob("**/file-?.txt") + assert.NoError(t, err) + return actualLocations + }, + expected: []file.Location{ + file.NewLocation("file-1.txt"), + file.NewLocation("file-2.txt"), + file.NewLocation("file-3.txt"), + file.NewLocation("parent/file-4.txt"), + }, + }, + { + name: "by basename glob to links", + runner: func(resolver file.Resolver) []file.Location { + actualLocations, err := resolver.FilesByGlob("**/link-*") + assert.NoError(t, err) + return actualLocations + }, + expected: []file.Location{ + file.NewVirtualLocationFromDirectory("file-1.txt", "link-1", stereoscopeFile.Reference{RealPath: "file-1.txt"}), + file.NewVirtualLocationFromDirectory("file-2.txt", "link-2", stereoscopeFile.Reference{RealPath: "file-2.txt"}), + // we already have this real file path via another link, so only one is returned + //file.NewVirtualLocationFromDirectory("file-2.txt", "link-indirect", stereoscopeFile.Reference{RealPath: "file-2.txt"}), + file.NewVirtualLocationFromDirectory("file-3.txt", "link-within", stereoscopeFile.Reference{RealPath: "file-3.txt"}), + }, + }, + { + name: "by extension", + runner: func(resolver file.Resolver) []file.Location { + // links are searched, but resolve to the real files + actualLocations, err := resolver.FilesByGlob("**/*.txt") + assert.NoError(t, err) + return actualLocations + }, + expected: []file.Location{ + file.NewLocation("file-1.txt"), + file.NewLocation("file-2.txt"), + file.NewLocation("file-3.txt"), + file.NewLocation("parent/file-4.txt"), + }, + }, + { + name: "by path to degree 1 link", + runner: func(resolver file.Resolver) []file.Location { + // links resolve to the final file + actualLocations, err := resolver.FilesByPath("/link-2") + assert.NoError(t, err) + return actualLocations + }, + expected: []file.Location{ + // we have multiple copies across layers + 
file.NewVirtualLocation("file-2.txt", "link-2"), + }, + }, + { + name: "by path to degree 2 link", + runner: func(resolver file.Resolver) []file.Location { + // multiple links resolves to the final file + actualLocations, err := resolver.FilesByPath("/link-indirect") + assert.NoError(t, err) + return actualLocations + }, + expected: []file.Location{ + // we have multiple copies across layers + file.NewVirtualLocation("file-2.txt", "link-indirect"), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + resolver := NewFromUnindexedDirectory("./test-fixtures/symlinks-from-image-symlinks-fixture") + + actual := test.runner(resolver) + + compareLocations(t, test.expected, actual) + }) + } +} + +func Test_UnindexedDirectoryResolver_DoNotAddVirtualPathsToTree(t *testing.T) { + resolver := NewFromUnindexedDirectory("./test-fixtures/symlinks-prune-indexing") + + allLocations := resolver.AllLocations() + var allRealPaths []stereoscopeFile.Path + for l := range allLocations { + allRealPaths = append(allRealPaths, stereoscopeFile.Path(l.RealPath)) + } + pathSet := stereoscopeFile.NewPathSet(allRealPaths...) 
+ + assert.False(t, + pathSet.Contains("before-path/file.txt"), + "symlink destinations should only be indexed at their real path, not through their virtual (symlinked) path", + ) + + assert.False(t, + pathSet.Contains("a-path/file.txt"), + "symlink destinations should only be indexed at their real path, not through their virtual (symlinked) path", + ) +} + +func Test_UnindexedDirectoryResolver_FilesContents_errorOnDirRequest(t *testing.T) { + resolver := NewFromUnindexedDirectory("./test-fixtures/system_paths") + + dirLoc := file.NewLocation("arg/foo") + + reader, err := resolver.FileContentsByLocation(dirLoc) + require.Error(t, err) + require.Nil(t, reader) +} + +func Test_UnindexedDirectoryResolver_AllLocations(t *testing.T) { + resolver := NewFromUnindexedDirectory("./test-fixtures/symlinks-from-image-symlinks-fixture") + + paths := strset.New() + for loc := range resolver.AllLocations() { + if strings.HasPrefix(loc.RealPath, "/") { + // ignore outside of the fixture root for now + continue + } + paths.Add(loc.RealPath) + } + expected := []string{ + "file-1.txt", + "file-2.txt", + "file-3.txt", + "link-1", + "link-2", + "link-dead", + "link-indirect", + "link-within", + "parent", + "parent-link", + "parent/file-4.txt", + } + + pathsList := paths.List() + sort.Strings(pathsList) + + assert.ElementsMatchf(t, expected, pathsList, "expected all paths to be indexed, but found different paths: \n%s", cmp.Diff(expected, paths.List())) +} + +func Test_WritableUnindexedDirectoryResolver(t *testing.T) { + tmpdir := t.TempDir() + + p := "some/path/file" + c := "some contents" + + dr := NewFromUnindexedDirectory(tmpdir) + + locations, err := dr.FilesByPath(p) + require.NoError(t, err) + require.Len(t, locations, 0) + + err = dr.Write(file.NewLocation(p), strings.NewReader(c)) + require.NoError(t, err) + + locations, err = dr.FilesByPath(p) + require.NoError(t, err) + require.Len(t, locations, 1) + + reader, err := dr.FileContentsByLocation(locations[0]) + 
require.NoError(t, err) + bytes, err := io.ReadAll(reader) + require.Equal(t, c, string(bytes)) +} + +func testWithTimeout(t *testing.T, timeout time.Duration, test func(*testing.T)) { + done := make(chan bool) + go func() { + test(t) + done <- true + }() + + select { + case <-time.After(timeout): + t.Fatal("test timed out") + case <-done: + } +} + +func compareLocations(t *testing.T, expected, actual []file.Location) { + t.Helper() + ignoreUnexported := cmpopts.IgnoreFields(file.LocationData{}, "ref") + ignoreMetadata := cmpopts.IgnoreFields(file.LocationMetadata{}, "Annotations") + ignoreFS := cmpopts.IgnoreFields(file.Coordinates{}, "FileSystemID") + + sort.Sort(file.Locations(expected)) + sort.Sort(file.Locations(actual)) + + if d := cmp.Diff(expected, actual, + ignoreUnexported, + ignoreFS, + ignoreMetadata, + ); d != "" { + + t.Errorf("unexpected locations (-want +got):\n%s", d) + } + +} diff --git a/syft/license/license.go b/syft/license/license.go index e9dd93c6235..c2b9d260295 100644 --- a/syft/license/license.go +++ b/syft/license/license.go @@ -3,6 +3,7 @@ package license import ( "fmt" + "runtime/debug" "github.com/github/go-spdx/v2/spdxexp" @@ -16,19 +17,28 @@ const ( Concluded Type = "concluded" ) -func ParseExpression(expression string) (string, error) { +func ParseExpression(expression string) (ex string, err error) { + // https://github.com/anchore/syft/issues/1837 + // The current spdx library can panic when parsing some expressions + // This is a temporary fix to recover and patch until we can investigate and contribute + // a fix to the upstream github library + defer func() { + if r := recover(); r != nil { + err = fmt.Errorf("recovered from panic while parsing license expression at: \n%s", string(debug.Stack())) + } + }() + licenseID, exists := spdxlicense.ID(expression) if exists { return licenseID, nil } - // If it doesn't exist initially in the SPDX list it might be a more complex expression // ignored variable is any invalid expressions // 
TODO: contribute to spdxexp to expose deprecated license IDs // https://github.com/anchore/syft/issues/1814 valid, _ := spdxexp.ValidateLicenses([]string{expression}) if !valid { - return "", fmt.Errorf("failed to validate spdx expression: %s", expression) + return "", fmt.Errorf("invalid SPDX expression: %s", expression) } return expression, nil diff --git a/syft/linux/identify_release.go b/syft/linux/identify_release.go index 152b53324c6..cf2da477cc3 100644 --- a/syft/linux/identify_release.go +++ b/syft/linux/identify_release.go @@ -11,7 +11,7 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) // returns a distro or nil @@ -54,7 +54,7 @@ var identityFiles = []parseEntry{ } // IdentifyRelease parses distro-specific files to discover and raise linux distribution release details. -func IdentifyRelease(resolver source.FileResolver) *Release { +func IdentifyRelease(resolver file.Resolver) *Release { logger := log.Nested("operation", "identify-release") for _, entry := range identityFiles { locations, err := resolver.FilesByPath(entry.path) diff --git a/syft/pkg/binary_metadata.go b/syft/pkg/binary_metadata.go index de0a0a2d40b..a915acc5296 100644 --- a/syft/pkg/binary_metadata.go +++ b/syft/pkg/binary_metadata.go @@ -1,12 +1,12 @@ package pkg -import "github.com/anchore/syft/syft/source" +import "github.com/anchore/syft/syft/file" type BinaryMetadata struct { Matches []ClassifierMatch `mapstructure:"Matches" json:"matches"` } type ClassifierMatch struct { - Classifier string `mapstructure:"Classifier" json:"classifier"` - Location source.Location `mapstructure:"Location" json:"location"` + Classifier string `mapstructure:"Classifier" json:"classifier"` + Location file.Location `mapstructure:"Location" json:"location"` } diff --git a/syft/pkg/catalog_test.go b/syft/pkg/catalog_test.go index 5271ac262cb..cfde34d5d6e 100644 --- a/syft/pkg/catalog_test.go 
+++ b/syft/pkg/catalog_test.go @@ -9,7 +9,7 @@ import ( "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/cpe" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) type expectedIndexes struct { @@ -75,8 +75,8 @@ func TestCatalogDeleteRemovesPackages(t *testing.T) { Name: "debian", Version: "1", Type: DebPkg, - Locations: source.NewLocationSet( - source.NewVirtualLocation("/c/path", "/another/path1"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/c/path", "/another/path1"), ), }, { @@ -84,8 +84,8 @@ func TestCatalogDeleteRemovesPackages(t *testing.T) { Name: "debian", Version: "2", Type: DebPkg, - Locations: source.NewLocationSet( - source.NewVirtualLocation("/d/path", "/another/path2"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/d/path", "/another/path2"), ), }, }, @@ -110,8 +110,8 @@ func TestCatalogDeleteRemovesPackages(t *testing.T) { Name: "debian", Version: "1", Type: DebPkg, - Locations: source.NewLocationSet( - source.NewVirtualLocation("/c/path", "/another/path1"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/c/path", "/another/path1"), ), }, { @@ -119,8 +119,8 @@ func TestCatalogDeleteRemovesPackages(t *testing.T) { Name: "debian", Version: "2", Type: DebPkg, - Locations: source.NewLocationSet( - source.NewVirtualLocation("/d/path", "/another/path2"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/d/path", "/another/path2"), ), }, { @@ -128,8 +128,8 @@ func TestCatalogDeleteRemovesPackages(t *testing.T) { Name: "debian", Version: "3", Type: DebPkg, - Locations: source.NewLocationSet( - source.NewVirtualLocation("/e/path", "/another/path3"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/e/path", "/another/path3"), ), }, }, @@ -155,8 +155,8 @@ func TestCatalogDeleteRemovesPackages(t *testing.T) { Name: "debian", Version: "1", Type: DebPkg, - Locations: source.NewLocationSet( - source.NewVirtualLocation("/c/path", 
"/another/path1"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/c/path", "/another/path1"), ), }, { @@ -164,8 +164,8 @@ func TestCatalogDeleteRemovesPackages(t *testing.T) { Name: "debian", Version: "2", Type: DebPkg, - Locations: source.NewLocationSet( - source.NewVirtualLocation("/d/path", "/another/path2"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/d/path", "/another/path2"), ), }, }, @@ -206,16 +206,16 @@ func TestCatalogAddPopulatesIndex(t *testing.T) { var pkgs = []Package{ { - Locations: source.NewLocationSet( - source.NewVirtualLocation("/a/path", "/another/path"), - source.NewVirtualLocation("/b/path", "/bee/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/a/path", "/another/path"), + file.NewVirtualLocation("/b/path", "/bee/path"), ), Type: RpmPkg, }, { - Locations: source.NewLocationSet( - source.NewVirtualLocation("/c/path", "/another/path"), - source.NewVirtualLocation("/d/path", "/another/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/c/path", "/another/path"), + file.NewVirtualLocation("/d/path", "/another/path"), ), Type: NpmPkg, }, @@ -291,25 +291,25 @@ func assertIndexes(t *testing.T, c *Collection, expectedIndexes expectedIndexes) func TestCatalog_PathIndexDeduplicatesRealVsVirtualPaths(t *testing.T) { p1 := Package{ - Locations: source.NewLocationSet( - source.NewVirtualLocation("/b/path", "/another/path"), - source.NewVirtualLocation("/b/path", "/b/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/b/path", "/another/path"), + file.NewVirtualLocation("/b/path", "/b/path"), ), Type: RpmPkg, Name: "Package-1", } p2 := Package{ - Locations: source.NewLocationSet( - source.NewVirtualLocation("/b/path", "/b/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/b/path", "/b/path"), ), Type: RpmPkg, Name: "Package-2", } p2Dup := Package{ - Locations: source.NewLocationSet( - source.NewVirtualLocation("/b/path", "/another/path"), - 
source.NewVirtualLocation("/b/path", "/c/path/b/dup"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/b/path", "/another/path"), + file.NewVirtualLocation("/b/path", "/c/path/b/dup"), ), Type: RpmPkg, Name: "Package-2", @@ -361,7 +361,7 @@ func TestCatalog_MergeRecords(t *testing.T) { var tests = []struct { name string pkgs []Package - expectedLocations []source.Location + expectedLocations []file.Location expectedCPECount int }{ { @@ -369,9 +369,9 @@ func TestCatalog_MergeRecords(t *testing.T) { pkgs: []Package{ { CPEs: []cpe.CPE{cpe.Must("cpe:2.3:a:package:1:1:*:*:*:*:*:*:*")}, - Locations: source.NewLocationSet( - source.NewVirtualLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewVirtualLocationFromCoordinates( + file.Coordinates{ RealPath: "/b/path", FileSystemID: "a", }, @@ -382,9 +382,9 @@ func TestCatalog_MergeRecords(t *testing.T) { }, { CPEs: []cpe.CPE{cpe.Must("cpe:2.3:b:package:1:1:*:*:*:*:*:*:*")}, - Locations: source.NewLocationSet( - source.NewVirtualLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewVirtualLocationFromCoordinates( + file.Coordinates{ RealPath: "/b/path", FileSystemID: "b", }, @@ -394,16 +394,16 @@ func TestCatalog_MergeRecords(t *testing.T) { Type: RpmPkg, }, }, - expectedLocations: []source.Location{ - source.NewVirtualLocationFromCoordinates( - source.Coordinates{ + expectedLocations: []file.Location{ + file.NewVirtualLocationFromCoordinates( + file.Coordinates{ RealPath: "/b/path", FileSystemID: "a", }, "/another/path", ), - source.NewVirtualLocationFromCoordinates( - source.Coordinates{ + file.NewVirtualLocationFromCoordinates( + file.Coordinates{ RealPath: "/b/path", FileSystemID: "b", }, diff --git a/syft/pkg/cataloger.go b/syft/pkg/cataloger.go index 28cc57c35dd..634a17e1dc1 100644 --- a/syft/pkg/cataloger.go +++ b/syft/pkg/cataloger.go @@ -2,7 +2,7 @@ package pkg import ( "github.com/anchore/syft/syft/artifact" - 
"github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) // Cataloger describes behavior for an object to participate in parsing container image or file system @@ -12,5 +12,5 @@ type Cataloger interface { // Name returns a string that uniquely describes a cataloger Name() string // Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing the catalog source. - Catalog(resolver source.FileResolver) ([]Package, []artifact.Relationship, error) + Catalog(resolver file.Resolver) ([]Package, []artifact.Relationship, error) } diff --git a/syft/pkg/cataloger/alpm/cataloger_test.go b/syft/pkg/cataloger/alpm/cataloger_test.go index 1dedded2eff..0b8a9156ea5 100644 --- a/syft/pkg/cataloger/alpm/cataloger_test.go +++ b/syft/pkg/cataloger/alpm/cataloger_test.go @@ -9,11 +9,10 @@ import ( "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestAlpmCataloger(t *testing.T) { - dbLocation := source.NewLocation("var/lib/pacman/local/gmp-6.2.1-2/desc") + dbLocation := file.NewLocation("var/lib/pacman/local/gmp-6.2.1-2/desc") expectedPkgs := []pkg.Package{ { Name: "gmp", @@ -24,7 +23,7 @@ func TestAlpmCataloger(t *testing.T) { pkg.NewLicenseFromLocations("LGPL3", dbLocation), pkg.NewLicenseFromLocations("GPL", dbLocation), ), - Locations: source.NewLocationSet(dbLocation), + Locations: file.NewLocationSet(dbLocation), CPEs: nil, PURL: "", MetadataType: "AlpmMetadata", diff --git a/syft/pkg/cataloger/alpm/package.go b/syft/pkg/cataloger/alpm/package.go index 2c85db47c62..4ce9bd6b6f2 100644 --- a/syft/pkg/cataloger/alpm/package.go +++ b/syft/pkg/cataloger/alpm/package.go @@ -4,18 +4,18 @@ import ( "strings" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" - 
"github.com/anchore/syft/syft/source" ) -func newPackage(m *parsedData, release *linux.Release, dbLocation source.Location) pkg.Package { +func newPackage(m *parsedData, release *linux.Release, dbLocation file.Location) pkg.Package { licenseCandidates := strings.Split(m.Licenses, "\n") p := pkg.Package{ Name: m.Package, Version: m.Version, - Locations: source.NewLocationSet(dbLocation), + Locations: file.NewLocationSet(dbLocation), Licenses: pkg.NewLicenseSet(pkg.NewLicensesFromLocation(dbLocation.WithoutAnnotations(), licenseCandidates...)...), Type: pkg.AlpmPkg, PURL: packageURL(m, release), diff --git a/syft/pkg/cataloger/alpm/parse_alpm_db.go b/syft/pkg/cataloger/alpm/parse_alpm_db.go index 987a52c076a..86c6dd3d2fd 100644 --- a/syft/pkg/cataloger/alpm/parse_alpm_db.go +++ b/syft/pkg/cataloger/alpm/parse_alpm_db.go @@ -17,7 +17,6 @@ import ( "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseAlpmDB @@ -36,7 +35,7 @@ type parsedData struct { pkg.AlpmMetadata `mapstructure:",squash"` } -func parseAlpmDB(resolver source.FileResolver, env *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseAlpmDB(resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { data, err := parseAlpmDBEntry(reader) if err != nil { return nil, nil, err @@ -117,7 +116,7 @@ func newScanner(reader io.Reader) *bufio.Scanner { return scanner } -func getFileReader(path string, resolver source.FileResolver) (io.Reader, error) { +func getFileReader(path string, resolver file.Resolver) (io.Reader, error) { locs, err := resolver.FilesByPath(path) if err != nil { return nil, err diff --git a/syft/pkg/cataloger/apkdb/package.go b/syft/pkg/cataloger/apkdb/package.go index 4bc59ba170d..8cb75bbc579 100644 --- 
a/syft/pkg/cataloger/apkdb/package.go +++ b/syft/pkg/cataloger/apkdb/package.go @@ -4,13 +4,13 @@ import ( "strings" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/license" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackage(d parsedData, release *linux.Release, dbLocation source.Location) pkg.Package { +func newPackage(d parsedData, release *linux.Release, dbLocation file.Location) pkg.Package { // check if license is a valid spdx expression before splitting licenseStrings := []string{d.License} _, err := license.ParseExpression(d.License) @@ -22,7 +22,7 @@ func newPackage(d parsedData, release *linux.Release, dbLocation source.Location p := pkg.Package{ Name: d.Package, Version: d.Version, - Locations: source.NewLocationSet(dbLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(dbLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), Licenses: pkg.NewLicenseSet(pkg.NewLicensesFromLocation(dbLocation, licenseStrings...)...), PURL: packageURL(d.ApkMetadata, release), Type: pkg.ApkPkg, diff --git a/syft/pkg/cataloger/apkdb/parse_apk_db.go b/syft/pkg/cataloger/apkdb/parse_apk_db.go index 748ed7d5852..fd4184f87dd 100644 --- a/syft/pkg/cataloger/apkdb/parse_apk_db.go +++ b/syft/pkg/cataloger/apkdb/parse_apk_db.go @@ -16,7 +16,6 @@ import ( "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // integrity check @@ -35,7 +34,7 @@ type parsedData struct { // information on specific fields, see https://wiki.alpinelinux.org/wiki/Apk_spec. 
// //nolint:funlen,gocognit -func parseApkDB(resolver source.FileResolver, env *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseApkDB(resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { scanner := bufio.NewScanner(reader) var apks []parsedData @@ -134,7 +133,7 @@ func parseApkDB(resolver source.FileResolver, env *generic.Environment, reader s return pkgs, discoverPackageDependencies(pkgs), nil } -func findReleases(resolver source.FileResolver, dbPath string) []linux.Release { +func findReleases(resolver file.Resolver, dbPath string) []linux.Release { if resolver == nil { return nil } @@ -157,13 +156,13 @@ func findReleases(resolver source.FileResolver, dbPath string) []linux.Release { return nil } - return parseReleasesFromAPKRepository(source.LocationReadCloser{ + return parseReleasesFromAPKRepository(file.LocationReadCloser{ Location: location, ReadCloser: reposReader, }) } -func parseReleasesFromAPKRepository(reader source.LocationReadCloser) []linux.Release { +func parseReleasesFromAPKRepository(reader file.LocationReadCloser) []linux.Release { var releases []linux.Release reposB, err := io.ReadAll(reader) diff --git a/syft/pkg/cataloger/apkdb/parse_apk_db_test.go b/syft/pkg/cataloger/apkdb/parse_apk_db_test.go index ac344631514..865a02c97ee 100644 --- a/syft/pkg/cataloger/apkdb/parse_apk_db_test.go +++ b/syft/pkg/cataloger/apkdb/parse_apk_db_test.go @@ -18,7 +18,6 @@ import ( "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestExtraFileAttributes(t *testing.T) { @@ -680,8 +679,8 @@ func TestSinglePackageDetails(t *testing.T) { for _, test := range tests { t.Run(test.fixture, func(t *testing.T) { - fixtureLocation := source.NewLocation(test.fixture) - 
test.expected.Locations = source.NewLocationSet(fixtureLocation) + fixtureLocation := file.NewLocation(test.fixture) + test.expected.Locations = file.NewLocationSet(fixtureLocation) licenses := test.expected.Licenses.ToSlice() for i := range licenses { licenses[i].Locations.Add(fixtureLocation) @@ -694,8 +693,8 @@ func TestSinglePackageDetails(t *testing.T) { func TestMultiplePackages(t *testing.T) { fixture := "test-fixtures/multiple" - location := source.NewLocation(fixture) - fixtureLocationSet := source.NewLocationSet(location) + location := file.NewLocation(fixture) + fixtureLocationSet := file.NewLocationSet(location) expectedPkgs := []pkg.Package{ { Name: "libc-utils", @@ -1024,7 +1023,7 @@ func Test_discoverPackageDependencies(t *testing.T) { t.Run(test.name, func(t *testing.T) { pkgs, wantRelationships := test.genFn() gotRelationships := discoverPackageDependencies(pkgs) - d := cmp.Diff(wantRelationships, gotRelationships, cmpopts.IgnoreUnexported(pkg.Package{}, source.LocationSet{}, pkg.LicenseSet{})) + d := cmp.Diff(wantRelationships, gotRelationships, cmpopts.IgnoreUnexported(pkg.Package{}, file.LocationSet{}, pkg.LicenseSet{})) if d != "" { t.Fail() t.Log(d) @@ -1061,8 +1060,8 @@ func TestPackageDbDependenciesByParse(t *testing.T) { require.NoError(t, err) t.Cleanup(func() { require.NoError(t, f.Close()) }) - pkgs, relationships, err := parseApkDB(nil, nil, source.LocationReadCloser{ - Location: source.NewLocation(test.fixture), + pkgs, relationships, err := parseApkDB(nil, nil, file.LocationReadCloser{ + Location: file.NewLocation(test.fixture), ReadCloser: f, }) require.NoError(t, err) @@ -1172,12 +1171,12 @@ func toPackageNames(pkgs []pkg.Package) []string { return names } -func newLocationReadCloser(t *testing.T, path string) source.LocationReadCloser { +func newLocationReadCloser(t *testing.T, path string) file.LocationReadCloser { f, err := os.Open(path) require.NoError(t, err) t.Cleanup(func() { f.Close() }) - return 
source.NewLocationReadCloser(source.NewLocation(path), f) + return file.NewLocationReadCloser(file.NewLocation(path), f) } func Test_stripVersionSpecifier(t *testing.T) { @@ -1256,8 +1255,8 @@ https://foo.them.org/alpine/v3.14/community`, for _, tt := range tests { t.Run(tt.desc, func(t *testing.T) { reposReader := io.NopCloser(strings.NewReader(tt.repos)) - got := parseReleasesFromAPKRepository(source.LocationReadCloser{ - Location: source.NewLocation("test"), + got := parseReleasesFromAPKRepository(file.LocationReadCloser{ + Location: file.NewLocation("test"), ReadCloser: reposReader, }) assert.Equal(t, tt.want, got) diff --git a/syft/pkg/cataloger/binary/cataloger.go b/syft/pkg/cataloger/binary/cataloger.go index cf58af15ca7..0cf04b729ed 100644 --- a/syft/pkg/cataloger/binary/cataloger.go +++ b/syft/pkg/cataloger/binary/cataloger.go @@ -3,8 +3,8 @@ package binary import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) const catalogerName = "binary-cataloger" @@ -27,7 +27,7 @@ func (c Cataloger) Name() string { // Catalog is given an object to resolve file references and content, this function returns any discovered Packages // after analyzing the catalog source. 
-func (c Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { +func (c Cataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { var packages []pkg.Package var relationships []artifact.Relationship @@ -68,7 +68,7 @@ func mergePackages(target *pkg.Package, extra *pkg.Package) { target.Metadata = meta } -func catalog(resolver source.FileResolver, cls classifier) (packages []pkg.Package, err error) { +func catalog(resolver file.Resolver, cls classifier) (packages []pkg.Package, err error) { locations, err := resolver.FilesByGlob(cls.FileGlob) if err != nil { return nil, err diff --git a/syft/pkg/cataloger/binary/cataloger_test.go b/syft/pkg/cataloger/binary/cataloger_test.go index 84771a16fc9..d6622423bb5 100644 --- a/syft/pkg/cataloger/binary/cataloger_test.go +++ b/syft/pkg/cataloger/binary/cataloger_test.go @@ -13,6 +13,7 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/stereoscope/pkg/imagetest" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/source" ) @@ -239,30 +240,6 @@ func Test_Cataloger_DefaultClassifiers_PositiveCases(t *testing.T) { Metadata: metadata("redis-binary"), }, }, - { - name: "positive-argocd-2.5.11", - fixtureDir: "test-fixtures/classifiers/dynamic/argocd-2.5.11", - expected: pkg.Package{ - Name: "argocd", - Version: "2.5.11", - Type: "binary", - PURL: "pkg:golang/github.com/argoproj/argo-cd@2.5.11", - Locations: locations("argocd"), - Metadata: metadata("argocd"), - }, - }, - { - name: "positive-argocd-2.6.4", - fixtureDir: "test-fixtures/classifiers/dynamic/argocd-2.6.4", - expected: pkg.Package{ - Name: "argocd", - Version: "2.6.4", - Type: "binary", - PURL: "pkg:golang/github.com/argoproj/argo-cd@2.6.4", - Locations: locations("argocd"), - Metadata: metadata("argocd"), - }, - }, { name: "positive-helm-3.11.1", fixtureDir: "test-fixtures/classifiers/dynamic/helm-3.11.1", @@ -287,66 +264,6 
@@ func Test_Cataloger_DefaultClassifiers_PositiveCases(t *testing.T) { Metadata: metadata("helm"), }, }, - { - name: "positive-kubectl-1.24.11", - fixtureDir: "test-fixtures/classifiers/dynamic/kubectl-1.24.11", - expected: pkg.Package{ - Name: "kubectl", - Version: "1.24.11", - Type: "binary", - PURL: "pkg:golang/k8s.io/kubectl@1.24.11", - Locations: locations("kubectl"), - Metadata: metadata("kubectl"), - }, - }, - { - name: "positive-kubectl-1.25.7", - fixtureDir: "test-fixtures/classifiers/dynamic/kubectl-1.25.7", - expected: pkg.Package{ - Name: "kubectl", - Version: "1.25.7", - Type: "binary", - PURL: "pkg:golang/k8s.io/kubectl@1.25.7", - Locations: locations("kubectl"), - Metadata: metadata("kubectl"), - }, - }, - { - name: "positive-kubectl-1.26.2", - fixtureDir: "test-fixtures/classifiers/dynamic/kubectl-1.26.2", - expected: pkg.Package{ - Name: "kubectl", - Version: "1.26.2", - Type: "binary", - PURL: "pkg:golang/k8s.io/kubectl@1.26.2", - Locations: locations("kubectl"), - Metadata: metadata("kubectl"), - }, - }, - { - name: "positive-kustomize-4.5.7", - fixtureDir: "test-fixtures/classifiers/dynamic/kustomize-4.5.7", - expected: pkg.Package{ - Name: "kustomize", - Version: "4.5.7", - Type: "binary", - PURL: "pkg:golang/sigs.k8s.io/kustomize@4.5.7", - Locations: locations("kustomize"), - Metadata: metadata("kustomize"), - }, - }, - { - name: "positive-kustomize-5.0.0", - fixtureDir: "test-fixtures/classifiers/dynamic/kustomize-5.0.0", - expected: pkg.Package{ - Name: "kustomize", - Version: "5.0.0", - Type: "binary", - PURL: "pkg:golang/sigs.k8s.io/kustomize@5.0.0", - Locations: locations("kustomize"), - Metadata: metadata("kustomize"), - }, - }, { name: "positive-redis-4.0.11", fixtureDir: "test-fixtures/classifiers/positive/redis-server-4.0.11", @@ -812,12 +729,12 @@ func TestClassifierCataloger_DefaultClassifiers_NegativeCases(t *testing.T) { assert.Equal(t, 0, len(actualResults)) } -func locations(locations ...string) source.LocationSet { - var locs 
[]source.Location +func locations(locations ...string) file.LocationSet { + var locs []file.Location for _, s := range locations { - locs = append(locs, source.NewLocation(s)) + locs = append(locs, file.NewLocation(s)) } - return source.NewLocationSet(locs...) + return file.NewLocationSet(locs...) } // metadata paths are: realPath, virtualPath @@ -841,8 +758,8 @@ func match(classifier string, paths ...string) pkg.ClassifierMatch { } return pkg.ClassifierMatch{ Classifier: classifier, - Location: source.NewVirtualLocationFromCoordinates( - source.Coordinates{ + Location: file.NewVirtualLocationFromCoordinates( + file.Coordinates{ RealPath: realPath, }, virtualPath, @@ -901,10 +818,10 @@ func assertPackagesAreEqual(t *testing.T, expected pkg.Package, p pkg.Package) { if len(failMessages) > 0 { assert.Failf(t, strings.Join(failMessages, "; "), "diff: %s", cmp.Diff(expected, p, - cmp.Transformer("Locations", func(l source.LocationSet) []source.Location { + cmp.Transformer("Locations", func(l file.LocationSet) []file.Location { return l.ToSlice() }), - cmpopts.IgnoreUnexported(pkg.Package{}, source.Location{}), + cmpopts.IgnoreUnexported(pkg.Package{}, file.Location{}), cmpopts.IgnoreFields(pkg.Package{}, "CPEs", "FoundBy", "MetadataType", "Type"), )) } @@ -914,22 +831,22 @@ type panicyResolver struct { searchCalled bool } -func (p *panicyResolver) FilesByExtension(_ ...string) ([]source.Location, error) { +func (p *panicyResolver) FilesByExtension(_ ...string) ([]file.Location, error) { p.searchCalled = true return nil, errors.New("not implemented") } -func (p *panicyResolver) FilesByBasename(_ ...string) ([]source.Location, error) { +func (p *panicyResolver) FilesByBasename(_ ...string) ([]file.Location, error) { p.searchCalled = true return nil, errors.New("not implemented") } -func (p *panicyResolver) FilesByBasenameGlob(_ ...string) ([]source.Location, error) { +func (p *panicyResolver) FilesByBasenameGlob(_ ...string) ([]file.Location, error) { p.searchCalled = 
true return nil, errors.New("not implemented") } -func (p *panicyResolver) FileContentsByLocation(_ source.Location) (io.ReadCloser, error) { +func (p *panicyResolver) FileContentsByLocation(_ file.Location) (io.ReadCloser, error) { p.searchCalled = true return nil, errors.New("not implemented") } @@ -938,34 +855,34 @@ func (p *panicyResolver) HasPath(_ string) bool { return true } -func (p *panicyResolver) FilesByPath(_ ...string) ([]source.Location, error) { +func (p *panicyResolver) FilesByPath(_ ...string) ([]file.Location, error) { p.searchCalled = true return nil, errors.New("not implemented") } -func (p *panicyResolver) FilesByGlob(_ ...string) ([]source.Location, error) { +func (p *panicyResolver) FilesByGlob(_ ...string) ([]file.Location, error) { p.searchCalled = true return nil, errors.New("not implemented") } -func (p *panicyResolver) FilesByMIMEType(_ ...string) ([]source.Location, error) { +func (p *panicyResolver) FilesByMIMEType(_ ...string) ([]file.Location, error) { p.searchCalled = true return nil, errors.New("not implemented") } -func (p *panicyResolver) RelativeFileByPath(_ source.Location, _ string) *source.Location { +func (p *panicyResolver) RelativeFileByPath(_ file.Location, _ string) *file.Location { return nil } -func (p *panicyResolver) AllLocations() <-chan source.Location { +func (p *panicyResolver) AllLocations() <-chan file.Location { return nil } -func (p *panicyResolver) FileMetadataByLocation(_ source.Location) (source.FileMetadata, error) { - return source.FileMetadata{}, errors.New("not implemented") +func (p *panicyResolver) FileMetadataByLocation(_ file.Location) (file.Metadata, error) { + return file.Metadata{}, errors.New("not implemented") } -var _ source.FileResolver = (*panicyResolver)(nil) +var _ file.Resolver = (*panicyResolver)(nil) func Test_Cataloger_ResilientToErrors(t *testing.T) { c := NewCataloger() diff --git a/syft/pkg/cataloger/binary/classifier.go b/syft/pkg/cataloger/binary/classifier.go index 
6ab18985c52..c98399f4ce9 100644 --- a/syft/pkg/cataloger/binary/classifier.go +++ b/syft/pkg/cataloger/binary/classifier.go @@ -15,9 +15,9 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/cpe" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader" - "github.com/anchore/syft/syft/source" ) var emptyPURL = packageurl.PackageURL{} @@ -53,10 +53,10 @@ type classifier struct { } // evidenceMatcher is a function called to catalog Packages that match some sort of evidence -type evidenceMatcher func(resolver source.FileResolver, classifier classifier, location source.Location) ([]pkg.Package, error) +type evidenceMatcher func(resolver file.Resolver, classifier classifier, location file.Location) ([]pkg.Package, error) func evidenceMatchers(matchers ...evidenceMatcher) evidenceMatcher { - return func(resolver source.FileResolver, classifier classifier, location source.Location) ([]pkg.Package, error) { + return func(resolver file.Resolver, classifier classifier, location file.Location) ([]pkg.Package, error) { for _, matcher := range matchers { match, err := matcher(resolver, classifier, location) if err != nil { @@ -72,7 +72,7 @@ func evidenceMatchers(matchers ...evidenceMatcher) evidenceMatcher { func fileNameTemplateVersionMatcher(fileNamePattern string, contentTemplate string) evidenceMatcher { pat := regexp.MustCompile(fileNamePattern) - return func(resolver source.FileResolver, classifier classifier, location source.Location) ([]pkg.Package, error) { + return func(resolver file.Resolver, classifier classifier, location file.Location) ([]pkg.Package, error) { if !pat.MatchString(location.RealPath) { return nil, nil } @@ -118,7 +118,7 @@ func fileNameTemplateVersionMatcher(fileNamePattern string, contentTemplate stri func fileContentsVersionMatcher(pattern string) evidenceMatcher { pat := regexp.MustCompile(pattern) - 
return func(resolver source.FileResolver, classifier classifier, location source.Location) ([]pkg.Package, error) { + return func(resolver file.Resolver, classifier classifier, location file.Location) ([]pkg.Package, error) { contents, err := getContents(resolver, location) if err != nil { return nil, fmt.Errorf("unable to get read contents for file: %w", err) @@ -138,7 +138,7 @@ func fileContentsVersionMatcher(pattern string) evidenceMatcher { //nolint:gocognit func sharedLibraryLookup(sharedLibraryPattern string, sharedLibraryMatcher evidenceMatcher) evidenceMatcher { pat := regexp.MustCompile(sharedLibraryPattern) - return func(resolver source.FileResolver, classifier classifier, location source.Location) (packages []pkg.Package, _ error) { + return func(resolver file.Resolver, classifier classifier, location file.Location) (packages []pkg.Package, _ error) { libs, err := sharedLibraries(resolver, location) if err != nil { return nil, err @@ -159,7 +159,7 @@ func sharedLibraryLookup(sharedLibraryPattern string, sharedLibraryMatcher evide } for _, p := range pkgs { // set the source binary as the first location - locationSet := source.NewLocationSet(location) + locationSet := file.NewLocationSet(location) locationSet.Add(p.Locations.ToSlice()...) 
p.Locations = locationSet meta, _ := p.Metadata.(pkg.BinaryMetadata) @@ -187,7 +187,7 @@ func mustPURL(purl string) packageurl.PackageURL { return p } -func getContents(resolver source.FileResolver, location source.Location) ([]byte, error) { +func getContents(resolver file.Resolver, location file.Location) ([]byte, error) { reader, err := resolver.FileContentsByLocation(location) if err != nil { return nil, err @@ -216,7 +216,7 @@ func singleCPE(cpeString string) []cpe.CPE { // sharedLibraries returns a list of all shared libraries found within a binary, currently // supporting: elf, macho, and windows pe -func sharedLibraries(resolver source.FileResolver, location source.Location) ([]string, error) { +func sharedLibraries(resolver file.Resolver, location file.Location) ([]string, error) { contents, err := getContents(resolver, location) if err != nil { return nil, err diff --git a/syft/pkg/cataloger/binary/classifier_test.go b/syft/pkg/cataloger/binary/classifier_test.go index 82260a0e633..fbf88c3b1a8 100644 --- a/syft/pkg/cataloger/binary/classifier_test.go +++ b/syft/pkg/cataloger/binary/classifier_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/syft/syft/cpe" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) func Test_ClassifierCPEs(t *testing.T) { @@ -63,12 +63,12 @@ func Test_ClassifierCPEs(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - resolver := source.NewMockResolverForPaths(test.fixture) - locations, err := resolver.FilesByPath(test.fixture) + resolver := file.NewMockResolverForPaths(test.fixture) + ls, err := resolver.FilesByPath(test.fixture) require.NoError(t, err) - require.Len(t, locations, 1) + require.Len(t, ls, 1) - pkgs, err := test.classifier.EvidenceMatcher(resolver, test.classifier, locations[0]) + pkgs, err := test.classifier.EvidenceMatcher(resolver, test.classifier, ls[0]) require.NoError(t, err) require.Len(t, pkgs, 1) diff 
--git a/syft/pkg/cataloger/binary/default_classifiers.go b/syft/pkg/cataloger/binary/default_classifiers.go index 8aa739b4b9f..5c2f2e17fe4 100644 --- a/syft/pkg/cataloger/binary/default_classifiers.go +++ b/syft/pkg/cataloger/binary/default_classifiers.go @@ -46,15 +46,6 @@ var defaultClassifiers = []classifier{ PURL: mustPURL("pkg:generic/go@version"), CPEs: singleCPE("cpe:2.3:a:golang:go:*:*:*:*:*:*:*:*"), }, - { - Class: "argocd", - FileGlob: "**/argocd", - EvidenceMatcher: fileContentsVersionMatcher( - `(?m)common\.version=(?P[0-9]+\.[0-9]+\.[0-9]+)`), - Package: "argocd", - PURL: mustPURL("pkg:golang/github.com/argoproj/argo-cd@version"), - CPEs: singleCPE("cpe:2.3:a:argoproj:argocd:*:*:*:*:*:*:*"), - }, { Class: "helm", FileGlob: "**/helm", @@ -64,24 +55,6 @@ var defaultClassifiers = []classifier{ PURL: mustPURL("pkg:golang/helm.sh/helm@version"), CPEs: singleCPE("cpe:2.3:a:helm:helm:*:*:*:*:*:*:*"), }, - { - Class: "kustomize", - FileGlob: "**/kustomize", - EvidenceMatcher: fileContentsVersionMatcher( - `(?m)version=kustomize/v(?P[0-9]+\.[0-9]+\.[0-9]+)`), - Package: "kustomize", - PURL: mustPURL("pkg:golang/sigs.k8s.io/kustomize@version"), - CPEs: singleCPE("cpe:2.3:a:kustomize:kustomize:*:*:*:*:*:*:*"), - }, - { - Class: "kubectl", - FileGlob: "**/kubectl", - EvidenceMatcher: fileContentsVersionMatcher( - `(?m)\x00v(?P[0-9]+\.[0-9]+\.[0-9]+)\x00`), - Package: "kubectl", - PURL: mustPURL("pkg:golang/k8s.io/kubectl@version"), - CPEs: singleCPE("cpe:2.3:a:kubectl:kubectl:*:*:*:*:*:*:*"), - }, { Class: "redis-binary", FileGlob: "**/redis-server", diff --git a/syft/pkg/cataloger/binary/package.go b/syft/pkg/cataloger/binary/package.go index 7c1fb7abc60..a677b02a623 100644 --- a/syft/pkg/cataloger/binary/package.go +++ b/syft/pkg/cataloger/binary/package.go @@ -4,11 +4,11 @@ import ( "reflect" "github.com/anchore/syft/syft/cpe" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func 
newPackage(classifier classifier, location source.Location, matchMetadata map[string]string) *pkg.Package { +func newPackage(classifier classifier, location file.Location, matchMetadata map[string]string) *pkg.Package { version, ok := matchMetadata["version"] if !ok { return nil @@ -26,7 +26,7 @@ func newPackage(classifier classifier, location source.Location, matchMetadata m p := pkg.Package{ Name: classifier.Package, Version: version, - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), ), Type: pkg.BinaryPkg, diff --git a/syft/pkg/cataloger/binary/test-fixtures/Makefile b/syft/pkg/cataloger/binary/test-fixtures/Makefile index afd160d8f09..4ed523068e4 100644 --- a/syft/pkg/cataloger/binary/test-fixtures/Makefile +++ b/syft/pkg/cataloger/binary/test-fixtures/Makefile @@ -7,15 +7,8 @@ all: \ classifiers/dynamic/ruby-library-3.2.1 \ classifiers/dynamic/ruby-library-2.7.7 \ classifiers/dynamic/ruby-library-2.6.10 \ - classifiers/dynamic/argocd-2.5.11 \ - classifiers/dynamic/argocd-2.6.4 \ classifiers/dynamic/helm-3.11.1 \ classifiers/dynamic/helm-3.10.3 \ - classifiers/dynamic/kubectl-1.24.11 \ - classifiers/dynamic/kubectl-1.25.7 \ - classifiers/dynamic/kubectl-1.26.2 \ - classifiers/dynamic/kustomize-4.5.7 \ - classifiers/dynamic/kustomize-5.0.0 \ classifiers/dynamic/consul-1.15.2 @@ -89,18 +82,6 @@ classifiers/dynamic/ruby-library-2.6.10: /usr/local/lib/libruby.so.2.6 \ $@/libruby.so.2.6 -classifiers/dynamic/argocd-2.5.11: - $(eval $@_image := "argoproj/argocd:v2.5.11@sha256:d1062935b3256ec69422843ebcb50debb54fd389436961586000c8ce6ee7f249") - ./get-image-file.sh $($@_image) \ - /usr/local/bin/argocd \ - $@/argocd - -classifiers/dynamic/argocd-2.6.4: - $(eval $@_image := "argoproj/argocd:v2.6.4@sha256:61fcbba187ff53c00696cb580edf70cada59c45cf399d8477631acf43cf522ee") - ./get-image-file.sh $($@_image) \ - /usr/local/bin/argocd \ - $@/argocd - 
classifiers/dynamic/helm-3.11.1: $(eval $@_image := "alpine/helm:3.11.1@sha256:8628e3695fb743a8b9de89626f1b7a221280c2152c0e288c2504e59b68233e8b") ./get-image-file.sh $($@_image) \ @@ -113,36 +94,6 @@ classifiers/dynamic/helm-3.10.3: /usr/local/bin/helm \ $@/helm -classifiers/dynamic/kubectl-1.24.11: - $(eval $@_image := "bitnami/kubectl:1.24.11@sha256:79d60c5ac8a1dc84e2c39f56d8e8cc0053159b5ed88f283bdf8fbda1ee86c8bc") - ./get-image-file.sh $($@_image) \ - /opt/bitnami/kubectl/bin/kubectl \ - $@/kubectl - -classifiers/dynamic/kubectl-1.25.7: - $(eval $@_image := "bitnami/kubectl:1.25.7@sha256:d7b00dbfdc6d8890aefe40edfb6c1d4c90cbb6c978794bb51a21744edc34ba7a") - ./get-image-file.sh $($@_image) \ - /opt/bitnami/kubectl/bin/kubectl \ - $@/kubectl - -classifiers/dynamic/kubectl-1.26.2: - $(eval $@_image := "line/kubectl-kustomize:1.26.2-5.0.0@sha256:9ee3b4a9a21f0777fc1d8c64208290f818a2e68c5e9e892e931621bda089bf06") - ./get-image-file.sh $($@_image) \ - /usr/local/bin/kubectl \ - $@/kubectl - -classifiers/dynamic/kustomize-4.5.7: - $(eval $@_image := "argoproj/argocd:v2.6.4@sha256:61fcbba187ff53c00696cb580edf70cada59c45cf399d8477631acf43cf522ee") - ./get-image-file.sh $($@_image) \ - /usr/local/bin/kustomize \ - $@/kustomize - -classifiers/dynamic/kustomize-5.0.0: - $(eval $@_image := "line/kubectl-kustomize:1.26.2-5.0.0@sha256:9ee3b4a9a21f0777fc1d8c64208290f818a2e68c5e9e892e931621bda089bf06") - ./get-image-file.sh $($@_image) \ - /usr/local/bin/kustomize \ - $@/kustomize - classifiers/dynamic/consul-1.15.2: $(eval $@_image := "hashicorp/consul:1.15.2@sha256:c2169f3bb18dd947ae8eb5f6766896695c71fb439f050a3343e0007d895615b8") ./get-image-file.sh $($@_image) \ diff --git a/syft/pkg/cataloger/catalog.go b/syft/pkg/cataloger/catalog.go index 793efab369b..f982223e1a4 100644 --- a/syft/pkg/cataloger/catalog.go +++ b/syft/pkg/cataloger/catalog.go @@ -14,10 +14,10 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" 
"github.com/anchore/syft/syft/event" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/common/cpe" - "github.com/anchore/syft/syft/source" ) // Monitor provides progress-related data for observing the progress of a Catalog() call (published on the event bus). @@ -50,7 +50,7 @@ func newMonitor() (*progress.Manual, *progress.Manual) { return &filesProcessed, &packagesDiscovered } -func runCataloger(cataloger pkg.Cataloger, resolver source.FileResolver) (catalogerResult *catalogResult, err error) { +func runCataloger(cataloger pkg.Cataloger, resolver file.Resolver) (catalogerResult *catalogResult, err error) { // handle individual cataloger panics defer func() { if e := recover(); e != nil { @@ -105,7 +105,7 @@ func runCataloger(cataloger pkg.Cataloger, resolver source.FileResolver) (catalo // request. // //nolint:funlen -func Catalog(resolver source.FileResolver, _ *linux.Release, parallelism int, catalogers ...pkg.Cataloger) (*pkg.Collection, []artifact.Relationship, error) { +func Catalog(resolver file.Resolver, _ *linux.Release, parallelism int, catalogers ...pkg.Cataloger) (*pkg.Collection, []artifact.Relationship, error) { catalog := pkg.NewCollection() var allRelationships []artifact.Relationship @@ -182,13 +182,13 @@ func Catalog(resolver source.FileResolver, _ *linux.Release, parallelism int, ca return catalog, allRelationships, errs } -func packageFileOwnershipRelationships(p pkg.Package, resolver source.FilePathResolver) ([]artifact.Relationship, error) { +func packageFileOwnershipRelationships(p pkg.Package, resolver file.PathResolver) ([]artifact.Relationship, error) { fileOwner, ok := p.Metadata.(pkg.FileOwner) if !ok { return nil, nil } - locations := map[artifact.ID]source.Location{} + locations := map[artifact.ID]file.Location{} for _, path := range fileOwner.OwnedFiles() { pathRefs, err := resolver.FilesByPath(path) diff --git 
a/syft/pkg/cataloger/catalog_test.go b/syft/pkg/cataloger/catalog_test.go index 9de59f36d74..950ec133aba 100644 --- a/syft/pkg/cataloger/catalog_test.go +++ b/syft/pkg/cataloger/catalog_test.go @@ -6,14 +6,14 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) func Test_CatalogPanicHandling(t *testing.T) { catalog, relationships, err := Catalog( - source.NewMockResolverForPaths(), + file.NewMockResolverForPaths(), &linux.Release{}, 1, panickingCataloger{}, @@ -32,7 +32,7 @@ func (p panickingCataloger) Name() string { return "panicking-cataloger" } -func (p panickingCataloger) Catalog(_ source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { +func (p panickingCataloger) Catalog(_ file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { panic("something bad happened") } @@ -44,7 +44,7 @@ func (p returningCataloger) Name() string { return "returning-cataloger" } -func (p returningCataloger) Catalog(_ source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { +func (p returningCataloger) Catalog(_ file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { pkg1 := pkg.Package{ Name: "package-1", Version: "1.0", diff --git a/syft/pkg/cataloger/cataloger_test.go b/syft/pkg/cataloger/cataloger_test.go index 35cde7797d0..4f4b6ce83fd 100644 --- a/syft/pkg/cataloger/cataloger_test.go +++ b/syft/pkg/cataloger/cataloger_test.go @@ -6,8 +6,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) var _ pkg.Cataloger = (*dummy)(nil) @@ -20,7 +20,7 @@ func (d dummy) Name() string { return d.name } -func (d dummy) Catalog(_ source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { +func (d dummy) 
Catalog(_ file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { panic("not implemented") } diff --git a/syft/pkg/cataloger/cpp/package.go b/syft/pkg/cataloger/cpp/package.go index ba54add772f..dbbdd0b90c8 100644 --- a/syft/pkg/cataloger/cpp/package.go +++ b/syft/pkg/cataloger/cpp/package.go @@ -4,11 +4,11 @@ import ( "strings" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newConanfilePackage(m pkg.ConanMetadata, locations ...source.Location) *pkg.Package { +func newConanfilePackage(m pkg.ConanMetadata, locations ...file.Location) *pkg.Package { fields := strings.Split(strings.TrimSpace(m.Ref), "/") if len(fields) < 2 { return nil @@ -23,7 +23,7 @@ func newConanfilePackage(m pkg.ConanMetadata, locations ...source.Location) *pkg p := pkg.Package{ Name: pkgName, Version: pkgVersion, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(pkgName, pkgVersion), Language: pkg.CPP, Type: pkg.ConanPkg, @@ -36,7 +36,7 @@ func newConanfilePackage(m pkg.ConanMetadata, locations ...source.Location) *pkg return &p } -func newConanlockPackage(m pkg.ConanLockMetadata, locations ...source.Location) *pkg.Package { +func newConanlockPackage(m pkg.ConanLockMetadata, locations ...file.Location) *pkg.Package { fields := strings.Split(strings.Split(m.Ref, "@")[0], "/") if len(fields) < 2 { return nil @@ -51,7 +51,7 @@ func newConanlockPackage(m pkg.ConanLockMetadata, locations ...source.Location) p := pkg.Package{ Name: pkgName, Version: pkgVersion, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(pkgName, pkgVersion), Language: pkg.CPP, Type: pkg.ConanPkg, diff --git a/syft/pkg/cataloger/cpp/parse_conanfile.go b/syft/pkg/cataloger/cpp/parse_conanfile.go index fdaf08026be..f9ae172f37b 100644 --- a/syft/pkg/cataloger/cpp/parse_conanfile.go +++ 
b/syft/pkg/cataloger/cpp/parse_conanfile.go @@ -8,9 +8,9 @@ import ( "strings" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseConanfile @@ -20,7 +20,7 @@ type Conanfile struct { } // parseConanfile is a parser function for conanfile.txt contents, returning all packages discovered. -func parseConanfile(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseConanfile(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { r := bufio.NewReader(reader) inRequirements := false var pkgs []pkg.Package diff --git a/syft/pkg/cataloger/cpp/parse_conanfile_test.go b/syft/pkg/cataloger/cpp/parse_conanfile_test.go index edb9ff30a41..bca49223a5d 100644 --- a/syft/pkg/cataloger/cpp/parse_conanfile_test.go +++ b/syft/pkg/cataloger/cpp/parse_conanfile_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseConanfile(t *testing.T) { fixture := "test-fixtures/conanfile.txt" - fixtureLocationSet := source.NewLocationSet(source.NewLocation(fixture)) + fixtureLocationSet := file.NewLocationSet(file.NewLocation(fixture)) expected := []pkg.Package{ { Name: "catch2", diff --git a/syft/pkg/cataloger/cpp/parse_conanlock.go b/syft/pkg/cataloger/cpp/parse_conanlock.go index b3bcf31d53e..511000ea16a 100644 --- a/syft/pkg/cataloger/cpp/parse_conanlock.go +++ b/syft/pkg/cataloger/cpp/parse_conanlock.go @@ -5,9 +5,9 @@ import ( "strings" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" 
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseConanlock @@ -30,7 +30,7 @@ type conanLock struct { } // parseConanlock is a parser function for conan.lock contents, returning all packages discovered. -func parseConanlock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseConanlock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package var cl conanLock if err := json.NewDecoder(reader).Decode(&cl); err != nil { diff --git a/syft/pkg/cataloger/cpp/parse_conanlock_test.go b/syft/pkg/cataloger/cpp/parse_conanlock_test.go index c5a57fa795e..b699081dee5 100644 --- a/syft/pkg/cataloger/cpp/parse_conanlock_test.go +++ b/syft/pkg/cataloger/cpp/parse_conanlock_test.go @@ -4,9 +4,9 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseConanlock(t *testing.T) { @@ -16,7 +16,7 @@ func TestParseConanlock(t *testing.T) { Name: "zlib", Version: "1.2.12", PURL: "pkg:conan/zlib@1.2.12", - Locations: source.NewLocationSet(source.NewLocation(fixture)), + Locations: file.NewLocationSet(file.NewLocation(fixture)), Language: pkg.CPP, Type: pkg.ConanPkg, MetadataType: pkg.ConanLockMetadataType, diff --git a/syft/pkg/cataloger/dart/package.go b/syft/pkg/cataloger/dart/package.go index 1f78045536a..f01d80f602c 100644 --- a/syft/pkg/cataloger/dart/package.go +++ b/syft/pkg/cataloger/dart/package.go @@ -2,11 +2,11 @@ package dart import ( "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func 
newPubspecLockPackage(name string, raw pubspecLockPackage, locations ...source.Location) pkg.Package { +func newPubspecLockPackage(name string, raw pubspecLockPackage, locations ...file.Location) pkg.Package { metadata := pkg.DartPubMetadata{ Name: name, Version: raw.Version, @@ -17,7 +17,7 @@ func newPubspecLockPackage(name string, raw pubspecLockPackage, locations ...sou p := pkg.Package{ Name: name, Version: raw.Version, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(metadata), Language: pkg.Dart, Type: pkg.DartPubPkg, diff --git a/syft/pkg/cataloger/dart/parse_pubspec_lock.go b/syft/pkg/cataloger/dart/parse_pubspec_lock.go index bde8caf663d..3493f8d1df8 100644 --- a/syft/pkg/cataloger/dart/parse_pubspec_lock.go +++ b/syft/pkg/cataloger/dart/parse_pubspec_lock.go @@ -9,9 +9,9 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parsePubspecLock @@ -38,7 +38,7 @@ type pubspecLockDescription struct { ResolvedRef string `yaml:"resolved-ref" mapstructure:"resolved-ref"` } -func parsePubspecLock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parsePubspecLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package dec := yaml.NewDecoder(reader) diff --git a/syft/pkg/cataloger/dart/parse_pubspec_lock_test.go b/syft/pkg/cataloger/dart/parse_pubspec_lock_test.go index bbef7e0492c..a5a972e80eb 100644 --- a/syft/pkg/cataloger/dart/parse_pubspec_lock_test.go +++ b/syft/pkg/cataloger/dart/parse_pubspec_lock_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + 
"github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParsePubspecLock(t *testing.T) { fixture := "test-fixtures/pubspec.lock" - fixtureLocationSet := source.NewLocationSet(source.NewLocation(fixture)) + fixtureLocationSet := file.NewLocationSet(file.NewLocation(fixture)) expected := []pkg.Package{ { Name: "ale", diff --git a/syft/pkg/cataloger/deb/cataloger_test.go b/syft/pkg/cataloger/deb/cataloger_test.go index ab3415d5402..64a3c5f8768 100644 --- a/syft/pkg/cataloger/deb/cataloger_test.go +++ b/syft/pkg/cataloger/deb/cataloger_test.go @@ -6,11 +6,10 @@ import ( "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestDpkgCataloger(t *testing.T) { - licenseLocation := source.NewVirtualLocation("/usr/share/doc/libpam-runtime/copyright", "/usr/share/doc/libpam-runtime/copyright") + licenseLocation := file.NewVirtualLocation("/usr/share/doc/libpam-runtime/copyright", "/usr/share/doc/libpam-runtime/copyright") expected := []pkg.Package{ { Name: "libpam-runtime", @@ -21,11 +20,11 @@ func TestDpkgCataloger(t *testing.T) { pkg.NewLicenseFromLocations("GPL-2", licenseLocation), pkg.NewLicenseFromLocations("LGPL-2.1", licenseLocation), ), - Locations: source.NewLocationSet( - source.NewVirtualLocation("/var/lib/dpkg/status", "/var/lib/dpkg/status"), - source.NewVirtualLocation("/var/lib/dpkg/info/libpam-runtime.md5sums", "/var/lib/dpkg/info/libpam-runtime.md5sums"), - source.NewVirtualLocation("/var/lib/dpkg/info/libpam-runtime.conffiles", "/var/lib/dpkg/info/libpam-runtime.conffiles"), - source.NewVirtualLocation("/usr/share/doc/libpam-runtime/copyright", "/usr/share/doc/libpam-runtime/copyright"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/var/lib/dpkg/status", "/var/lib/dpkg/status"), + 
file.NewVirtualLocation("/var/lib/dpkg/info/libpam-runtime.md5sums", "/var/lib/dpkg/info/libpam-runtime.md5sums"), + file.NewVirtualLocation("/var/lib/dpkg/info/libpam-runtime.conffiles", "/var/lib/dpkg/info/libpam-runtime.conffiles"), + file.NewVirtualLocation("/usr/share/doc/libpam-runtime/copyright", "/usr/share/doc/libpam-runtime/copyright"), ), Type: pkg.DebPkg, MetadataType: pkg.DpkgMetadataType, diff --git a/syft/pkg/cataloger/deb/package.go b/syft/pkg/cataloger/deb/package.go index 1685051c959..b37d8f46b7c 100644 --- a/syft/pkg/cataloger/deb/package.go +++ b/syft/pkg/cataloger/deb/package.go @@ -10,9 +10,9 @@ import ( "github.com/anchore/packageurl-go" "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) const ( @@ -21,14 +21,14 @@ const ( docsPath = "/usr/share/doc" ) -func newDpkgPackage(d pkg.DpkgMetadata, dbLocation source.Location, resolver source.FileResolver, release *linux.Release) pkg.Package { +func newDpkgPackage(d pkg.DpkgMetadata, dbLocation file.Location, resolver file.Resolver, release *linux.Release) pkg.Package { // TODO: separate pr to license refactor, but explore extracting dpkg-specific license parsing into a separate function licenses := make([]pkg.License, 0) p := pkg.Package{ Name: d.Package, Version: d.Version, Licenses: pkg.NewLicenseSet(licenses...), - Locations: source.NewLocationSet(dbLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(dbLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), PURL: packageURL(d, release), Type: pkg.DebPkg, MetadataType: pkg.DpkgMetadataType, @@ -83,7 +83,7 @@ func packageURL(m pkg.DpkgMetadata, distro *linux.Release) string { ).ToString() } -func addLicenses(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) { 
+func addLicenses(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) { metadata, ok := p.Metadata.(pkg.DpkgMetadata) if !ok { log.WithFields("package", p).Warn("unable to extract DPKG metadata to add licenses") @@ -105,7 +105,7 @@ func addLicenses(resolver source.FileResolver, dbLocation source.Location, p *pk } } -func mergeFileListing(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) { +func mergeFileListing(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) { metadata, ok := p.Metadata.(pkg.DpkgMetadata) if !ok { log.WithFields("package", p).Warn("unable to extract DPKG metadata to file listing") @@ -137,10 +137,10 @@ loopNewFiles: p.Locations.Add(infoLocations...) } -func getAdditionalFileListing(resolver source.FileResolver, dbLocation source.Location, m pkg.DpkgMetadata) ([]pkg.DpkgFileRecord, []source.Location) { +func getAdditionalFileListing(resolver file.Resolver, dbLocation file.Location, m pkg.DpkgMetadata) ([]pkg.DpkgFileRecord, []file.Location) { // ensure the default value for a collection is never nil since this may be shown as JSON var files = make([]pkg.DpkgFileRecord, 0) - var locations []source.Location + var locations []file.Location md5Reader, md5Location := fetchMd5Contents(resolver, dbLocation, m) @@ -168,7 +168,7 @@ func getAdditionalFileListing(resolver source.FileResolver, dbLocation source.Lo } //nolint:dupl -func fetchMd5Contents(resolver source.FileResolver, dbLocation source.Location, m pkg.DpkgMetadata) (io.ReadCloser, *source.Location) { +func fetchMd5Contents(resolver file.Resolver, dbLocation file.Location, m pkg.DpkgMetadata) (io.ReadCloser, *file.Location) { var md5Reader io.ReadCloser var err error @@ -204,7 +204,7 @@ func fetchMd5Contents(resolver source.FileResolver, dbLocation source.Location, } //nolint:dupl -func fetchConffileContents(resolver source.FileResolver, dbLocation source.Location, m pkg.DpkgMetadata) (io.ReadCloser, *source.Location) { +func 
fetchConffileContents(resolver file.Resolver, dbLocation file.Location, m pkg.DpkgMetadata) (io.ReadCloser, *file.Location) { var reader io.ReadCloser var err error @@ -239,7 +239,7 @@ func fetchConffileContents(resolver source.FileResolver, dbLocation source.Locat return reader, &l } -func fetchCopyrightContents(resolver source.FileResolver, dbLocation source.Location, m pkg.DpkgMetadata) (io.ReadCloser, *source.Location) { +func fetchCopyrightContents(resolver file.Resolver, dbLocation file.Location, m pkg.DpkgMetadata) (io.ReadCloser, *file.Location) { if resolver == nil { return nil, nil } diff --git a/syft/pkg/cataloger/deb/parse_dpkg_db.go b/syft/pkg/cataloger/deb/parse_dpkg_db.go index cd4c1ff535e..0a7dccb2d30 100644 --- a/syft/pkg/cataloger/deb/parse_dpkg_db.go +++ b/syft/pkg/cataloger/deb/parse_dpkg_db.go @@ -14,9 +14,9 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var ( @@ -24,7 +24,7 @@ var ( sourceRegexp = regexp.MustCompile(`(?P\S+)( \((?P.*)\))?`) ) -func parseDpkgDB(resolver source.FileResolver, env *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseDpkgDB(resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { metadata, err := parseDpkgStatus(reader) if err != nil { return nil, nil, fmt.Errorf("unable to catalog dpkg DB=%q: %w", reader.RealPath, err) diff --git a/syft/pkg/cataloger/deb/parse_dpkg_db_test.go b/syft/pkg/cataloger/deb/parse_dpkg_db_test.go index fc4e51633ad..0a2c58bd895 100644 --- a/syft/pkg/cataloger/deb/parse_dpkg_db_test.go +++ b/syft/pkg/cataloger/deb/parse_dpkg_db_test.go @@ -15,7 +15,6 @@ import ( "github.com/anchore/syft/syft/linux" 
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func Test_parseDpkgStatus(t *testing.T) { @@ -308,7 +307,7 @@ Installed-Size: 10kib Type: "deb", PURL: "pkg:deb/debian/apt?distro=debian-10", Licenses: pkg.NewLicenseSet(), - Locations: source.NewLocationSet(source.NewLocation("place")), + Locations: file.NewLocationSet(file.NewLocation("place")), MetadataType: "DpkgMetadata", Metadata: pkg.DpkgMetadata{ Package: "apt", diff --git a/syft/pkg/cataloger/dotnet/package.go b/syft/pkg/cataloger/dotnet/package.go index 15ef7b71071..c8cb261a6fd 100644 --- a/syft/pkg/cataloger/dotnet/package.go +++ b/syft/pkg/cataloger/dotnet/package.go @@ -4,11 +4,11 @@ import ( "strings" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newDotnetDepsPackage(nameVersion string, lib dotnetDepsLibrary, locations ...source.Location) *pkg.Package { +func newDotnetDepsPackage(nameVersion string, lib dotnetDepsLibrary, locations ...file.Location) *pkg.Package { if lib.Type != "package" { return nil } @@ -28,7 +28,7 @@ func newDotnetDepsPackage(nameVersion string, lib dotnetDepsLibrary, locations . 
p := &pkg.Package{ Name: name, Version: version, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(m), Language: pkg.Dotnet, Type: pkg.DotnetPkg, diff --git a/syft/pkg/cataloger/dotnet/parse_dotnet_deps.go b/syft/pkg/cataloger/dotnet/parse_dotnet_deps.go index 0e322d3db25..2c7e1cf0bf9 100644 --- a/syft/pkg/cataloger/dotnet/parse_dotnet_deps.go +++ b/syft/pkg/cataloger/dotnet/parse_dotnet_deps.go @@ -6,9 +6,9 @@ import ( "sort" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseDotnetDeps @@ -24,7 +24,7 @@ type dotnetDepsLibrary struct { HashPath string `json:"hashPath"` } -func parseDotnetDeps(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseDotnetDeps(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package dec := json.NewDecoder(reader) diff --git a/syft/pkg/cataloger/dotnet/parse_dotnet_deps_test.go b/syft/pkg/cataloger/dotnet/parse_dotnet_deps_test.go index 0065f110f6b..b8535374472 100644 --- a/syft/pkg/cataloger/dotnet/parse_dotnet_deps_test.go +++ b/syft/pkg/cataloger/dotnet/parse_dotnet_deps_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseDotnetDeps(t *testing.T) { fixture := "test-fixtures/TestLibrary.deps.json" - fixtureLocationSet := source.NewLocationSet(source.NewLocation(fixture)) + fixtureLocationSet := file.NewLocationSet(file.NewLocation(fixture)) expected := []pkg.Package{ { Name: "AWSSDK.Core", diff 
--git a/syft/pkg/cataloger/elixir/package.go b/syft/pkg/cataloger/elixir/package.go index fc1ca514736..85dcd1f4253 100644 --- a/syft/pkg/cataloger/elixir/package.go +++ b/syft/pkg/cataloger/elixir/package.go @@ -2,16 +2,16 @@ package elixir import ( "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackage(d pkg.MixLockMetadata, locations ...source.Location) pkg.Package { +func newPackage(d pkg.MixLockMetadata, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: d.Name, Version: d.Version, Language: pkg.Elixir, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(d), Type: pkg.HexPkg, MetadataType: pkg.MixLockMetadataType, diff --git a/syft/pkg/cataloger/elixir/parse_mix_lock.go b/syft/pkg/cataloger/elixir/parse_mix_lock.go index 6de1fc8f703..46b4f4aa36a 100644 --- a/syft/pkg/cataloger/elixir/parse_mix_lock.go +++ b/syft/pkg/cataloger/elixir/parse_mix_lock.go @@ -9,9 +9,9 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // integrity check @@ -20,7 +20,7 @@ var _ generic.Parser = parseMixLock var mixLockDelimiter = regexp.MustCompile(`[%{}\n" ,:]+`) // parseMixLock parses a mix.lock and returns the discovered Elixir packages. 
-func parseMixLock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseMixLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { r := bufio.NewReader(reader) var packages []pkg.Package diff --git a/syft/pkg/cataloger/elixir/parse_mix_lock_test.go b/syft/pkg/cataloger/elixir/parse_mix_lock_test.go index 2f5de43d4e1..4b01f04cd1e 100644 --- a/syft/pkg/cataloger/elixir/parse_mix_lock_test.go +++ b/syft/pkg/cataloger/elixir/parse_mix_lock_test.go @@ -4,13 +4,13 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseMixLock(t *testing.T) { - locations := source.NewLocationSet(source.NewLocation("test-fixtures/mix.lock")) + locations := file.NewLocationSet(file.NewLocation("test-fixtures/mix.lock")) expected := []pkg.Package{ { Name: "castore", diff --git a/syft/pkg/cataloger/erlang/package.go b/syft/pkg/cataloger/erlang/package.go index 2eb89053819..5fa28e59503 100644 --- a/syft/pkg/cataloger/erlang/package.go +++ b/syft/pkg/cataloger/erlang/package.go @@ -2,16 +2,16 @@ package erlang import ( "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackage(d pkg.RebarLockMetadata, locations ...source.Location) pkg.Package { +func newPackage(d pkg.RebarLockMetadata, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: d.Name, Version: d.Version, Language: pkg.Erlang, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(d), Type: pkg.HexPkg, MetadataType: pkg.RebarLockMetadataType, diff --git a/syft/pkg/cataloger/erlang/parse_rebar_lock.go 
b/syft/pkg/cataloger/erlang/parse_rebar_lock.go index 547a4d3ec4f..a2066f2cad5 100644 --- a/syft/pkg/cataloger/erlang/parse_rebar_lock.go +++ b/syft/pkg/cataloger/erlang/parse_rebar_lock.go @@ -3,15 +3,15 @@ package erlang import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // parseRebarLock parses a rebar.lock and returns the discovered Elixir packages. // //nolint:funlen -func parseRebarLock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseRebarLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { doc, err := parseErlang(reader) if err != nil { return nil, nil, err diff --git a/syft/pkg/cataloger/erlang/parse_rebar_lock_test.go b/syft/pkg/cataloger/erlang/parse_rebar_lock_test.go index b1293143277..dc4ee9104a7 100644 --- a/syft/pkg/cataloger/erlang/parse_rebar_lock_test.go +++ b/syft/pkg/cataloger/erlang/parse_rebar_lock_test.go @@ -4,9 +4,9 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseRebarLock(t *testing.T) { @@ -263,7 +263,7 @@ func TestParseRebarLock(t *testing.T) { var expectedRelationships []artifact.Relationship for idx := range test.expected { - test.expected[idx].Locations = source.NewLocationSet(source.NewLocation(test.fixture)) + test.expected[idx].Locations = file.NewLocationSet(file.NewLocation(test.fixture)) } pkgtest.TestFileParser(t, test.fixture, parseRebarLock, test.expected, expectedRelationships) diff --git a/syft/pkg/cataloger/generic/cataloger.go 
b/syft/pkg/cataloger/generic/cataloger.go index d2069ffff52..b898133f74d 100644 --- a/syft/pkg/cataloger/generic/cataloger.go +++ b/syft/pkg/cataloger/generic/cataloger.go @@ -4,15 +4,15 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -type processor func(resolver source.FileResolver, env Environment) []request +type processor func(resolver file.Resolver, env Environment) []request type request struct { - source.Location + file.Location Parser } @@ -25,7 +25,7 @@ type Cataloger struct { func (c *Cataloger) WithParserByGlobs(parser Parser, globs ...string) *Cataloger { c.processor = append(c.processor, - func(resolver source.FileResolver, env Environment) []request { + func(resolver file.Resolver, env Environment) []request { var requests []request for _, g := range globs { log.WithFields("glob", g).Trace("searching for paths matching glob") @@ -45,7 +45,7 @@ func (c *Cataloger) WithParserByGlobs(parser Parser, globs ...string) *Cataloger func (c *Cataloger) WithParserByMimeTypes(parser Parser, types ...string) *Cataloger { c.processor = append(c.processor, - func(resolver source.FileResolver, env Environment) []request { + func(resolver file.Resolver, env Environment) []request { var requests []request log.WithFields("mimetypes", types).Trace("searching for paths matching mimetype") matches, err := resolver.FilesByMIMEType(types...) 
@@ -62,7 +62,7 @@ func (c *Cataloger) WithParserByMimeTypes(parser Parser, types ...string) *Catal func (c *Cataloger) WithParserByPath(parser Parser, paths ...string) *Cataloger { c.processor = append(c.processor, - func(resolver source.FileResolver, env Environment) []request { + func(resolver file.Resolver, env Environment) []request { var requests []request for _, p := range paths { log.WithFields("path", p).Trace("searching for path") @@ -80,7 +80,7 @@ func (c *Cataloger) WithParserByPath(parser Parser, paths ...string) *Cataloger return c } -func makeRequests(parser Parser, locations []source.Location) []request { +func makeRequests(parser Parser, locations []file.Location) []request { var requests []request for _, l := range locations { requests = append(requests, request{ @@ -104,7 +104,7 @@ func (c *Cataloger) Name() string { } // Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing the catalog source. -func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { +func (c *Cataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { var packages []pkg.Package var relationships []artifact.Relationship @@ -126,7 +126,7 @@ func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []arti continue } - discoveredPackages, discoveredRelationships, err := parser(resolver, &env, source.NewLocationReadCloser(location, contentReader)) + discoveredPackages, discoveredRelationships, err := parser(resolver, &env, file.NewLocationReadCloser(location, contentReader)) internal.CloseAndLogError(contentReader, location.VirtualPath) if err != nil { logger.WithFields("location", location.RealPath, "error", err).Warnf("cataloger failed") @@ -144,7 +144,7 @@ func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []arti } // selectFiles takes a set of file trees and resolves and file 
references of interest for future cataloging -func (c *Cataloger) selectFiles(resolver source.FileResolver) []request { +func (c *Cataloger) selectFiles(resolver file.Resolver) []request { var requests []request for _, proc := range c.processor { requests = append(requests, proc(resolver, Environment{})...) diff --git a/syft/pkg/cataloger/generic/cataloger_test.go b/syft/pkg/cataloger/generic/cataloger_test.go index 5476888d23a..d2aabf28c8d 100644 --- a/syft/pkg/cataloger/generic/cataloger_test.go +++ b/syft/pkg/cataloger/generic/cataloger_test.go @@ -9,13 +9,13 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) func Test_Cataloger(t *testing.T) { allParsedPaths := make(map[string]bool) - parser := func(resolver source.FileResolver, env *Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { + parser := func(resolver file.Resolver, env *Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { allParsedPaths[reader.AccessPath()] = true contents, err := io.ReadAll(reader) require.NoError(t, err) @@ -26,7 +26,7 @@ func Test_Cataloger(t *testing.T) { p := pkg.Package{ Name: string(contents), - Locations: source.NewLocationSet(reader.Location), + Locations: file.NewLocationSet(reader.Location), } r := artifact.Relationship{ From: p, @@ -40,7 +40,7 @@ func Test_Cataloger(t *testing.T) { upstream := "some-other-cataloger" expectedSelection := []string{"test-fixtures/last/path.txt", "test-fixtures/another-path.txt", "test-fixtures/a-path.txt", "test-fixtures/empty.txt"} - resolver := source.NewMockResolverForPaths(expectedSelection...) + resolver := file.NewMockResolverForPaths(expectedSelection...) cataloger := NewCataloger(upstream). WithParserByPath(parser, "test-fixtures/another-path.txt", "test-fixtures/last/path.txt"). 
WithParserByGlobs(parser, "**/a-path.txt", "**/empty.txt") diff --git a/syft/pkg/cataloger/generic/parser.go b/syft/pkg/cataloger/generic/parser.go index 32b62f579aa..c95808fc175 100644 --- a/syft/pkg/cataloger/generic/parser.go +++ b/syft/pkg/cataloger/generic/parser.go @@ -2,13 +2,13 @@ package generic import ( "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) type Environment struct { LinuxRelease *linux.Release } -type Parser func(source.FileResolver, *Environment, source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) +type Parser func(file.Resolver, *Environment, file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) diff --git a/syft/pkg/cataloger/golang/cataloger.go b/syft/pkg/cataloger/golang/cataloger.go index d28a9ed9aae..bde2a9b5715 100644 --- a/syft/pkg/cataloger/golang/cataloger.go +++ b/syft/pkg/cataloger/golang/cataloger.go @@ -7,9 +7,9 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/event" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // NewGoModFileCataloger returns a new Go module cataloger object. 
@@ -45,7 +45,7 @@ func (p *progressingCataloger) Name() string { return p.cataloger.Name() } -func (p *progressingCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { +func (p *progressingCataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { defer p.progress.SetCompleted() return p.cataloger.Catalog(resolver) } diff --git a/syft/pkg/cataloger/golang/licenses.go b/syft/pkg/cataloger/golang/licenses.go index e85ad7ec7f0..829a73dd3f3 100644 --- a/syft/pkg/cataloger/golang/licenses.go +++ b/syft/pkg/cataloger/golang/licenses.go @@ -22,13 +22,14 @@ import ( "github.com/anchore/syft/internal/licenses" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/event" + "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/internal/fileresolver" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) type goLicenses struct { opts GoCatalogerOpts - localModCacheResolver source.WritableFileResolver + localModCacheResolver file.WritableResolver progress *event.CatalogerTask } @@ -55,27 +56,27 @@ func remotesForModule(proxies []string, noProxy []string, module string) []strin return proxies } -func modCacheResolver(modCacheDir string) source.WritableFileResolver { - var r source.WritableFileResolver +func modCacheResolver(modCacheDir string) file.WritableResolver { + var r file.WritableResolver if modCacheDir == "" { log.Trace("unable to determine mod cache directory, skipping mod cache resolver") - r = source.EmptyResolver{} + r = fileresolver.Empty{} } else { stat, err := os.Stat(modCacheDir) if os.IsNotExist(err) || stat == nil || !stat.IsDir() { log.Tracef("unable to open mod cache directory: %s, skipping mod cache resolver", modCacheDir) - r = source.EmptyResolver{} + r = fileresolver.Empty{} } else { - r = source.NewUnindexedDirectoryResolver(modCacheDir) + r = fileresolver.NewFromUnindexedDirectory(modCacheDir) } } return r } -func (c 
*goLicenses) getLicenses(resolver source.FileResolver, moduleName, moduleVersion string) (licenses []pkg.License, err error) { +func (c *goLicenses) getLicenses(resolver file.Resolver, moduleName, moduleVersion string) (licenses []pkg.License, err error) { licenses, err = findLicenses(resolver, fmt.Sprintf(`**/go/pkg/mod/%s@%s/*`, processCaps(moduleName), moduleVersion), ) @@ -131,7 +132,7 @@ func (c *goLicenses) getLicensesFromRemote(moduleName, moduleVersion string) ([] if err != nil { return err } - return c.localModCacheResolver.Write(source.NewLocation(path.Join(dir, filePath)), f) + return c.localModCacheResolver.Write(file.NewLocation(path.Join(dir, filePath)), f) }) if err != nil { @@ -156,7 +157,7 @@ func requireCollection(licenses []pkg.License) []pkg.License { return licenses } -func findLicenses(resolver source.FileResolver, globMatch string) (out []pkg.License, err error) { +func findLicenses(resolver file.Resolver, globMatch string) (out []pkg.License, err error) { out = make([]pkg.License, 0) if resolver == nil { return diff --git a/syft/pkg/cataloger/golang/licenses_test.go b/syft/pkg/cataloger/golang/licenses_test.go index 8f4545198bf..37df6547dee 100644 --- a/syft/pkg/cataloger/golang/licenses_test.go +++ b/syft/pkg/cataloger/golang/licenses_test.go @@ -14,14 +14,15 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/syft/internal" + "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/internal/fileresolver" "github.com/anchore/syft/syft/license" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) func Test_LocalLicenseSearch(t *testing.T) { - loc1 := source.NewLocation("github.com/someorg/somename@v0.3.2/LICENSE") - loc2 := source.NewLocation("github.com/!cap!o!r!g/!cap!project@v4.111.5/LICENSE.txt") + loc1 := file.NewLocation("github.com/someorg/somename@v0.3.2/LICENSE") + loc2 := file.NewLocation("github.com/!cap!o!r!g/!cap!project@v4.111.5/LICENSE.txt") tests := []struct { name string 
@@ -35,7 +36,7 @@ func Test_LocalLicenseSearch(t *testing.T) { Value: "Apache-2.0", SPDXExpression: "Apache-2.0", Type: license.Concluded, - Locations: source.NewLocationSet(loc1), + Locations: file.NewLocationSet(loc1), URLs: internal.NewStringSet(), }, }, @@ -46,7 +47,7 @@ func Test_LocalLicenseSearch(t *testing.T) { Value: "MIT", SPDXExpression: "MIT", Type: license.Concluded, - Locations: source.NewLocationSet(loc2), + Locations: file.NewLocationSet(loc2), URLs: internal.NewStringSet(), }, }, @@ -63,7 +64,7 @@ func Test_LocalLicenseSearch(t *testing.T) { localModCacheDir: path.Join(wd, "test-fixtures", "licenses", "pkg", "mod"), }, ) - licenses, err := l.getLicenses(source.EmptyResolver{}, test.name, test.version) + licenses, err := l.getLicenses(fileresolver.Empty{}, test.name, test.version) require.NoError(t, err) require.Len(t, licenses, 1) @@ -74,8 +75,8 @@ func Test_LocalLicenseSearch(t *testing.T) { } func Test_RemoteProxyLicenseSearch(t *testing.T) { - loc1 := source.NewLocation("github.com/someorg/somename@v0.3.2/LICENSE") - loc2 := source.NewLocation("github.com/!cap!o!r!g/!cap!project@v4.111.5/LICENSE.txt") + loc1 := file.NewLocation("github.com/someorg/somename@v0.3.2/LICENSE") + loc2 := file.NewLocation("github.com/!cap!o!r!g/!cap!project@v4.111.5/LICENSE.txt") server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { buf := &bytes.Buffer{} @@ -126,7 +127,7 @@ func Test_RemoteProxyLicenseSearch(t *testing.T) { Value: "Apache-2.0", SPDXExpression: "Apache-2.0", Type: license.Concluded, - Locations: source.NewLocationSet(loc1), + Locations: file.NewLocationSet(loc1), URLs: internal.NewStringSet(), }, }, @@ -137,7 +138,7 @@ func Test_RemoteProxyLicenseSearch(t *testing.T) { Value: "MIT", SPDXExpression: "MIT", Type: license.Concluded, - Locations: source.NewLocationSet(loc2), + Locations: file.NewLocationSet(loc2), URLs: internal.NewStringSet(), }, }, @@ -153,7 +154,7 @@ func Test_RemoteProxyLicenseSearch(t 
*testing.T) { localModCacheDir: modDir, }) - licenses, err := l.getLicenses(source.EmptyResolver{}, test.name, test.version) + licenses, err := l.getLicenses(fileresolver.Empty{}, test.name, test.version) require.NoError(t, err) require.Len(t, licenses, 1) diff --git a/syft/pkg/cataloger/golang/package.go b/syft/pkg/cataloger/golang/package.go index 2e4c2808994..30ba083b48f 100644 --- a/syft/pkg/cataloger/golang/package.go +++ b/syft/pkg/cataloger/golang/package.go @@ -7,18 +7,18 @@ import ( "github.com/anchore/packageurl-go" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func (c *goBinaryCataloger) newGoBinaryPackage(resolver source.FileResolver, dep *debug.Module, mainModule, goVersion, architecture string, buildSettings map[string]string, locations ...source.Location) pkg.Package { +func (c *goBinaryCataloger) newGoBinaryPackage(resolver file.Resolver, dep *debug.Module, mainModule, goVersion, architecture string, buildSettings map[string]string, locations ...file.Location) pkg.Package { if dep.Replace != nil { dep = dep.Replace } licenses, err := c.licenses.getLicenses(resolver, dep.Path, dep.Version) if err != nil { - log.Tracef("error getting licenses for package: %s %v", dep.Path, err) + log.Tracef("error getting licenses for golang package: %s %v", dep.Path, err) } p := pkg.Package{ @@ -28,7 +28,7 @@ func (c *goBinaryCataloger) newGoBinaryPackage(resolver source.FileResolver, dep PURL: packageURL(dep.Path, dep.Version), Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), MetadataType: pkg.GolangBinMetadataType, Metadata: pkg.GolangBinMetadata{ GoCompiledVersion: goVersion, diff --git a/syft/pkg/cataloger/golang/parse_go_binary.go b/syft/pkg/cataloger/golang/parse_go_binary.go index c7b99fd2501..89ed4ba0742 100644 --- a/syft/pkg/cataloger/golang/parse_go_binary.go 
+++ b/syft/pkg/cataloger/golang/parse_go_binary.go @@ -8,6 +8,7 @@ import ( "errors" "fmt" "io" + "regexp" "runtime/debug" "strings" "time" @@ -17,11 +18,11 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" "github.com/anchore/syft/syft/pkg/cataloger/golang/internal/xcoff" "github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader" - "github.com/anchore/syft/syft/source" ) const GOARCH = "GOARCH" @@ -34,6 +35,11 @@ var ( // devel is used to recognize the current default version when a golang main distribution is built // https://github.com/golang/go/issues/29228 this issue has more details on the progress of being able to // inject the correct version into the main module of the build process + + knownBuildFlagPatterns = []*regexp.Regexp{ + regexp.MustCompile(`(?m)\.([gG]it)?([bB]uild)?[vV]ersion=(\S+/)*(?P<version>v?\d+.\d+.\d+[-\w]*)`), + regexp.MustCompile(`(?m)\.([tT]ag)=(\S+/)*(?P<version>v?\d+.\d+.\d+[-\w]*)`), + } ) const devel = "(devel)" @@ -43,7 +49,7 @@ type goBinaryCataloger struct { } // Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing rpm db installation. 
-func (c *goBinaryCataloger) parseGoBinary(resolver source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func (c *goBinaryCataloger) parseGoBinary(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package unionReader, err := unionreader.GetUnionReader(reader.ReadCloser) @@ -60,7 +66,7 @@ func (c *goBinaryCataloger) parseGoBinary(resolver source.FileResolver, _ *gener return pkgs, nil, nil } -func (c *goBinaryCataloger) makeGoMainPackage(resolver source.FileResolver, mod *debug.BuildInfo, arch string, location source.Location) pkg.Package { +func (c *goBinaryCataloger) makeGoMainPackage(resolver file.Resolver, mod *debug.BuildInfo, arch string, location file.Location) pkg.Package { gbs := getBuildSettings(mod.Settings) main := c.newGoBinaryPackage( resolver, @@ -71,27 +77,79 @@ func (c *goBinaryCataloger) makeGoMainPackage(resolver source.FileResolver, mod gbs, location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), ) - if main.Version == devel { - if version, ok := gbs["vcs.revision"]; ok { - if timestamp, ok := gbs["vcs.time"]; ok { - //NOTE: err is ignored, because if parsing fails - // we still use the empty Time{} struct to generate an empty date, like 00010101000000 - // for consistency with the pseudo-version format: https://go.dev/ref/mod#pseudo-versions - ts, _ := time.Parse(time.RFC3339, timestamp) - if len(version) >= 12 { - version = version[:12] - } - version = module.PseudoVersion("", "", ts, version) - } - main.Version = version - main.PURL = packageURL(main.Name, main.Version) - main.SetID() + + if main.Version != devel { + return main + } + + version, hasVersion := gbs["vcs.revision"] + timestamp, hasTimestamp := gbs["vcs.time"] + + var ldflags string + if metadata, ok := main.Metadata.(pkg.GolangBinMetadata); ok { + // we've found a specific version 
from the ldflags! use it as the version. + // why not combine that with the pseudo version (e.g. v1.2.3-0.20210101000000-abcdef123456)? + // short answer: we're assuming that if a specific semver was provided in the ldflags that + // there is a matching vcs tag to match that could be referenced. This assumption could + // be incorrect in terms of the go.mod contents, but is not incorrect in terms of the logical + // version of the package. + ldflags = metadata.BuildSettings["-ldflags"] + } + + majorVersion, fullVersion := extractVersionFromLDFlags(ldflags) + if fullVersion != "" { + version = fullVersion + } else if hasVersion && hasTimestamp { + //NOTE: err is ignored, because if parsing fails + // we still use the empty Time{} struct to generate an empty date, like 00010101000000 + // for consistency with the pseudo-version format: https://go.dev/ref/mod#pseudo-versions + ts, _ := time.Parse(time.RFC3339, timestamp) + if len(version) >= 12 { + version = version[:12] } + + version = module.PseudoVersion(majorVersion, fullVersion, ts, version) + } + if version != "" { + main.Version = version + main.PURL = packageURL(main.Name, main.Version) + + main.SetID() } return main } +func extractVersionFromLDFlags(ldflags string) (majorVersion string, fullVersion string) { + if ldflags == "" { + return "", "" + } + + for _, pattern := range knownBuildFlagPatterns { + groups := internal.MatchNamedCaptureGroups(pattern, ldflags) + v, ok := groups["version"] + + if !ok { + continue + } + + fullVersion = v + if !strings.HasPrefix(v, "v") { + fullVersion = fmt.Sprintf("v%s", v) + } + components := strings.Split(v, ".") + + if len(components) == 0 { + continue + } + + majorVersion = strings.TrimPrefix(components[0], "v") + return majorVersion, fullVersion + } + + return "", "" +} + // getArchs finds a binary architecture by two ways: // 1) reading build info from binaries compiled by go1.18+ // 2) reading file headers from binaries compiled by < go1.18 @@ -197,7 +255,7 @@ func 
createMainModuleFromPath(path string) (mod debug.Module) { return } -func (c *goBinaryCataloger) buildGoPkgInfo(resolver source.FileResolver, location source.Location, mod *debug.BuildInfo, arch string) []pkg.Package { +func (c *goBinaryCataloger) buildGoPkgInfo(resolver file.Resolver, location file.Location, mod *debug.BuildInfo, arch string) []pkg.Package { var pkgs []pkg.Package if mod == nil { return pkgs diff --git a/syft/pkg/cataloger/golang/parse_go_binary_test.go b/syft/pkg/cataloger/golang/parse_go_binary_test.go index d180d158fdc..d578b46adaa 100644 --- a/syft/pkg/cataloger/golang/parse_go_binary_test.go +++ b/syft/pkg/cataloger/golang/parse_go_binary_test.go @@ -14,8 +14,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/internal/fileresolver" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) // make will run the default make target for the given test fixture path @@ -135,9 +136,9 @@ func TestBuildGoPkgInfo(t *testing.T) { Type: pkg.GoModulePkg, Version: "(devel)", PURL: "pkg:golang/github.com/anchore/syft@(devel)", - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -182,9 +183,9 @@ func TestBuildGoPkgInfo(t *testing.T) { PURL: "pkg:golang/github.com/adrg/xdg", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -225,9 +226,9 @@ func TestBuildGoPkgInfo(t *testing.T) { PURL: "pkg:golang/github.com/adrg/xdg@v0.2.1", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: source.NewLocationSet( - 
source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -261,9 +262,9 @@ func TestBuildGoPkgInfo(t *testing.T) { PURL: "pkg:golang/github.com/a/b/c@(devel)", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -299,7 +300,7 @@ func TestBuildGoPkgInfo(t *testing.T) { expected: []pkg.Package{unmodifiedMain}, }, { - name: "parse main mod and replace devel version", + name: "parse main mod and replace devel pseudo version and ldflags exists (but contains no version)", arch: archDetails, mod: &debug.BuildInfo{ GoVersion: goCompiledVersion, @@ -310,6 +311,7 @@ func TestBuildGoPkgInfo(t *testing.T) { {Key: "GOAMD64", Value: "v1"}, {Key: "vcs.revision", Value: "41bc6bb410352845f22766e27dd48ba93aa825a4"}, {Key: "vcs.time", Value: "2022-10-14T19:54:57Z"}, + {Key: "-ldflags", Value: `build -ldflags="-w -s -extldflags '-static' -X blah=foobar`}, }, }, expected: []pkg.Package{ @@ -319,9 +321,231 @@ func TestBuildGoPkgInfo(t *testing.T) { Type: pkg.GoModulePkg, Version: "v0.0.0-20221014195457-41bc6bb41035", PURL: "pkg:golang/github.com/anchore/syft@v0.0.0-20221014195457-41bc6bb41035", - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ + RealPath: "/a-path", + FileSystemID: "layer-id", + }, + ).WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), + ), + MetadataType: pkg.GolangBinMetadataType, + Metadata: pkg.GolangBinMetadata{ + GoCompiledVersion: goCompiledVersion, + Architecture: archDetails, + BuildSettings: map[string]string{ + "GOARCH": archDetails, + "GOOS": 
"darwin", + "GOAMD64": "v1", + "vcs.revision": "41bc6bb410352845f22766e27dd48ba93aa825a4", + "vcs.time": "2022-10-14T19:54:57Z", + "-ldflags": `build -ldflags="-w -s -extldflags '-static' -X blah=foobar`, + }, + MainModule: "github.com/anchore/syft", + }, + }, + }, + }, + { + name: "parse main mod and replace devel version with one from ldflags with vcs. build settings", + arch: archDetails, + mod: &debug.BuildInfo{ + GoVersion: goCompiledVersion, + Main: debug.Module{Path: "github.com/anchore/syft", Version: "(devel)"}, + Settings: []debug.BuildSetting{ + {Key: "GOARCH", Value: archDetails}, + {Key: "GOOS", Value: "darwin"}, + {Key: "GOAMD64", Value: "v1"}, + {Key: "vcs.revision", Value: "41bc6bb410352845f22766e27dd48ba93aa825a4"}, + {Key: "vcs.time", Value: "2022-10-14T19:54:57Z"}, + {Key: "-ldflags", Value: `build -ldflags="-w -s -extldflags '-static' -X github.com/anchore/syft/internal/version.version=0.79.0`}, + }, + }, + expected: []pkg.Package{ + { + Name: "github.com/anchore/syft", + Language: pkg.Go, + Type: pkg.GoModulePkg, + Version: "v0.79.0", + PURL: "pkg:golang/github.com/anchore/syft@v0.79.0", + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ + RealPath: "/a-path", + FileSystemID: "layer-id", + }, + ).WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), + ), + MetadataType: pkg.GolangBinMetadataType, + Metadata: pkg.GolangBinMetadata{ + GoCompiledVersion: goCompiledVersion, + Architecture: archDetails, + BuildSettings: map[string]string{ + "GOARCH": archDetails, + "GOOS": "darwin", + "GOAMD64": "v1", + "vcs.revision": "41bc6bb410352845f22766e27dd48ba93aa825a4", + "vcs.time": "2022-10-14T19:54:57Z", + "-ldflags": `build -ldflags="-w -s -extldflags '-static' -X github.com/anchore/syft/internal/version.version=0.79.0`, + }, + MainModule: "github.com/anchore/syft", + }, + }, + }, + }, + { + name: "parse main mod and replace devel version with one from ldflags without any vcs. 
build settings", + arch: archDetails, + mod: &debug.BuildInfo{ + GoVersion: goCompiledVersion, + Main: debug.Module{Path: "github.com/anchore/syft", Version: "(devel)"}, + Settings: []debug.BuildSetting{ + {Key: "GOARCH", Value: archDetails}, + {Key: "GOOS", Value: "darwin"}, + {Key: "GOAMD64", Value: "v1"}, + {Key: "-ldflags", Value: `build -ldflags="-w -s -extldflags '-static' -X github.com/anchore/syft/internal/version.version=0.79.0`}, + }, + }, + expected: []pkg.Package{ + { + Name: "github.com/anchore/syft", + Language: pkg.Go, + Type: pkg.GoModulePkg, + Version: "v0.79.0", + PURL: "pkg:golang/github.com/anchore/syft@v0.79.0", + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ + RealPath: "/a-path", + FileSystemID: "layer-id", + }, + ).WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), + ), + MetadataType: pkg.GolangBinMetadataType, + Metadata: pkg.GolangBinMetadata{ + GoCompiledVersion: goCompiledVersion, + Architecture: archDetails, + BuildSettings: map[string]string{ + "GOARCH": archDetails, + "GOOS": "darwin", + "GOAMD64": "v1", + "-ldflags": `build -ldflags="-w -s -extldflags '-static' -X github.com/anchore/syft/internal/version.version=0.79.0`, + }, + MainModule: "github.com/anchore/syft", + }, + }, + }, + }, + { + name: "parse main mod and replace devel version with one from ldflags main.version without any vcs. 
build settings", + arch: archDetails, + mod: &debug.BuildInfo{ + GoVersion: goCompiledVersion, + Main: debug.Module{Path: "github.com/anchore/syft", Version: "(devel)"}, + Settings: []debug.BuildSetting{ + {Key: "GOARCH", Value: archDetails}, + {Key: "GOOS", Value: "darwin"}, + {Key: "GOAMD64", Value: "v1"}, + {Key: "-ldflags", Value: `build -ldflags="-w -s -extldflags '-static' -X main.version=0.79.0`}, + }, + }, + expected: []pkg.Package{ + { + Name: "github.com/anchore/syft", + Language: pkg.Go, + Type: pkg.GoModulePkg, + Version: "v0.79.0", + PURL: "pkg:golang/github.com/anchore/syft@v0.79.0", + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ + RealPath: "/a-path", + FileSystemID: "layer-id", + }, + ).WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), + ), + MetadataType: pkg.GolangBinMetadataType, + Metadata: pkg.GolangBinMetadata{ + GoCompiledVersion: goCompiledVersion, + Architecture: archDetails, + BuildSettings: map[string]string{ + "GOARCH": archDetails, + "GOOS": "darwin", + "GOAMD64": "v1", + "-ldflags": `build -ldflags="-w -s -extldflags '-static' -X main.version=0.79.0`, + }, + MainModule: "github.com/anchore/syft", + }, + }, + }, + }, + { + name: "parse main mod and replace devel version with one from ldflags main.Version without any vcs. 
build settings", + arch: archDetails, + mod: &debug.BuildInfo{ + GoVersion: goCompiledVersion, + Main: debug.Module{Path: "github.com/anchore/syft", Version: "(devel)"}, + Settings: []debug.BuildSetting{ + {Key: "GOARCH", Value: archDetails}, + {Key: "GOOS", Value: "darwin"}, + {Key: "GOAMD64", Value: "v1"}, + {Key: "-ldflags", Value: `build -ldflags="-w -s -extldflags '-static' -X main.Version=0.79.0`}, + }, + }, + expected: []pkg.Package{ + { + Name: "github.com/anchore/syft", + Language: pkg.Go, + Type: pkg.GoModulePkg, + Version: "v0.79.0", + PURL: "pkg:golang/github.com/anchore/syft@v0.79.0", + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ + RealPath: "/a-path", + FileSystemID: "layer-id", + }, + ).WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), + ), + MetadataType: pkg.GolangBinMetadataType, + Metadata: pkg.GolangBinMetadata{ + GoCompiledVersion: goCompiledVersion, + Architecture: archDetails, + BuildSettings: map[string]string{ + "GOARCH": archDetails, + "GOOS": "darwin", + "GOAMD64": "v1", + "-ldflags": `build -ldflags="-w -s -extldflags '-static' -X main.Version=0.79.0`, + }, + MainModule: "github.com/anchore/syft", + }, + }, + }, + }, + { + name: "parse main mod and replace devel version with a pseudo version", + arch: archDetails, + mod: &debug.BuildInfo{ + GoVersion: goCompiledVersion, + Main: debug.Module{Path: "github.com/anchore/syft", Version: "(devel)"}, + Settings: []debug.BuildSetting{ + {Key: "GOARCH", Value: archDetails}, + {Key: "GOOS", Value: "darwin"}, + {Key: "GOAMD64", Value: "v1"}, + {Key: "vcs.revision", Value: "41bc6bb410352845f22766e27dd48ba93aa825a4"}, + {Key: "vcs.time", Value: "2022-10-14T19:54:57Z"}, + }, + }, + expected: []pkg.Package{ + { + Name: "github.com/anchore/syft", + Language: pkg.Go, + Type: pkg.GoModulePkg, + Version: "v0.0.0-20221014195457-41bc6bb41035", + PURL: "pkg:golang/github.com/anchore/syft@v0.0.0-20221014195457-41bc6bb41035", + Locations: 
file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -374,9 +598,9 @@ func TestBuildGoPkgInfo(t *testing.T) { PURL: "pkg:golang/github.com/adrg/xdg@v0.2.1", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -396,9 +620,9 @@ func TestBuildGoPkgInfo(t *testing.T) { PURL: "pkg:golang/github.com/anchore/client-go@v0.0.0-20210222170800-9c70f9b80bcf", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -451,9 +675,9 @@ func TestBuildGoPkgInfo(t *testing.T) { PURL: "pkg:golang/golang.org/x/sys@v0.0.0-20211006194710-c8a6f5223071", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -472,9 +696,9 @@ func TestBuildGoPkgInfo(t *testing.T) { PURL: "pkg:golang/golang.org/x/term@v0.0.0-20210916214954-140adaaadfaf", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -499,16 +723,174 @@ func TestBuildGoPkgInfo(t *testing.T) { p := &test.expected[i] p.SetID() } - location := source.NewLocationFromCoordinates( - source.Coordinates{ + location := file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", 
FileSystemID: "layer-id", }, ) c := goBinaryCataloger{} - pkgs := c.buildGoPkgInfo(source.EmptyResolver{}, location, test.mod, test.arch) + pkgs := c.buildGoPkgInfo(fileresolver.Empty{}, location, test.mod, test.arch) assert.Equal(t, test.expected, pkgs) }) } } + +func Test_extractVersionFromLDFlags(t *testing.T) { + tests := []struct { + name string + ldflags string + wantMajorVersion string + wantFullVersion string + }{ + { + name: "empty ldflags", + ldflags: "", + }, + { + name: "syft ldflags", + ldflags: ` build -ldflags="-w -s -extldflags '-static' -X github.com/anchore/syft/internal/version.version=0.79.0 -X github.com/anchore/syft/internal/version.gitCommit=b2b332e8b2b66af0905e98b54ebd713a922be1a8 -X github.com/anchore/syft/internal/version.buildDate=2023-04-21T16:20:25Z -X github.com/anchore/syft/internal/version.gitDescription=v0.79.0 "`, + wantMajorVersion: "0", + wantFullVersion: "v0.79.0", + }, + { + name: "kubectl ldflags", + ldflags: ` build -asmflags=all=-trimpath=/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes + build -compiler=gc + build -gcflags="all=-trimpath=/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes " + build -ldflags="all=-X 'k8s.io/kubernetes/vendor/k8s.io/client-go/pkg/version.buildDate=2023-04-12T12:16:51Z' -X 'k8s.io/kubernetes/vendor/k8s.io/component-base/version.buildDate=2023-04-12T12:16:51Z' -X 'k8s.io/client-go/pkg/version.buildDate=2023-04-12T12:16:51Z' -X 'k8s.io/component-base/version.buildDate=2023-04-12T12:16:51Z' -X 'k8s.io/kubernetes/vendor/k8s.io/client-go/pkg/version.gitCommit=a1a87a0a2bcd605820920c6b0e618a8ab7d117d4' -X 'k8s.io/kubernetes/vendor/k8s.io/component-base/version.gitCommit=a1a87a0a2bcd605820920c6b0e618a8ab7d117d4' -X 'k8s.io/client-go/pkg/version.gitCommit=a1a87a0a2bcd605820920c6b0e618a8ab7d117d4' -X 'k8s.io/component-base/version.gitCommit=a1a87a0a2bcd605820920c6b0e618a8ab7d117d4' -X 
'k8s.io/kubernetes/vendor/k8s.io/client-go/pkg/version.gitTreeState=clean' -X 'k8s.io/kubernetes/vendor/k8s.io/component-base/version.gitTreeState=clean' -X 'k8s.io/client-go/pkg/version.gitTreeState=clean' -X 'k8s.io/component-base/version.gitTreeState=clean' -X 'k8s.io/kubernetes/vendor/k8s.io/client-go/pkg/version.gitVersion=v1.25.9' -X 'k8s.io/kubernetes/vendor/k8s.io/component-base/version.gitVersion=v1.25.9' -X 'k8s.io/client-go/pkg/version.gitVersion=v1.25.9' -X 'k8s.io/component-base/version.gitVersion=v1.25.9' -X 'k8s.io/kubernetes/vendor/k8s.io/client-go/pkg/version.gitMajor=1' -X 'k8s.io/kubernetes/vendor/k8s.io/component-base/version.gitMajor=1' -X 'k8s.io/client-go/pkg/version.gitMajor=1' -X 'k8s.io/component-base/version.gitMajor=1' -X 'k8s.io/kubernetes/vendor/k8s.io/client-go/pkg/version.gitMinor=25' -X 'k8s.io/kubernetes/vendor/k8s.io/component-base/version.gitMinor=25' -X 'k8s.io/client-go/pkg/version.gitMinor=25' -X 'k8s.io/component-base/version.gitMinor=25' -s -w"`, + wantMajorVersion: "1", + wantFullVersion: "v1.25.9", + }, + { + name: "nerdctl ldflags", + ldflags: ` build -ldflags="-s -w -X github.com/containerd/nerdctl/pkg/version.Version=v1.3.1 -X github.com/containerd/nerdctl/pkg/version.Revision=b224b280ff3086516763c7335fc0e0997aca617a"`, + wantMajorVersion: "1", + wantFullVersion: "v1.3.1", + }, + { + name: "limactl ldflags", + ldflags: ` build -ldflags="-s -w -X github.com/lima-vm/lima/pkg/version.Version=v0.15.1"`, + wantMajorVersion: "0", + wantFullVersion: "v0.15.1", + }, + { + name: "terraform ldflags", + ldflags: ` build -ldflags="-w -s -X 'github.com/hashicorp/terraform/version.Version=1.4.6' -X 'github.com/hashicorp/terraform/version.Prerelease='"`, + wantMajorVersion: "1", + wantFullVersion: "v1.4.6", + }, + { + name: "kube-apiserver ldflags", + ldflags: ` build -asmflags=all=-trimpath=/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes + build -buildmode=exe + build -compiler=gc + build 
-gcflags="all=-trimpath=/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes " + build -ldflags="all=-X 'k8s.io/kubernetes/vendor/k8s.io/client-go/pkg/version.buildDate=2023-04-14T13:14:42Z' -X 'k8s.io/kubernetes/vendor/k8s.io/component-base/version.buildDate=2023-04-14T13:14:42Z' -X 'k8s.io/client-go/pkg/version.buildDate=2023-04-14T13:14:42Z' -X 'k8s.io/component-base/version.buildDate=2023-04-14T13:14:42Z' -X 'k8s.io/kubernetes/vendor/k8s.io/client-go/pkg/version.gitCommit=4c9411232e10168d7b050c49a1b59f6df9d7ea4b' -X 'k8s.io/kubernetes/vendor/k8s.io/component-base/version.gitCommit=4c9411232e10168d7b050c49a1b59f6df9d7ea4b' -X 'k8s.io/client-go/pkg/version.gitCommit=4c9411232e10168d7b050c49a1b59f6df9d7ea4b' -X 'k8s.io/component-base/version.gitCommit=4c9411232e10168d7b050c49a1b59f6df9d7ea4b' -X 'k8s.io/kubernetes/vendor/k8s.io/client-go/pkg/version.gitTreeState=clean' -X 'k8s.io/kubernetes/vendor/k8s.io/component-base/version.gitTreeState=clean' -X 'k8s.io/client-go/pkg/version.gitTreeState=clean' -X 'k8s.io/component-base/version.gitTreeState=clean' -X 'k8s.io/kubernetes/vendor/k8s.io/client-go/pkg/version.gitVersion=v1.27.1' -X 'k8s.io/kubernetes/vendor/k8s.io/component-base/version.gitVersion=v1.27.1' -X 'k8s.io/client-go/pkg/version.gitVersion=v1.27.1' -X 'k8s.io/component-base/version.gitVersion=v1.27.1' -X 'k8s.io/kubernetes/vendor/k8s.io/client-go/pkg/version.gitMajor=1' -X 'k8s.io/kubernetes/vendor/k8s.io/component-base/version.gitMajor=1' -X 'k8s.io/client-go/pkg/version.gitMajor=1' -X 'k8s.io/component-base/version.gitMajor=1' -X 'k8s.io/kubernetes/vendor/k8s.io/client-go/pkg/version.gitMinor=27' -X 'k8s.io/kubernetes/vendor/k8s.io/component-base/version.gitMinor=27' -X 'k8s.io/client-go/pkg/version.gitMinor=27' -X 'k8s.io/component-base/version.gitMinor=27' -s -w"`, + wantMajorVersion: "1", + wantFullVersion: "v1.27.1", + }, + { + name: "prometheus ldflags", + ldflags: ` build -ldflags="-X 
github.com/prometheus/common/version.Version=2.44.0 -X github.com/prometheus/common/version.Revision=1ac5131f698ebc60f13fe2727f89b115a41f6558 -X github.com/prometheus/common/version.Branch=HEAD -X github.com/prometheus/common/version.BuildUser=root@739e8181c5db -X github.com/prometheus/common/version.BuildDate=20230514-06:18:11 -extldflags '-static'" + build -tags=netgo,builtinassets,stringlabels`, + wantMajorVersion: "2", + wantFullVersion: "v2.44.0", + }, + { + name: "influxdb ldflags", + ldflags: ` build -ldflags="-s -w -X main.version=v2.7.1 -X main.commit=407fa622e9 -X main.date=2023-04-28T13:24:27Z -linkmode=external -extld=/musl/x86_64/bin/musl-gcc -extldflags '-fno-PIC -static-pie -Wl,-z,stack-size=8388608'" + build -tags=assets,sqlite_foreign_keys,sqlite_json,static_build,noasm`, + wantMajorVersion: "2", + wantFullVersion: "v2.7.1", + }, + { + name: "gitea ldflags", + ldflags: ` build -ldflags=" -X \"main.MakeVersion=GNU Make 4.1\" -X \"main.Version=1.19.3\" -X \"main.Tags=bindata sqlite sqlite_unlock_notify\" "`, + wantMajorVersion: "1", + wantFullVersion: "v1.19.3", + }, + { + name: "docker sbom cli ldflags", + ldflags: ` build -ldflags="-w -s -extldflags '-static' -X github.com/docker/sbom-cli-plugin/internal/version.version=0.6.1-SNAPSHOT-02cf1c8 -X github.com/docker/sbom-cli-plugin/internal/version.gitCommit=02cf1c888ad6662109ac6e3be618392514a56316 -X github.com/docker/sbom-cli-plugin/internal/version.gitDescription=v0.6.1-dirty "`, + wantMajorVersion: "0", + wantFullVersion: "v0.6.1-SNAPSHOT-02cf1c8", + }, + { + name: "docker scout ldflags", + ldflags: ` build -ldflags="-w -s -extldflags '-static' -X github.com/docker/scout-cli-plugin/internal.version=0.10.0 "`, + wantMajorVersion: "0", + wantFullVersion: "v0.10.0", + }, + { + name: "influx telegraf ldflags", + ldflags: ` build -ldflags="-w -s -X github.com/influxdata/telegraf/internal.Commit=a3a884a1 -X github.com/influxdata/telegraf/internal.Branch=HEAD -X 
github.com/influxdata/telegraf/internal.Version=1.26.2"`, + wantMajorVersion: "1", + wantFullVersion: "v1.26.2", + }, + { + name: "argocd ldflags", + ldflags: ` build -ldflags="-X github.com/argoproj/argo-cd/v2/common.version=2.7.2 -X github.com/argoproj/argo-cd/v2/common.buildDate=2023-05-12T14:06:49Z -X github.com/argoproj/argo-cd/v2/common.gitCommit=cbee7e6011407ed2d1066c482db74e97e0cc6bdb -X github.com/argoproj/argo-cd/v2/common.gitTreeState=clean -X github.com/argoproj/argo-cd/v2/common.kubectlVersion=v0.24.2 -extldflags=\"-static\""`, + wantMajorVersion: "2", + wantFullVersion: "v2.7.2", + }, + { + name: "kustomize ldflags", + ldflags: ` build -ldflags="-s -X sigs.k8s.io/kustomize/api/provenance.version=kustomize/v4.5.7 -X sigs.k8s.io/kustomize/api/provenance.gitCommit=56d82a8378dfc8dc3b3b1085e5a6e67b82966bd7 -X sigs.k8s.io/kustomize/api/provenance.buildDate=2022-08-02T16:35:54Z "`, + wantMajorVersion: "4", + wantFullVersion: "v4.5.7", + }, + ////////////////////////////////////////////////////////////////// + // negative cases + { + name: "hugo ldflags", + ldflags: ` build -ldflags="-s -w -X github.com/gohugoio/hugo/common/hugo.vendorInfo=gohugoio"`, + }, + { + name: "ghostunnel ldflags", + ldflags: ` build -ldflags="-X main.version=77d9aaa"`, + }, + { + name: "opa ldflags", + ldflags: `build -ldflags=" -X github.com/open-policy-agent/opa/version.Hostname=9549178459bc"`, + }, + /////////////////////////////////////////////////////////////////// + // trickier cases + { + name: "macvlan plugin for cri-o ldflags", + ldflags: ` build -ldflags="-extldflags -static -X github.com/containernetworking/plugins/pkg/utils/buildversion.BuildVersion=v1.2.0"`, + wantMajorVersion: "1", + wantFullVersion: "v1.2.0", + }, + { + name: "coder ldflags", + ldflags: ` build -ldflags="-s -w -X 'github.com/coder/coder/buildinfo.tag=0.23.4'"`, + wantMajorVersion: "0", + wantFullVersion: "v0.23.4", + }, + /////////////////////////////////////////////////////////////////// + // don't 
know how to handle these... yet + //{ + // // package name: pkgName: "github.com/krakendio/krakend-ce/v2", + // name: "krakenD ldflags", + // ldflags: ` build -ldflags="-X github.com/luraproject/lura/v2/core.KrakendVersion=2.3.2 -X github.com/luraproject/lura/v2/core.GoVersion=1.20.4 -X github.com/luraproject/lura/v2/core.GlibcVersion=GLIBC-2.31_(debian-11) "`, + // wantMajorVersion: "2.3.2", + // wantFullVersion: "v2.3.2", + //}, + //{ + // // package name: pkgName: "github.com/krakendio/krakend-ce/v2", + // name: "krakenD ldflags -- answer embedded in the middle", + // ldflags: ` build -ldflags=" -X github.com/luraproject/lura/v2/core.GoVersion=1.20.4 -X github.com/luraproject/lura/v2/core.KrakendVersion=2.3.2 -X github.com/luraproject/lura/v2/core.GlibcVersion=GLIBC-2.31_(debian-11) "`, + // wantMajorVersion: "2.3.2", + // wantFullVersion: "v2.3.2", + //}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotMajorVersion, gotFullVersion := extractVersionFromLDFlags(tt.ldflags) + assert.Equal(t, tt.wantMajorVersion, gotMajorVersion, "unexpected major version") + assert.Equal(t, tt.wantFullVersion, gotFullVersion, "unexpected full version") + }) + } +} diff --git a/syft/pkg/cataloger/golang/parse_go_mod.go b/syft/pkg/cataloger/golang/parse_go_mod.go index 3fdc45b9a71..7ef4ac0a70f 100644 --- a/syft/pkg/cataloger/golang/parse_go_mod.go +++ b/syft/pkg/cataloger/golang/parse_go_mod.go @@ -11,9 +11,9 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) type goModCataloger struct { @@ -23,7 +23,7 @@ type goModCataloger struct { // parseGoModFile takes a go.mod and lists all packages discovered. 
// //nolint:funlen -func (c *goModCataloger) parseGoModFile(resolver source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func (c *goModCataloger) parseGoModFile(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { packages := make(map[string]pkg.Package) contents, err := io.ReadAll(reader) @@ -31,7 +31,7 @@ func (c *goModCataloger) parseGoModFile(resolver source.FileResolver, _ *generic return nil, nil, fmt.Errorf("failed to read go module: %w", err) } - file, err := modfile.Parse(reader.RealPath, contents, nil) + f, err := modfile.Parse(reader.RealPath, contents, nil) if err != nil { return nil, nil, fmt.Errorf("failed to parse go module: %w", err) } @@ -41,7 +41,7 @@ func (c *goModCataloger) parseGoModFile(resolver source.FileResolver, _ *generic log.Debugf("unable to get go.sum: %v", err) } - for _, m := range file.Require { + for _, m := range f.Require { licenses, err := c.licenses.getLicenses(resolver, m.Mod.Path, m.Mod.Version) if err != nil { log.Tracef("error getting licenses for package: %s %v", m.Mod.Path, err) @@ -51,7 +51,7 @@ func (c *goModCataloger) parseGoModFile(resolver source.FileResolver, _ *generic Name: m.Mod.Path, Version: m.Mod.Version, Licenses: pkg.NewLicenseSet(licenses...), - Locations: source.NewLocationSet(reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), PURL: packageURL(m.Mod.Path, m.Mod.Version), Language: pkg.Go, Type: pkg.GoModulePkg, @@ -63,7 +63,7 @@ func (c *goModCataloger) parseGoModFile(resolver source.FileResolver, _ *generic } // remove any old packages and replace with new ones... 
- for _, m := range file.Replace { + for _, m := range f.Replace { licenses, err := c.licenses.getLicenses(resolver, m.New.Path, m.New.Version) if err != nil { log.Tracef("error getting licenses for package: %s %v", m.New.Path, err) @@ -73,7 +73,7 @@ func (c *goModCataloger) parseGoModFile(resolver source.FileResolver, _ *generic Name: m.New.Path, Version: m.New.Version, Licenses: pkg.NewLicenseSet(licenses...), - Locations: source.NewLocationSet(reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), PURL: packageURL(m.New.Path, m.New.Version), Language: pkg.Go, Type: pkg.GoModulePkg, @@ -85,7 +85,7 @@ func (c *goModCataloger) parseGoModFile(resolver source.FileResolver, _ *generic } // remove any packages from the exclude fields - for _, m := range file.Exclude { + for _, m := range f.Exclude { delete(packages, m.Mod.Path) } @@ -104,7 +104,7 @@ func (c *goModCataloger) parseGoModFile(resolver source.FileResolver, _ *generic return pkgsSlice, nil, nil } -func parseGoSumFile(resolver source.FileResolver, reader source.LocationReadCloser) (map[string]string, error) { +func parseGoSumFile(resolver file.Resolver, reader file.LocationReadCloser) (map[string]string, error) { out := map[string]string{} if resolver == nil { diff --git a/syft/pkg/cataloger/golang/parse_go_mod_test.go b/syft/pkg/cataloger/golang/parse_go_mod_test.go index 83b75beb108..f22b7ca2a56 100644 --- a/syft/pkg/cataloger/golang/parse_go_mod_test.go +++ b/syft/pkg/cataloger/golang/parse_go_mod_test.go @@ -3,9 +3,9 @@ package golang import ( "testing" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseGoMod(t *testing.T) { @@ -20,7 +20,7 @@ func TestParseGoMod(t *testing.T) { Name: 
"github.com/bmatcuk/doublestar", Version: "v1.3.1", PURL: "pkg:golang/github.com/bmatcuk/doublestar@v1.3.1", - Locations: source.NewLocationSet(source.NewLocation("test-fixtures/one-package")), + Locations: file.NewLocationSet(file.NewLocation("test-fixtures/one-package")), Language: pkg.Go, Type: pkg.GoModulePkg, MetadataType: pkg.GolangModMetadataType, @@ -36,7 +36,7 @@ func TestParseGoMod(t *testing.T) { Name: "github.com/anchore/go-testutils", Version: "v0.0.0-20200624184116-66aa578126db", PURL: "pkg:golang/github.com/anchore/go-testutils@v0.0.0-20200624184116-66aa578126db", - Locations: source.NewLocationSet(source.NewLocation("test-fixtures/many-packages")), + Locations: file.NewLocationSet(file.NewLocation("test-fixtures/many-packages")), Language: pkg.Go, Type: pkg.GoModulePkg, MetadataType: pkg.GolangModMetadataType, @@ -46,7 +46,7 @@ func TestParseGoMod(t *testing.T) { Name: "github.com/anchore/go-version", Version: "v1.2.2-0.20200701162849-18adb9c92b9b", PURL: "pkg:golang/github.com/anchore/go-version@v1.2.2-0.20200701162849-18adb9c92b9b", - Locations: source.NewLocationSet(source.NewLocation("test-fixtures/many-packages")), + Locations: file.NewLocationSet(file.NewLocation("test-fixtures/many-packages")), Language: pkg.Go, Type: pkg.GoModulePkg, MetadataType: pkg.GolangModMetadataType, @@ -56,7 +56,7 @@ func TestParseGoMod(t *testing.T) { Name: "github.com/anchore/stereoscope", Version: "v0.0.0-20200706164556-7cf39d7f4639", PURL: "pkg:golang/github.com/anchore/stereoscope@v0.0.0-20200706164556-7cf39d7f4639", - Locations: source.NewLocationSet(source.NewLocation("test-fixtures/many-packages")), + Locations: file.NewLocationSet(file.NewLocation("test-fixtures/many-packages")), Language: pkg.Go, Type: pkg.GoModulePkg, MetadataType: pkg.GolangModMetadataType, @@ -66,7 +66,7 @@ func TestParseGoMod(t *testing.T) { Name: "github.com/bmatcuk/doublestar", Version: "v8.8.8", PURL: "pkg:golang/github.com/bmatcuk/doublestar@v8.8.8", - Locations: 
source.NewLocationSet(source.NewLocation("test-fixtures/many-packages")), + Locations: file.NewLocationSet(file.NewLocation("test-fixtures/many-packages")), Language: pkg.Go, Type: pkg.GoModulePkg, MetadataType: pkg.GolangModMetadataType, @@ -76,7 +76,7 @@ func TestParseGoMod(t *testing.T) { Name: "github.com/go-test/deep", Version: "v1.0.6", PURL: "pkg:golang/github.com/go-test/deep@v1.0.6", - Locations: source.NewLocationSet(source.NewLocation("test-fixtures/many-packages")), + Locations: file.NewLocationSet(file.NewLocation("test-fixtures/many-packages")), Language: pkg.Go, Type: pkg.GoModulePkg, MetadataType: pkg.GolangModMetadataType, @@ -109,7 +109,7 @@ func Test_GoSumHashes(t *testing.T) { Name: "github.com/CycloneDX/cyclonedx-go", Version: "v0.6.0", PURL: "pkg:golang/github.com/CycloneDX/cyclonedx-go@v0.6.0", - Locations: source.NewLocationSet(source.NewLocation("go.mod")), + Locations: file.NewLocationSet(file.NewLocation("go.mod")), FoundBy: "go-mod-file-cataloger", Language: pkg.Go, Type: pkg.GoModulePkg, @@ -120,7 +120,7 @@ func Test_GoSumHashes(t *testing.T) { Name: "github.com/acarl005/stripansi", Version: "v0.0.0-20180116102854-5a71ef0e047d", PURL: "pkg:golang/github.com/acarl005/stripansi@v0.0.0-20180116102854-5a71ef0e047d", - Locations: source.NewLocationSet(source.NewLocation("go.mod")), + Locations: file.NewLocationSet(file.NewLocation("go.mod")), FoundBy: "go-mod-file-cataloger", Language: pkg.Go, Type: pkg.GoModulePkg, @@ -133,7 +133,7 @@ func Test_GoSumHashes(t *testing.T) { Name: "github.com/mgutz/ansi", Version: "v0.0.0-20200706080929-d51e80ef957d", PURL: "pkg:golang/github.com/mgutz/ansi@v0.0.0-20200706080929-d51e80ef957d", - Locations: source.NewLocationSet(source.NewLocation("go.mod")), + Locations: file.NewLocationSet(file.NewLocation("go.mod")), FoundBy: "go-mod-file-cataloger", Language: pkg.Go, Type: pkg.GoModulePkg, diff --git a/syft/pkg/cataloger/haskell/package.go b/syft/pkg/cataloger/haskell/package.go index 
c7c1aa1581a..ed47921b9f0 100644 --- a/syft/pkg/cataloger/haskell/package.go +++ b/syft/pkg/cataloger/haskell/package.go @@ -2,15 +2,15 @@ package haskell import ( "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackage(name, version string, m *pkg.HackageMetadata, locations ...source.Location) pkg.Package { +func newPackage(name, version string, m *pkg.HackageMetadata, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: name, Version: version, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(name, version), Language: pkg.Haskell, Type: pkg.HackagePkg, diff --git a/syft/pkg/cataloger/haskell/parse_cabal_freeze.go b/syft/pkg/cataloger/haskell/parse_cabal_freeze.go index d95446984cc..abb2c82c9b2 100644 --- a/syft/pkg/cataloger/haskell/parse_cabal_freeze.go +++ b/syft/pkg/cataloger/haskell/parse_cabal_freeze.go @@ -8,15 +8,15 @@ import ( "strings" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseCabalFreeze // parseCabalFreeze is a parser function for cabal.project.freeze contents, returning all packages discovered. 
-func parseCabalFreeze(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseCabalFreeze(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { r := bufio.NewReader(reader) var pkgs []pkg.Package for { diff --git a/syft/pkg/cataloger/haskell/parse_cabal_freeze_test.go b/syft/pkg/cataloger/haskell/parse_cabal_freeze_test.go index 2c4a96c77b8..acb58b74e97 100644 --- a/syft/pkg/cataloger/haskell/parse_cabal_freeze_test.go +++ b/syft/pkg/cataloger/haskell/parse_cabal_freeze_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseCabalFreeze(t *testing.T) { fixture := "test-fixtures/cabal.project.freeze" - locationSet := source.NewLocationSet(source.NewLocation(fixture)) + locationSet := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { diff --git a/syft/pkg/cataloger/haskell/parse_stack_lock.go b/syft/pkg/cataloger/haskell/parse_stack_lock.go index de41a57672d..3eabd79784f 100644 --- a/syft/pkg/cataloger/haskell/parse_stack_lock.go +++ b/syft/pkg/cataloger/haskell/parse_stack_lock.go @@ -8,9 +8,9 @@ import ( "gopkg.in/yaml.v3" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseStackLock @@ -38,7 +38,7 @@ type completedSnapshot struct { } // parseStackLock is a parser function for stack.yaml.lock contents, returning all packages discovered. 
-func parseStackLock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseStackLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { bytes, err := io.ReadAll(reader) if err != nil { return nil, nil, fmt.Errorf("failed to load stack.yaml.lock file: %w", err) diff --git a/syft/pkg/cataloger/haskell/parse_stack_lock_test.go b/syft/pkg/cataloger/haskell/parse_stack_lock_test.go index 2cdfbc75b86..d41b8704261 100644 --- a/syft/pkg/cataloger/haskell/parse_stack_lock_test.go +++ b/syft/pkg/cataloger/haskell/parse_stack_lock_test.go @@ -4,15 +4,15 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseStackLock(t *testing.T) { url := "https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/19/14.yaml" fixture := "test-fixtures/stack.yaml.lock" - locationSet := source.NewLocationSet(source.NewLocation(fixture)) + locationSet := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { diff --git a/syft/pkg/cataloger/haskell/parse_stack_yaml.go b/syft/pkg/cataloger/haskell/parse_stack_yaml.go index 8404f4bf47c..c31bc6a5cf3 100644 --- a/syft/pkg/cataloger/haskell/parse_stack_yaml.go +++ b/syft/pkg/cataloger/haskell/parse_stack_yaml.go @@ -7,9 +7,9 @@ import ( "gopkg.in/yaml.v3" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseStackYaml @@ -19,7 +19,7 @@ type stackYaml struct { } // parseStackYaml is a parser function for stack.yaml contents, returning all packages discovered. 
-func parseStackYaml(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseStackYaml(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { bytes, err := io.ReadAll(reader) if err != nil { return nil, nil, fmt.Errorf("failed to load stack.yaml file: %w", err) diff --git a/syft/pkg/cataloger/haskell/parse_stack_yaml_test.go b/syft/pkg/cataloger/haskell/parse_stack_yaml_test.go index 1e035a7a60e..9946de99be8 100644 --- a/syft/pkg/cataloger/haskell/parse_stack_yaml_test.go +++ b/syft/pkg/cataloger/haskell/parse_stack_yaml_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseStackYaml(t *testing.T) { fixture := "test-fixtures/stack.yaml" - locationSet := source.NewLocationSet(source.NewLocation(fixture)) + locationSet := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { diff --git a/syft/pkg/cataloger/internal/pkgtest/observing_resolver.go b/syft/pkg/cataloger/internal/pkgtest/observing_resolver.go index fd0a5428a08..6e4c23ebe83 100644 --- a/syft/pkg/cataloger/internal/pkgtest/observing_resolver.go +++ b/syft/pkg/cataloger/internal/pkgtest/observing_resolver.go @@ -7,23 +7,23 @@ import ( "github.com/scylladb/go-set/strset" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) -var _ source.FileResolver = (*ObservingResolver)(nil) +var _ file.Resolver = (*ObservingResolver)(nil) type ObservingResolver struct { - decorated source.FileResolver + decorated file.Resolver pathQueries map[string][]string - pathResponses []source.Location - contentQueries []source.Location + pathResponses []file.Location + contentQueries []file.Location 
emptyPathResponses map[string][]string } -func NewObservingResolver(resolver source.FileResolver) *ObservingResolver { +func NewObservingResolver(resolver file.Resolver) *ObservingResolver { return &ObservingResolver{ decorated: resolver, - pathResponses: make([]source.Location, 0), + pathResponses: make([]file.Location, 0), emptyPathResponses: make(map[string][]string), pathQueries: make(map[string][]string), } @@ -138,11 +138,11 @@ func (r *ObservingResolver) addPathQuery(name string, input ...string) { r.pathQueries[name] = append(r.pathQueries[name], input...) } -func (r *ObservingResolver) addPathResponse(locs ...source.Location) { +func (r *ObservingResolver) addPathResponse(locs ...file.Location) { r.pathResponses = append(r.pathResponses, locs...) } -func (r *ObservingResolver) addEmptyPathResponse(name string, locs []source.Location, paths ...string) { +func (r *ObservingResolver) addEmptyPathResponse(name string, locs []file.Location, paths ...string) { if len(locs) == 0 { results := r.emptyPathResponses[name] results = append(results, paths...) @@ -150,7 +150,7 @@ func (r *ObservingResolver) addEmptyPathResponse(name string, locs []source.Loca } } -func (r *ObservingResolver) FilesByPath(paths ...string) ([]source.Location, error) { +func (r *ObservingResolver) FilesByPath(paths ...string) ([]file.Location, error) { name := "FilesByPath" r.addPathQuery(name, paths...) @@ -161,7 +161,7 @@ func (r *ObservingResolver) FilesByPath(paths ...string) ([]source.Location, err return locs, err } -func (r *ObservingResolver) FilesByGlob(patterns ...string) ([]source.Location, error) { +func (r *ObservingResolver) FilesByGlob(patterns ...string) ([]file.Location, error) { name := "FilesByGlob" r.addPathQuery(name, patterns...) 
@@ -172,7 +172,7 @@ func (r *ObservingResolver) FilesByGlob(patterns ...string) ([]source.Location, return locs, err } -func (r *ObservingResolver) FilesByMIMEType(types ...string) ([]source.Location, error) { +func (r *ObservingResolver) FilesByMIMEType(types ...string) ([]file.Location, error) { name := "FilesByMIMEType" r.addPathQuery(name, types...) @@ -183,7 +183,7 @@ func (r *ObservingResolver) FilesByMIMEType(types ...string) ([]source.Location, return locs, err } -func (r *ObservingResolver) RelativeFileByPath(l source.Location, path string) *source.Location { +func (r *ObservingResolver) RelativeFileByPath(l file.Location, path string) *file.Location { name := "RelativeFileByPath" r.addPathQuery(name, path) @@ -201,7 +201,7 @@ func (r *ObservingResolver) RelativeFileByPath(l source.Location, path string) * // For the content resolver methods... -func (r *ObservingResolver) FileContentsByLocation(location source.Location) (io.ReadCloser, error) { +func (r *ObservingResolver) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { r.contentQueries = append(r.contentQueries, location) reader, err := r.decorated.FileContentsByLocation(location) return reader, err @@ -209,7 +209,7 @@ func (r *ObservingResolver) FileContentsByLocation(location source.Location) (io // For the remaining resolver methods... 
-func (r *ObservingResolver) AllLocations() <-chan source.Location { +func (r *ObservingResolver) AllLocations() <-chan file.Location { return r.decorated.AllLocations() } @@ -217,6 +217,6 @@ func (r *ObservingResolver) HasPath(s string) bool { return r.decorated.HasPath(s) } -func (r *ObservingResolver) FileMetadataByLocation(location source.Location) (source.FileMetadata, error) { +func (r *ObservingResolver) FileMetadataByLocation(location file.Location) (file.Metadata, error) { return r.decorated.FileMetadataByLocation(location) } diff --git a/syft/pkg/cataloger/internal/pkgtest/test_generic_parser.go b/syft/pkg/cataloger/internal/pkgtest/test_generic_parser.go index 5d230b0119a..9545c66b6fd 100644 --- a/syft/pkg/cataloger/internal/pkgtest/test_generic_parser.go +++ b/syft/pkg/cataloger/internal/pkgtest/test_generic_parser.go @@ -14,13 +14,14 @@ import ( "github.com/anchore/stereoscope/pkg/imagetest" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" "github.com/anchore/syft/syft/source" ) -type locationComparer func(x, y source.Location) bool +type locationComparer func(x, y file.Location) bool type licenseComparer func(x, y pkg.License) bool type CatalogTester struct { @@ -32,8 +33,8 @@ type CatalogTester struct { ignoreUnfulfilledPathResponses map[string][]string ignoreAnyUnfulfilledPaths []string env *generic.Environment - reader source.LocationReadCloser - resolver source.FileResolver + reader file.LocationReadCloser + resolver file.Resolver wantErr require.ErrorAssertionFunc compareOptions []cmp.Option locationComparer locationComparer @@ -58,13 +59,13 @@ func NewCatalogTester() *CatalogTester { } } -func DefaultLocationComparer(x, y source.Location) bool { +func DefaultLocationComparer(x, y file.Location) bool { return cmp.Equal(x.Coordinates, y.Coordinates) && cmp.Equal(x.VirtualPath, y.VirtualPath) } 
func DefaultLicenseComparer(x, y pkg.License) bool { return cmp.Equal(x, y, cmp.Comparer(DefaultLocationComparer), cmp.Comparer( - func(x, y source.LocationSet) bool { + func(x, y file.LocationSet) bool { xs := x.ToSlice() ys := y.ToSlice() if len(xs) != len(ys) { @@ -100,16 +101,16 @@ func (p *CatalogTester) FromFile(t *testing.T, path string) *CatalogTester { fixture, err := os.Open(path) require.NoError(t, err) - p.reader = source.LocationReadCloser{ - Location: source.NewLocation(fixture.Name()), + p.reader = file.LocationReadCloser{ + Location: file.NewLocation(fixture.Name()), ReadCloser: fixture, } return p } func (p *CatalogTester) FromString(location, data string) *CatalogTester { - p.reader = source.LocationReadCloser{ - Location: source.NewLocation(location), + p.reader = file.LocationReadCloser{ + Location: file.NewLocation(location), ReadCloser: io.NopCloser(strings.NewReader(data)), } return p @@ -139,7 +140,7 @@ func (p *CatalogTester) WithErrorAssertion(a require.ErrorAssertionFunc) *Catalo return p } -func (p *CatalogTester) WithResolver(r source.FileResolver) *CatalogTester { +func (p *CatalogTester) WithResolver(r file.Resolver) *CatalogTester { p.resolver = r return p } @@ -158,14 +159,14 @@ func (p *CatalogTester) WithImageResolver(t *testing.T, fixtureName string) *Cat } func (p *CatalogTester) IgnoreLocationLayer() *CatalogTester { - p.locationComparer = func(x, y source.Location) bool { + p.locationComparer = func(x, y file.Location) bool { return cmp.Equal(x.Coordinates.RealPath, y.Coordinates.RealPath) && cmp.Equal(x.VirtualPath, y.VirtualPath) } // we need to update the license comparer to use the ignored location layer p.licenseComparer = func(x, y pkg.License) bool { return cmp.Equal(x, y, cmp.Comparer(p.locationComparer), cmp.Comparer( - func(x, y source.LocationSet) bool { + func(x, y file.LocationSet) bool { xs := x.ToSlice() ys := y.ToSlice() if len(xs) != len(ys) { @@ -259,7 +260,7 @@ func (p *CatalogTester) assertPkgs(t 
*testing.T, pkgs []pkg.Package, relationshi cmpopts.IgnoreFields(pkg.Package{}, "id"), // note: ID is not deterministic for test purposes cmpopts.SortSlices(pkg.Less), cmp.Comparer( - func(x, y source.LocationSet) bool { + func(x, y file.LocationSet) bool { xs := x.ToSlice() ys := y.ToSlice() @@ -345,7 +346,7 @@ func AssertPackagesEqual(t *testing.T, a, b pkg.Package) { opts := []cmp.Option{ cmpopts.IgnoreFields(pkg.Package{}, "id"), // note: ID is not deterministic for test purposes cmp.Comparer( - func(x, y source.LocationSet) bool { + func(x, y file.LocationSet) bool { xs := x.ToSlice() ys := y.ToSlice() diff --git a/syft/pkg/cataloger/java/archive_parser.go b/syft/pkg/cataloger/java/archive_parser.go index d6c0ad926f5..a1efd022d0c 100644 --- a/syft/pkg/cataloger/java/archive_parser.go +++ b/syft/pkg/cataloger/java/archive_parser.go @@ -7,13 +7,12 @@ import ( "path" "strings" - "github.com/anchore/syft/internal/file" + intFile "github.com/anchore/syft/internal/file" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" - syftFile "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseJavaArchive @@ -43,8 +42,8 @@ var javaArchiveHashes = []crypto.Hash{ } type archiveParser struct { - fileManifest file.ZipFileManifest - location source.Location + fileManifest intFile.ZipFileManifest + location file.Location archivePath string contentPath string fileInfo archiveFilename @@ -52,7 +51,7 @@ type archiveParser struct { } // parseJavaArchive is a parser function for java archive contents, returning all Java libraries and nested archives. 
-func parseJavaArchive(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseJavaArchive(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { parser, cleanupFn, err := newJavaArchiveParser(reader, true) // note: even on error, we should always run cleanup functions defer cleanupFn() @@ -72,7 +71,7 @@ func uniquePkgKey(p *pkg.Package) string { // newJavaArchiveParser returns a new java archive parser object for the given archive. Can be configured to discover // and parse nested archives or ignore them. -func newJavaArchiveParser(reader source.LocationReadCloser, detectNested bool) (*archiveParser, func(), error) { +func newJavaArchiveParser(reader file.LocationReadCloser, detectNested bool) (*archiveParser, func(), error) { // fetch the last element of the virtual path virtualElements := strings.Split(reader.AccessPath(), ":") currentFilepath := virtualElements[len(virtualElements)-1] @@ -82,7 +81,7 @@ func newJavaArchiveParser(reader source.LocationReadCloser, detectNested bool) ( return nil, cleanupFn, fmt.Errorf("unable to process java archive: %w", err) } - fileManifest, err := file.NewZipFileManifest(archivePath) + fileManifest, err := intFile.NewZipFileManifest(archivePath) if err != nil { return nil, cleanupFn, fmt.Errorf("unable to read files from java archive: %w", err) } @@ -160,7 +159,7 @@ func (j *archiveParser) discoverMainPackage() (*pkg.Package, error) { } // fetch the manifest file - contents, err := file.ContentsFromZip(j.archivePath, manifestMatches...) + contents, err := intFile.ContentsFromZip(j.archivePath, manifestMatches...) 
if err != nil { return nil, fmt.Errorf("unable to extract java manifests (%s): %w", j.location, err) } @@ -180,7 +179,7 @@ func (j *archiveParser) discoverMainPackage() (*pkg.Package, error) { defer archiveCloser.Close() // grab and assign digest for the entire archive - digests, err := syftFile.DigestsFromFile(archiveCloser, javaArchiveHashes) + digests, err := file.NewDigestsFromFile(archiveCloser, javaArchiveHashes) if err != nil { log.Warnf("failed to create digest for file=%q: %+v", j.archivePath, err) } @@ -192,7 +191,7 @@ func (j *archiveParser) discoverMainPackage() (*pkg.Package, error) { Version: selectVersion(manifest, j.fileInfo), Language: pkg.Java, Licenses: pkg.NewLicenseSet(licenses...), - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( j.location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), ), Type: j.fileInfo.pkgType(), @@ -250,9 +249,9 @@ func (j *archiveParser) discoverPkgsFromNestedArchives(parentPkg *pkg.Package) ( // discoverPkgsFromZip finds Java archives within Java archives, returning all listed Java packages found and // associating each discovered package to the given parent package. -func discoverPkgsFromZip(location source.Location, archivePath, contentPath string, fileManifest file.ZipFileManifest, parentPkg *pkg.Package) ([]pkg.Package, []artifact.Relationship, error) { +func discoverPkgsFromZip(location file.Location, archivePath, contentPath string, fileManifest intFile.ZipFileManifest, parentPkg *pkg.Package) ([]pkg.Package, []artifact.Relationship, error) { // search and parse pom.properties files & fetch the contents - openers, err := file.ExtractFromZipToUniqueTempFile(archivePath, contentPath, fileManifest.GlobMatch(archiveFormatGlobs...)...) + openers, err := intFile.ExtractFromZipToUniqueTempFile(archivePath, contentPath, fileManifest.GlobMatch(archiveFormatGlobs...)...) 
if err != nil { return nil, nil, fmt.Errorf("unable to extract files from zip: %w", err) } @@ -261,7 +260,7 @@ func discoverPkgsFromZip(location source.Location, archivePath, contentPath stri } // discoverPkgsFromOpeners finds Java archives within the given files and associates them with the given parent package. -func discoverPkgsFromOpeners(location source.Location, openers map[string]file.Opener, parentPkg *pkg.Package) ([]pkg.Package, []artifact.Relationship, error) { +func discoverPkgsFromOpeners(location file.Location, openers map[string]intFile.Opener, parentPkg *pkg.Package) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package var relationships []artifact.Relationship @@ -290,7 +289,7 @@ func discoverPkgsFromOpeners(location source.Location, openers map[string]file.O } // discoverPkgsFromOpener finds Java archives within the given file. -func discoverPkgsFromOpener(location source.Location, pathWithinArchive string, archiveOpener file.Opener) ([]pkg.Package, []artifact.Relationship, error) { +func discoverPkgsFromOpener(location file.Location, pathWithinArchive string, archiveOpener intFile.Opener) ([]pkg.Package, []artifact.Relationship, error) { archiveReadCloser, err := archiveOpener.Open() if err != nil { return nil, nil, fmt.Errorf("unable to open archived file from tempdir: %w", err) @@ -302,9 +301,9 @@ func discoverPkgsFromOpener(location source.Location, pathWithinArchive string, }() nestedPath := fmt.Sprintf("%s:%s", location.AccessPath(), pathWithinArchive) - nestedLocation := source.NewLocationFromCoordinates(location.Coordinates) + nestedLocation := file.NewLocationFromCoordinates(location.Coordinates) nestedLocation.VirtualPath = nestedPath - nestedPkgs, nestedRelationships, err := parseJavaArchive(nil, nil, source.LocationReadCloser{ + nestedPkgs, nestedRelationships, err := parseJavaArchive(nil, nil, file.LocationReadCloser{ Location: nestedLocation, ReadCloser: archiveReadCloser, }) @@ -315,8 +314,8 @@ func 
discoverPkgsFromOpener(location source.Location, pathWithinArchive string, return nestedPkgs, nestedRelationships, nil } -func pomPropertiesByParentPath(archivePath string, location source.Location, extractPaths []string) (map[string]pkg.PomProperties, error) { - contentsOfMavenPropertiesFiles, err := file.ContentsFromZip(archivePath, extractPaths...) +func pomPropertiesByParentPath(archivePath string, location file.Location, extractPaths []string) (map[string]pkg.PomProperties, error) { + contentsOfMavenPropertiesFiles, err := intFile.ContentsFromZip(archivePath, extractPaths...) if err != nil { return nil, fmt.Errorf("unable to extract maven files: %w", err) } @@ -344,8 +343,8 @@ func pomPropertiesByParentPath(archivePath string, location source.Location, ext return propertiesByParentPath, nil } -func pomProjectByParentPath(archivePath string, location source.Location, extractPaths []string) (map[string]pkg.PomProject, error) { - contentsOfMavenProjectFiles, err := file.ContentsFromZip(archivePath, extractPaths...) +func pomProjectByParentPath(archivePath string, location file.Location, extractPaths []string) (map[string]pkg.PomProject, error) { + contentsOfMavenProjectFiles, err := intFile.ContentsFromZip(archivePath, extractPaths...) if err != nil { return nil, fmt.Errorf("unable to extract maven files: %w", err) } @@ -374,7 +373,7 @@ func pomProjectByParentPath(archivePath string, location source.Location, extrac // packagesFromPomProperties processes a single Maven POM properties for a given parent package, returning all listed Java packages found and // associating each discovered package to the given parent package. Note the pom.xml is optional, the pom.properties is not. 
-func newPackageFromMavenData(pomProperties pkg.PomProperties, pomProject *pkg.PomProject, parentPkg *pkg.Package, location source.Location) *pkg.Package { +func newPackageFromMavenData(pomProperties pkg.PomProperties, pomProject *pkg.PomProject, parentPkg *pkg.Package, location file.Location) *pkg.Package { // keep the artifact name within the virtual path if this package does not match the parent package vPathSuffix := "" if !strings.HasPrefix(pomProperties.ArtifactID, parentPkg.Name) { @@ -386,7 +385,7 @@ func newPackageFromMavenData(pomProperties pkg.PomProperties, pomProject *pkg.Po p := pkg.Package{ Name: pomProperties.ArtifactID, Version: pomProperties.Version, - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), ), Language: pkg.Java, diff --git a/syft/pkg/cataloger/java/archive_parser_test.go b/syft/pkg/cataloger/java/archive_parser_test.go index 5385dec7382..422de7d480f 100644 --- a/syft/pkg/cataloger/java/archive_parser_test.go +++ b/syft/pkg/cataloger/java/archive_parser_test.go @@ -16,9 +16,9 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/syft/internal" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func generateJavaBuildFixture(t *testing.T, fixturePath string) { @@ -100,7 +100,7 @@ func TestParseJar(t *testing.T) { Version: "1.0-SNAPSHOT", PURL: "pkg:maven/io.jenkins.plugins/example-jenkins-plugin@1.0-SNAPSHOT", Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT License", source.NewLocation("test-fixtures/java-builds/packages/example-jenkins-plugin.hpi")), + pkg.NewLicenseFromLocations("MIT License", file.NewLocation("test-fixtures/java-builds/packages/example-jenkins-plugin.hpi")), ), Language: pkg.Java, Type: pkg.JenkinsPluginPkg, @@ -272,12 +272,12 @@ func TestParseJar(t *testing.T) { 
for k := range test.expected { p := test.expected[k] - p.Locations.Add(source.NewLocation(test.fixture)) + p.Locations.Add(file.NewLocation(test.fixture)) test.expected[k] = p } - parser, cleanupFn, err := newJavaArchiveParser(source.LocationReadCloser{ - Location: source.NewLocation(fixture.Name()), + parser, cleanupFn, err := newJavaArchiveParser(file.LocationReadCloser{ + Location: file.NewLocation(fixture.Name()), ReadCloser: fixture, }, false) defer cleanupFn() @@ -546,8 +546,8 @@ func TestParseNestedJar(t *testing.T) { fixture, err := os.Open(test.fixture) require.NoError(t, err) - actual, _, err := parseJavaArchive(nil, nil, source.LocationReadCloser{ - Location: source.NewLocation(fixture.Name()), + actual, _, err := parseJavaArchive(nil, nil, file.LocationReadCloser{ + Location: file.NewLocation(fixture.Name()), ReadCloser: fixture, }) require.NoError(t, err) @@ -975,7 +975,7 @@ func Test_newPackageFromMavenData(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - locations := source.NewLocationSet(source.NewLocation(virtualPath)) + locations := file.NewLocationSet(file.NewLocation(virtualPath)) if test.expectedPackage != nil { test.expectedPackage.Locations = locations if test.expectedPackage.Metadata.(pkg.JavaMetadata).Parent != nil { @@ -987,7 +987,7 @@ func Test_newPackageFromMavenData(t *testing.T) { } test.expectedParent.Locations = locations - actualPackage := newPackageFromMavenData(test.props, test.project, test.parent, source.NewLocation(virtualPath)) + actualPackage := newPackageFromMavenData(test.props, test.project, test.parent, file.NewLocation(virtualPath)) if test.expectedPackage == nil { require.Nil(t, actualPackage) } else { diff --git a/syft/pkg/cataloger/java/graalvm_native_image_cataloger.go b/syft/pkg/cataloger/java/graalvm_native_image_cataloger.go index 2e8b63c932b..db462ea8c99 100644 --- a/syft/pkg/cataloger/java/graalvm_native_image_cataloger.go +++ 
b/syft/pkg/cataloger/java/graalvm_native_image_cataloger.go @@ -17,9 +17,9 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/cpe" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader" - "github.com/anchore/syft/syft/source" ) type nativeImageCycloneDX struct { @@ -571,7 +571,7 @@ func fetchPkgs(reader unionreader.UnionReader, filename string) []pkg.Package { } // Catalog attempts to find any native image executables reachable from a resolver. -func (c *NativeImageCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { +func (c *NativeImageCataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package fileMatches, err := resolver.FilesByMIMEType(internal.ExecutableMIMETypeSet.List()...) if err != nil { diff --git a/syft/pkg/cataloger/java/parse_gradle_lockfile.go b/syft/pkg/cataloger/java/parse_gradle_lockfile.go index 803639ab43a..65adf7aebb2 100644 --- a/syft/pkg/cataloger/java/parse_gradle_lockfile.go +++ b/syft/pkg/cataloger/java/parse_gradle_lockfile.go @@ -5,9 +5,9 @@ import ( "strings" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) const gradleLockfileGlob = "**/gradle.lockfile*" @@ -19,7 +19,7 @@ type LockfileDependency struct { Version string } -func parseGradleLockfile(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseGradleLockfile(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package // Create a new scanner to read the file @@ -51,7 +51,7 @@ func parseGradleLockfile(_ 
source.FileResolver, _ *generic.Environment, reader s mappedPkg := pkg.Package{ Name: dep.Name, Version: dep.Version, - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), ), Language: pkg.Java, diff --git a/syft/pkg/cataloger/java/parse_gradle_lockfile_test.go b/syft/pkg/cataloger/java/parse_gradle_lockfile_test.go index 65129efcff2..babc3d3e558 100644 --- a/syft/pkg/cataloger/java/parse_gradle_lockfile_test.go +++ b/syft/pkg/cataloger/java/parse_gradle_lockfile_test.go @@ -3,9 +3,9 @@ package java import ( "testing" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func Test_parserGradleLockfile(t *testing.T) { @@ -44,7 +44,7 @@ func Test_parserGradleLockfile(t *testing.T) { for _, test := range tests { t.Run(test.input, func(t *testing.T) { for i := range test.expected { - test.expected[i].Locations.Add(source.NewLocation(test.input)) + test.expected[i].Locations.Add(file.NewLocation(test.input)) } pkgtest.TestFileParser(t, test.input, parseGradleLockfile, test.expected, nil) }) diff --git a/syft/pkg/cataloger/java/parse_pom_xml.go b/syft/pkg/cataloger/java/parse_pom_xml.go index 8df940869ed..8b43ada5013 100644 --- a/syft/pkg/cataloger/java/parse_pom_xml.go +++ b/syft/pkg/cataloger/java/parse_pom_xml.go @@ -12,16 +12,16 @@ import ( "golang.org/x/net/html/charset" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) const pomXMLGlob = "*pom.xml" var propertyMatcher = regexp.MustCompile("[$][{][^}]+[}]") -func parserPomXML(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parserPomXML(_ 
file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { pom, err := decodePomXML(reader) if err != nil { return nil, nil, err @@ -65,10 +65,11 @@ func newPomProject(path string, p gopom.Project) *pkg.PomProject { } } -func newPackageFromPom(pom gopom.Project, dep gopom.Dependency, locations ...source.Location) pkg.Package { +func newPackageFromPom(pom gopom.Project, dep gopom.Dependency, locations ...file.Location) pkg.Package { m := pkg.JavaMetadata{ PomProperties: &pkg.PomProperties{ GroupID: resolveProperty(pom, dep.GroupID), + Scope: resolveProperty(pom, dep.Scope), }, } @@ -78,7 +79,7 @@ func newPackageFromPom(pom gopom.Project, dep gopom.Dependency, locations ...sou p := pkg.Package{ Name: name, Version: version, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(name, version, m), Language: pkg.Java, Type: pkg.JavaPkg, // TODO: should we differentiate between packages from jar/war/zip versus packages from a pom.xml that were not installed yet? 
diff --git a/syft/pkg/cataloger/java/parse_pom_xml_test.go b/syft/pkg/cataloger/java/parse_pom_xml_test.go index 2e4d7a846b6..cebe979be0b 100644 --- a/syft/pkg/cataloger/java/parse_pom_xml_test.go +++ b/syft/pkg/cataloger/java/parse_pom_xml_test.go @@ -7,9 +7,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/vifraa/gopom" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func Test_parserPomXML(t *testing.T) { @@ -39,7 +39,10 @@ func Test_parserPomXML(t *testing.T) { Type: pkg.JavaPkg, MetadataType: pkg.JavaMetadataType, Metadata: pkg.JavaMetadata{ - PomProperties: &pkg.PomProperties{GroupID: "junit"}, + PomProperties: &pkg.PomProperties{ + GroupID: "junit", + Scope: "test", + }, }, }, }, @@ -49,7 +52,7 @@ func Test_parserPomXML(t *testing.T) { for _, test := range tests { t.Run(test.input, func(t *testing.T) { for i := range test.expected { - test.expected[i].Locations.Add(source.NewLocation(test.input)) + test.expected[i].Locations.Add(file.NewLocation(test.input)) } pkgtest.TestFileParser(t, test.input, parserPomXML, test.expected, nil) }) @@ -83,7 +86,10 @@ func Test_parseCommonsTextPomXMLProject(t *testing.T) { Type: pkg.JavaPkg, MetadataType: pkg.JavaMetadataType, Metadata: pkg.JavaMetadata{ - PomProperties: &pkg.PomProperties{GroupID: "org.junit.jupiter"}, + PomProperties: &pkg.PomProperties{ + GroupID: "org.junit.jupiter", + Scope: "test", + }, }, }, { @@ -94,7 +100,10 @@ func Test_parseCommonsTextPomXMLProject(t *testing.T) { Type: pkg.JavaPkg, MetadataType: pkg.JavaMetadataType, Metadata: pkg.JavaMetadata{ - PomProperties: &pkg.PomProperties{GroupID: "org.assertj"}, + PomProperties: &pkg.PomProperties{ + GroupID: "org.assertj", + Scope: "test", + }, }, }, { @@ -105,7 +114,10 @@ func Test_parseCommonsTextPomXMLProject(t *testing.T) { Type: pkg.JavaPkg, MetadataType: pkg.JavaMetadataType, Metadata: pkg.JavaMetadata{ - 
PomProperties: &pkg.PomProperties{GroupID: "commons-io"}, + PomProperties: &pkg.PomProperties{ + GroupID: "commons-io", + Scope: "test", + }, }, }, { @@ -116,7 +128,10 @@ func Test_parseCommonsTextPomXMLProject(t *testing.T) { Type: pkg.JavaPkg, MetadataType: pkg.JavaMetadataType, Metadata: pkg.JavaMetadata{ - PomProperties: &pkg.PomProperties{GroupID: "org.mockito"}, + PomProperties: &pkg.PomProperties{ + GroupID: "org.mockito", + Scope: "test", + }, }, }, { @@ -127,7 +142,10 @@ func Test_parseCommonsTextPomXMLProject(t *testing.T) { Type: pkg.JavaPkg, MetadataType: pkg.JavaMetadataType, Metadata: pkg.JavaMetadata{ - PomProperties: &pkg.PomProperties{GroupID: "org.graalvm.js"}, + PomProperties: &pkg.PomProperties{ + GroupID: "org.graalvm.js", + Scope: "test", + }, }, }, { @@ -138,7 +156,10 @@ func Test_parseCommonsTextPomXMLProject(t *testing.T) { Type: pkg.JavaPkg, MetadataType: pkg.JavaMetadataType, Metadata: pkg.JavaMetadata{ - PomProperties: &pkg.PomProperties{GroupID: "org.graalvm.js"}, + PomProperties: &pkg.PomProperties{ + GroupID: "org.graalvm.js", + Scope: "test", + }, }, }, { @@ -149,7 +170,10 @@ func Test_parseCommonsTextPomXMLProject(t *testing.T) { Type: pkg.JavaPkg, MetadataType: pkg.JavaMetadataType, Metadata: pkg.JavaMetadata{ - PomProperties: &pkg.PomProperties{GroupID: "org.apache.commons"}, + PomProperties: &pkg.PomProperties{ + GroupID: "org.apache.commons", + Scope: "test", + }, }, }, { @@ -160,7 +184,10 @@ func Test_parseCommonsTextPomXMLProject(t *testing.T) { Type: pkg.JavaPkg, MetadataType: pkg.JavaMetadataType, Metadata: pkg.JavaMetadata{ - PomProperties: &pkg.PomProperties{GroupID: "org.openjdk.jmh"}, + PomProperties: &pkg.PomProperties{ + GroupID: "org.openjdk.jmh", + Scope: "test", + }, }, }, { @@ -171,7 +198,10 @@ func Test_parseCommonsTextPomXMLProject(t *testing.T) { Type: pkg.JavaPkg, MetadataType: pkg.JavaMetadataType, Metadata: pkg.JavaMetadata{ - PomProperties: &pkg.PomProperties{GroupID: "org.openjdk.jmh"}, + PomProperties: 
&pkg.PomProperties{ + GroupID: "org.openjdk.jmh", + Scope: "test", + }, }, }, }, @@ -181,7 +211,7 @@ func Test_parseCommonsTextPomXMLProject(t *testing.T) { for _, test := range tests { t.Run(test.input, func(t *testing.T) { for i := range test.expected { - test.expected[i].Locations.Add(source.NewLocation(test.input)) + test.expected[i].Locations.Add(file.NewLocation(test.input)) } pkgtest.TestFileParser(t, test.input, parserPomXML, test.expected, nil) }) diff --git a/syft/pkg/cataloger/java/tar_wrapped_archive_parser.go b/syft/pkg/cataloger/java/tar_wrapped_archive_parser.go index 99c723f44b5..05ab6dd22a5 100644 --- a/syft/pkg/cataloger/java/tar_wrapped_archive_parser.go +++ b/syft/pkg/cataloger/java/tar_wrapped_archive_parser.go @@ -3,11 +3,11 @@ package java import ( "fmt" - "github.com/anchore/syft/internal/file" + intFile "github.com/anchore/syft/internal/file" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var genericTarGlobs = []string{ @@ -45,7 +45,7 @@ var genericTarGlobs = []string{ // note: for compressed tars this is an extremely expensive operation and can lead to performance degradation. This is // due to the fact that there is no central directory header (say as in zip), which means that in order to get // a file listing within the archive you must decompress the entire archive and seek through all of the entries. 
-func parseTarWrappedJavaArchive(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseTarWrappedJavaArchive(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { contentPath, archivePath, cleanupFn, err := saveArchiveToTmp(reader.AccessPath(), reader) // note: even on error, we should always run cleanup functions defer cleanupFn() @@ -57,8 +57,8 @@ func parseTarWrappedJavaArchive(_ source.FileResolver, _ *generic.Environment, r return discoverPkgsFromTar(reader.Location, archivePath, contentPath) } -func discoverPkgsFromTar(location source.Location, archivePath, contentPath string) ([]pkg.Package, []artifact.Relationship, error) { - openers, err := file.ExtractGlobsFromTarToUniqueTempFile(archivePath, contentPath, archiveFormatGlobs...) +func discoverPkgsFromTar(location file.Location, archivePath, contentPath string) ([]pkg.Package, []artifact.Relationship, error) { + openers, err := intFile.ExtractGlobsFromTarToUniqueTempFile(archivePath, contentPath, archiveFormatGlobs...) 
if err != nil { return nil, nil, fmt.Errorf("unable to extract files from tar: %w", err) } diff --git a/syft/pkg/cataloger/java/tar_wrapped_archive_parser_test.go b/syft/pkg/cataloger/java/tar_wrapped_archive_parser_test.go index 6f40c175d48..1a3d1d1f32c 100644 --- a/syft/pkg/cataloger/java/tar_wrapped_archive_parser_test.go +++ b/syft/pkg/cataloger/java/tar_wrapped_archive_parser_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) func Test_parseTarWrappedJavaArchive(t *testing.T) { @@ -40,8 +40,8 @@ func Test_parseTarWrappedJavaArchive(t *testing.T) { t.Fatalf("failed to open fixture: %+v", err) } - actualPkgs, _, err := parseTarWrappedJavaArchive(nil, nil, source.LocationReadCloser{ - Location: source.NewLocation(test.fixture), + actualPkgs, _, err := parseTarWrappedJavaArchive(nil, nil, file.LocationReadCloser{ + Location: file.NewLocation(test.fixture), ReadCloser: fixture, }) require.NoError(t, err) diff --git a/syft/pkg/cataloger/java/zip_wrapped_archive_parser.go b/syft/pkg/cataloger/java/zip_wrapped_archive_parser.go index dffe5df74a6..930427f38f5 100644 --- a/syft/pkg/cataloger/java/zip_wrapped_archive_parser.go +++ b/syft/pkg/cataloger/java/zip_wrapped_archive_parser.go @@ -3,11 +3,11 @@ package java import ( "fmt" - "github.com/anchore/syft/internal/file" + intFile "github.com/anchore/syft/internal/file" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var genericZipGlobs = []string{ @@ -17,7 +17,7 @@ var genericZipGlobs = []string{ // TODO: when the generic archive cataloger is implemented, this should be removed (https://github.com/anchore/syft/issues/246) // parseZipWrappedJavaArchive is a parser function for java archive contents contained within arbitrary zip 
files. -func parseZipWrappedJavaArchive(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseZipWrappedJavaArchive(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { contentPath, archivePath, cleanupFn, err := saveArchiveToTmp(reader.AccessPath(), reader) // note: even on error, we should always run cleanup functions defer cleanupFn() @@ -29,7 +29,7 @@ func parseZipWrappedJavaArchive(_ source.FileResolver, _ *generic.Environment, r // functions support zips with shell scripts prepended to the file. Specifically, the helpers use the central // header at the end of the file to determine where the beginning of the zip payload is (unlike the standard lib // or archiver). - fileManifest, err := file.NewZipFileManifest(archivePath) + fileManifest, err := intFile.NewZipFileManifest(archivePath) if err != nil { return nil, nil, fmt.Errorf("unable to read files from java archive: %w", err) } diff --git a/syft/pkg/cataloger/java/zip_wrapped_archive_parser_test.go b/syft/pkg/cataloger/java/zip_wrapped_archive_parser_test.go index aa1e5108945..2f5b3328ac9 100644 --- a/syft/pkg/cataloger/java/zip_wrapped_archive_parser_test.go +++ b/syft/pkg/cataloger/java/zip_wrapped_archive_parser_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) func Test_parseZipWrappedJavaArchive(t *testing.T) { @@ -33,8 +33,8 @@ func Test_parseZipWrappedJavaArchive(t *testing.T) { t.Fatalf("failed to open fixture: %+v", err) } - actualPkgs, _, err := parseZipWrappedJavaArchive(nil, nil, source.LocationReadCloser{ - Location: source.NewLocation(test.fixture), + actualPkgs, _, err := parseZipWrappedJavaArchive(nil, nil, file.LocationReadCloser{ + Location: file.NewLocation(test.fixture), ReadCloser: fixture, }) 
require.NoError(t, err) diff --git a/syft/pkg/cataloger/javascript/cataloger_test.go b/syft/pkg/cataloger/javascript/cataloger_test.go index 5b9c18f0ed2..ca5169bafe5 100644 --- a/syft/pkg/cataloger/javascript/cataloger_test.go +++ b/syft/pkg/cataloger/javascript/cataloger_test.go @@ -3,13 +3,13 @@ package javascript import ( "testing" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func Test_JavascriptCataloger(t *testing.T) { - locationSet := source.NewLocationSet(source.NewLocation("package-lock.json")) + locationSet := file.NewLocationSet(file.NewLocation("package-lock.json")) expectedPkgs := []pkg.Package{ { Name: "@actions/core", @@ -20,7 +20,7 @@ func Test_JavascriptCataloger(t *testing.T) { Language: pkg.JavaScript, Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation("package-lock.json")), + pkg.NewLicenseFromLocations("MIT", file.NewLocation("package-lock.json")), ), MetadataType: pkg.NpmPackageLockJSONMetadataType, Metadata: pkg.NpmPackageLockJSONMetadata{Resolved: "https://registry.npmjs.org/@actions/core/-/core-1.6.0.tgz", Integrity: "sha512-NB1UAZomZlCV/LmJqkLhNTqtKfFXJZAUPcfl/zqG7EfsQdeUJtaWO98SGbuQ3pydJ3fHl2CvI/51OKYlCYYcaw=="}, @@ -45,7 +45,7 @@ func Test_JavascriptCataloger(t *testing.T) { Language: pkg.JavaScript, Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation("package-lock.json")), + pkg.NewLicenseFromLocations("MIT", file.NewLocation("package-lock.json")), ), MetadataType: pkg.NpmPackageLockJSONMetadataType, Metadata: pkg.NpmPackageLockJSONMetadata{Resolved: "https://registry.npmjs.org/cowsay/-/cowsay-1.4.0.tgz", Integrity: "sha512-rdg5k5PsHFVJheO/pmE3aDg2rUDDTfPJau6yYkZYlHFktUz+UxbE+IgnUAEyyCyv4noL5ltxXD0gZzmHPCy/9g=="}, diff --git a/syft/pkg/cataloger/javascript/package.go 
b/syft/pkg/cataloger/javascript/package.go index 468854a3927..4eaea055beb 100644 --- a/syft/pkg/cataloger/javascript/package.go +++ b/syft/pkg/cataloger/javascript/package.go @@ -8,11 +8,11 @@ import ( "github.com/anchore/packageurl-go" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackageJSONPackage(u packageJSON, indexLocation source.Location) pkg.Package { +func newPackageJSONPackage(u packageJSON, indexLocation file.Location) pkg.Package { licenseCandidates, err := u.licensesFromJSON() if err != nil { log.Warnf("unable to extract licenses from javascript package.json: %+v", err) @@ -23,7 +23,7 @@ func newPackageJSONPackage(u packageJSON, indexLocation source.Location) pkg.Pac Name: u.Name, Version: u.Version, PURL: packageURL(u.Name, u.Version), - Locations: source.NewLocationSet(indexLocation), + Locations: file.NewLocationSet(indexLocation), Language: pkg.JavaScript, Licenses: pkg.NewLicenseSet(license...), Type: pkg.NpmPkg, @@ -44,7 +44,7 @@ func newPackageJSONPackage(u packageJSON, indexLocation source.Location) pkg.Pac return p } -func newPackageLockV1Package(resolver source.FileResolver, location source.Location, name string, u lockDependency) pkg.Package { +func newPackageLockV1Package(resolver file.Resolver, location file.Location, name string, u lockDependency) pkg.Package { version := u.Version const aliasPrefixPackageLockV1 = "npm:" @@ -66,7 +66,7 @@ func newPackageLockV1Package(resolver source.FileResolver, location source.Locat pkg.Package{ Name: name, Version: version, - Locations: source.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), PURL: packageURL(name, version), Language: pkg.JavaScript, Type: pkg.NpmPkg, @@ -76,14 +76,14 @@ func newPackageLockV1Package(resolver 
source.FileResolver, location source.Locat ) } -func newPackageLockV2Package(resolver source.FileResolver, location source.Location, name string, u lockPackage) pkg.Package { +func newPackageLockV2Package(resolver file.Resolver, location file.Location, name string, u lockPackage) pkg.Package { return finalizeLockPkg( resolver, location, pkg.Package{ Name: name, Version: u.Version, - Locations: source.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), Licenses: pkg.NewLicenseSet(pkg.NewLicensesFromLocation(location, u.License...)...), PURL: packageURL(name, u.Version), Language: pkg.JavaScript, @@ -94,14 +94,14 @@ func newPackageLockV2Package(resolver source.FileResolver, location source.Locat ) } -func newPnpmPackage(resolver source.FileResolver, location source.Location, name, version string) pkg.Package { +func newPnpmPackage(resolver file.Resolver, location file.Location, name, version string) pkg.Package { return finalizeLockPkg( resolver, location, pkg.Package{ Name: name, Version: version, - Locations: source.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), PURL: packageURL(name, version), Language: pkg.JavaScript, Type: pkg.NpmPkg, @@ -109,14 +109,14 @@ func newPnpmPackage(resolver source.FileResolver, location source.Location, name ) } -func newYarnLockPackage(resolver source.FileResolver, location source.Location, name, version string) pkg.Package { +func newYarnLockPackage(resolver file.Resolver, location file.Location, name, version string) pkg.Package { return finalizeLockPkg( resolver, location, pkg.Package{ Name: name, Version: version, - Locations: source.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, 
pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), PURL: packageURL(name, version), Language: pkg.JavaScript, Type: pkg.NpmPkg, @@ -124,14 +124,14 @@ func newYarnLockPackage(resolver source.FileResolver, location source.Location, ) } -func finalizeLockPkg(resolver source.FileResolver, location source.Location, p pkg.Package) pkg.Package { +func finalizeLockPkg(resolver file.Resolver, location file.Location, p pkg.Package) pkg.Package { licenseCandidate := addLicenses(p.Name, resolver, location) p.Licenses.Add(pkg.NewLicensesFromLocation(location, licenseCandidate...)...) p.SetID() return p } -func addLicenses(name string, resolver source.FileResolver, location source.Location) (allLicenses []string) { +func addLicenses(name string, resolver file.Resolver, location file.Location) (allLicenses []string) { if resolver == nil { return allLicenses } diff --git a/syft/pkg/cataloger/javascript/parse_package_json.go b/syft/pkg/cataloger/javascript/parse_package_json.go index 59c8a5c508d..0c05aedc0e3 100644 --- a/syft/pkg/cataloger/javascript/parse_package_json.go +++ b/syft/pkg/cataloger/javascript/parse_package_json.go @@ -12,9 +12,9 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // integrity check @@ -51,7 +51,7 @@ type repository struct { var authorPattern = regexp.MustCompile(`^\s*(?P[^<(]*)(\s+<(?P.*)>)?(\s\((?P.*)\))?\s*$`) // parsePackageJSON parses a package.json and returns the discovered JavaScript packages. 
-func parsePackageJSON(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parsePackageJSON(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package dec := json.NewDecoder(reader) diff --git a/syft/pkg/cataloger/javascript/parse_package_json_test.go b/syft/pkg/cataloger/javascript/parse_package_json_test.go index c0e0b17b088..3a57f3c8272 100644 --- a/syft/pkg/cataloger/javascript/parse_package_json_test.go +++ b/syft/pkg/cataloger/javascript/parse_package_json_test.go @@ -5,9 +5,9 @@ import ( "github.com/stretchr/testify/assert" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParsePackageJSON(t *testing.T) { @@ -24,7 +24,7 @@ func TestParsePackageJSON(t *testing.T) { Type: pkg.NpmPkg, Language: pkg.JavaScript, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("Artistic-2.0", source.NewLocation("test-fixtures/pkg-json/package.json")), + pkg.NewLicenseFromLocations("Artistic-2.0", file.NewLocation("test-fixtures/pkg-json/package.json")), ), MetadataType: pkg.NpmPackageJSONMetadataType, Metadata: pkg.NpmPackageJSONMetadata{ @@ -46,7 +46,7 @@ func TestParsePackageJSON(t *testing.T) { Type: pkg.NpmPkg, Language: pkg.JavaScript, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("ISC", source.NewLocation("test-fixtures/pkg-json/package-license-object.json")), + pkg.NewLicenseFromLocations("ISC", file.NewLocation("test-fixtures/pkg-json/package-license-object.json")), ), MetadataType: pkg.NpmPackageJSONMetadataType, Metadata: pkg.NpmPackageJSONMetadata{ @@ -67,8 +67,8 @@ func TestParsePackageJSON(t *testing.T) { PURL: "pkg:npm/npm@6.14.6", Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", 
source.NewLocation("test-fixtures/pkg-json/package-license-objects.json")), - pkg.NewLicenseFromLocations("Apache-2.0", source.NewLocation("test-fixtures/pkg-json/package-license-objects.json")), + pkg.NewLicenseFromLocations("MIT", file.NewLocation("test-fixtures/pkg-json/package-license-objects.json")), + pkg.NewLicenseFromLocations("Apache-2.0", file.NewLocation("test-fixtures/pkg-json/package-license-objects.json")), ), Language: pkg.JavaScript, MetadataType: pkg.NpmPackageJSONMetadataType, @@ -128,7 +128,7 @@ func TestParsePackageJSON(t *testing.T) { PURL: "pkg:npm/npm@6.14.6", Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("Artistic-2.0", source.NewLocation("test-fixtures/pkg-json/package-nested-author.json")), + pkg.NewLicenseFromLocations("Artistic-2.0", file.NewLocation("test-fixtures/pkg-json/package-nested-author.json")), ), Language: pkg.JavaScript, MetadataType: pkg.NpmPackageJSONMetadataType, @@ -150,7 +150,7 @@ func TestParsePackageJSON(t *testing.T) { PURL: "pkg:npm/function-bind@1.1.1", Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation("test-fixtures/pkg-json/package-repo-string.json")), + pkg.NewLicenseFromLocations("MIT", file.NewLocation("test-fixtures/pkg-json/package-repo-string.json")), ), Language: pkg.JavaScript, MetadataType: pkg.NpmPackageJSONMetadataType, @@ -172,7 +172,7 @@ func TestParsePackageJSON(t *testing.T) { PURL: "pkg:npm/npm@6.14.6", Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("Artistic-2.0", source.NewLocation("test-fixtures/pkg-json/package-private.json")), + pkg.NewLicenseFromLocations("Artistic-2.0", file.NewLocation("test-fixtures/pkg-json/package-private.json")), ), Language: pkg.JavaScript, MetadataType: pkg.NpmPackageJSONMetadataType, @@ -191,7 +191,7 @@ func TestParsePackageJSON(t *testing.T) { for _, test := range tests { t.Run(test.Fixture, func(t *testing.T) { - 
test.ExpectedPkg.Locations.Add(source.NewLocation(test.Fixture)) + test.ExpectedPkg.Locations.Add(file.NewLocation(test.Fixture)) pkgtest.TestFileParser(t, test.Fixture, parsePackageJSON, []pkg.Package{test.ExpectedPkg}, nil) }) } diff --git a/syft/pkg/cataloger/javascript/parse_package_lock.go b/syft/pkg/cataloger/javascript/parse_package_lock.go index 7ca2669b38a..91663b1b250 100644 --- a/syft/pkg/cataloger/javascript/parse_package_lock.go +++ b/syft/pkg/cataloger/javascript/parse_package_lock.go @@ -9,9 +9,9 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // integrity check @@ -44,7 +44,7 @@ type lockPackage struct { type packageLockLicense []string // parsePackageLock parses a package-lock.json and returns the discovered JavaScript packages. -func parsePackageLock(resolver source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parsePackageLock(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { // in the case we find package-lock.json files in the node_modules directories, skip those // as the whole purpose of the lock file is for the specific dependencies of the root project if pathContainsNodeModulesDirectory(reader.AccessPath()) { diff --git a/syft/pkg/cataloger/javascript/parse_package_lock_test.go b/syft/pkg/cataloger/javascript/parse_package_lock_test.go index dec36fb5f3f..baa27b397b4 100644 --- a/syft/pkg/cataloger/javascript/parse_package_lock_test.go +++ b/syft/pkg/cataloger/javascript/parse_package_lock_test.go @@ -4,9 +4,9 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" 
"github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParsePackageLock(t *testing.T) { @@ -114,7 +114,7 @@ func TestParsePackageLock(t *testing.T) { } fixture := "test-fixtures/pkg-lock/package-lock.json" for i := range expectedPkgs { - expectedPkgs[i].Locations.Add(source.NewLocation(fixture)) + expectedPkgs[i].Locations.Add(file.NewLocation(fixture)) } pkgtest.TestFileParser(t, fixture, parsePackageLock, expectedPkgs, expectedRelationships) @@ -140,7 +140,7 @@ func TestParsePackageLockV2(t *testing.T) { Language: pkg.JavaScript, Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("MIT", file.NewLocation(fixture)), ), MetadataType: "NpmPackageLockJsonMetadata", Metadata: pkg.NpmPackageLockJSONMetadata{Resolved: "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz", Integrity: "sha1-XxnSuFqY6VWANvajysyIGUIPBc8="}, @@ -152,7 +152,7 @@ func TestParsePackageLockV2(t *testing.T) { Language: pkg.JavaScript, Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("MIT", file.NewLocation(fixture)), ), MetadataType: "NpmPackageLockJsonMetadata", Metadata: pkg.NpmPackageLockJSONMetadata{Resolved: "https://registry.npmjs.org/@types/react/-/react-18.0.17.tgz", Integrity: "sha1-RYPZwyLWfv5LOak10iPtzHBQzPQ="}, @@ -164,7 +164,7 @@ func TestParsePackageLockV2(t *testing.T) { Language: pkg.JavaScript, Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("MIT", file.NewLocation(fixture)), ), MetadataType: "NpmPackageLockJsonMetadata", Metadata: pkg.NpmPackageLockJSONMetadata{Resolved: "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz", Integrity: "sha1-GmL4lSVyPd4kuhsBsJK/XfitTTk="}, @@ -176,14 +176,14 @@ func 
TestParsePackageLockV2(t *testing.T) { Language: pkg.JavaScript, Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("MIT", file.NewLocation(fixture)), ), MetadataType: "NpmPackageLockJsonMetadata", Metadata: pkg.NpmPackageLockJSONMetadata{Resolved: "https://registry.npmjs.org/csstype/-/csstype-3.1.0.tgz", Integrity: "sha1-TdysNxjXh8+d8NG30VAzklyPKfI="}, }, } for i := range expectedPkgs { - expectedPkgs[i].Locations.Add(source.NewLocation(fixture)) + expectedPkgs[i].Locations.Add(file.NewLocation(fixture)) } pkgtest.TestFileParser(t, fixture, parsePackageLock, expectedPkgs, expectedRelationships) } @@ -239,7 +239,7 @@ func TestParsePackageLockV3(t *testing.T) { }, } for i := range expectedPkgs { - expectedPkgs[i].Locations.Add(source.NewLocation(fixture)) + expectedPkgs[i].Locations.Add(file.NewLocation(fixture)) } pkgtest.TestFileParser(t, fixture, parsePackageLock, expectedPkgs, expectedRelationships) } @@ -287,7 +287,7 @@ func TestParsePackageLockAlias(t *testing.T) { Language: pkg.JavaScript, Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("ISC", source.NewLocation(packageLockV2)), + pkg.NewLicenseFromLocations("ISC", file.NewLocation(packageLockV2)), ), MetadataType: "NpmPackageLockJsonMetadata", Metadata: pkg.NpmPackageLockJSONMetadata{}, @@ -302,7 +302,7 @@ func TestParsePackageLockAlias(t *testing.T) { } for i := range expected { - expected[i].Locations.Add(source.NewLocation(pl)) + expected[i].Locations.Add(file.NewLocation(pl)) } pkgtest.TestFileParser(t, pl, parsePackageLock, expected, expectedRelationships) } @@ -318,7 +318,7 @@ func TestParsePackageLockLicenseWithArray(t *testing.T) { Language: pkg.JavaScript, Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("ISC", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("ISC", file.NewLocation(fixture)), ), PURL: "pkg:npm/tmp@1.0.0", MetadataType: 
"NpmPackageLockJsonMetadata", @@ -331,8 +331,8 @@ func TestParsePackageLockLicenseWithArray(t *testing.T) { Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation(fixture)), - pkg.NewLicenseFromLocations("Apache2", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("MIT", file.NewLocation(fixture)), + pkg.NewLicenseFromLocations("Apache2", file.NewLocation(fixture)), ), PURL: "pkg:npm/pause-stream@0.0.11", MetadataType: "NpmPackageLockJsonMetadata", @@ -344,7 +344,7 @@ func TestParsePackageLockLicenseWithArray(t *testing.T) { Language: pkg.JavaScript, Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("MIT", file.NewLocation(fixture)), ), PURL: "pkg:npm/through@2.3.8", MetadataType: "NpmPackageLockJsonMetadata", @@ -352,7 +352,7 @@ func TestParsePackageLockLicenseWithArray(t *testing.T) { }, } for i := range expectedPkgs { - expectedPkgs[i].Locations.Add(source.NewLocation(fixture)) + expectedPkgs[i].Locations.Add(file.NewLocation(fixture)) } pkgtest.TestFileParser(t, fixture, parsePackageLock, expectedPkgs, expectedRelationships) } diff --git a/syft/pkg/cataloger/javascript/parse_pnpm_lock.go b/syft/pkg/cataloger/javascript/parse_pnpm_lock.go index 071334b466e..1b786752e67 100644 --- a/syft/pkg/cataloger/javascript/parse_pnpm_lock.go +++ b/syft/pkg/cataloger/javascript/parse_pnpm_lock.go @@ -3,25 +3,29 @@ package javascript import ( "fmt" "io" + "regexp" + "strconv" "strings" "gopkg.in/yaml.v3" + "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // integrity check var _ generic.Parser = parsePnpmLock type pnpmLockYaml struct { - Dependencies map[string]string `json:"dependencies"` - Packages map[string]interface{} 
`json:"packages"` + Version string `json:"lockfileVersion" yaml:"lockfileVersion"` + Dependencies map[string]interface{} `json:"dependencies" yaml:"dependencies"` + Packages map[string]interface{} `json:"packages" yaml:"packages"` } -func parsePnpmLock(resolver source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parsePnpmLock(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { bytes, err := io.ReadAll(reader) if err != nil { return nil, nil, fmt.Errorf("failed to load pnpm-lock.yaml file: %w", err) @@ -34,19 +38,55 @@ func parsePnpmLock(resolver source.FileResolver, _ *generic.Environment, reader return nil, nil, fmt.Errorf("failed to parse pnpm-lock.yaml file: %w", err) } - for name, version := range lockFile.Dependencies { + lockVersion, _ := strconv.ParseFloat(lockFile.Version, 64) + + for name, info := range lockFile.Dependencies { + version := "" + + switch info := info.(type) { + case string: + version = info + case map[string]interface{}: + v, ok := info["version"] + if !ok { + break + } + ver, ok := v.(string) + if ok { + version = parseVersion(ver) + } + default: + log.Tracef("unsupported pnpm dependency type: %+v", info) + continue + } + + if hasPkg(pkgs, name, version) { + continue + } + pkgs = append(pkgs, newPnpmPackage(resolver, reader.Location, name, version)) } + packageNameRegex := regexp.MustCompile(`^/?([^(]*)(?:\(.*\))*$`) + splitChar := "/" + if lockVersion >= 6.0 { + splitChar = "@" + } + // parse packages from packages section of pnpm-lock.yaml for nameVersion := range lockFile.Packages { - nameVersionSplit := strings.Split(strings.TrimPrefix(nameVersion, "/"), "/") + nameVersion = packageNameRegex.ReplaceAllString(nameVersion, "$1") + nameVersionSplit := strings.Split(strings.TrimPrefix(nameVersion, "/"), splitChar) // last element in split array is version version := 
nameVersionSplit[len(nameVersionSplit)-1] // construct name from all array items other than last item (version) - name := strings.Join(nameVersionSplit[:len(nameVersionSplit)-1], "/") + name := strings.Join(nameVersionSplit[:len(nameVersionSplit)-1], splitChar) + + if hasPkg(pkgs, name, version) { + continue + } pkgs = append(pkgs, newPnpmPackage(resolver, reader.Location, name, version)) } @@ -55,3 +95,16 @@ func parsePnpmLock(resolver source.FileResolver, _ *generic.Environment, reader return pkgs, nil, nil } + +func hasPkg(pkgs []pkg.Package, name, version string) bool { + for _, p := range pkgs { + if p.Name == name && p.Version == version { + return true + } + } + return false +} + +func parseVersion(version string) string { + return strings.SplitN(version, "(", 2)[0] +} diff --git a/syft/pkg/cataloger/javascript/parse_pnpm_lock_test.go b/syft/pkg/cataloger/javascript/parse_pnpm_lock_test.go index 275cc0439a6..7c0ed1c4db8 100644 --- a/syft/pkg/cataloger/javascript/parse_pnpm_lock_test.go +++ b/syft/pkg/cataloger/javascript/parse_pnpm_lock_test.go @@ -4,16 +4,16 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParsePnpmLock(t *testing.T) { var expectedRelationships []artifact.Relationship fixture := "test-fixtures/pnpm/pnpm-lock.yaml" - locationSet := source.NewLocationSet(source.NewLocation(fixture)) + locationSet := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { @@ -52,3 +52,95 @@ func TestParsePnpmLock(t *testing.T) { pkgtest.TestFileParser(t, fixture, parsePnpmLock, expectedPkgs, expectedRelationships) } + +func TestParsePnpmV6Lock(t *testing.T) { + var expectedRelationships []artifact.Relationship + fixture := "test-fixtures/pnpm-v6/pnpm-lock.yaml" + + locationSet := file.NewLocationSet(file.NewLocation(fixture)) + + 
expectedPkgs := []pkg.Package{ + { + Name: "@testing-library/jest-dom", + Version: "5.16.5", + PURL: "pkg:npm/%40testing-library/jest-dom@5.16.5", + Locations: locationSet, + Language: pkg.JavaScript, + Type: pkg.NpmPkg, + }, + { + Name: "@testing-library/react", + Version: "13.4.0", + PURL: "pkg:npm/%40testing-library/react@13.4.0", + Locations: locationSet, + Language: pkg.JavaScript, + Type: pkg.NpmPkg, + }, + { + Name: "@testing-library/user-event", + Version: "13.5.0", + PURL: "pkg:npm/%40testing-library/user-event@13.5.0", + Locations: locationSet, + Language: pkg.JavaScript, + Type: pkg.NpmPkg, + }, + { + Name: "react", + Version: "18.2.0", + PURL: "pkg:npm/react@18.2.0", + Locations: locationSet, + Language: pkg.JavaScript, + Type: pkg.NpmPkg, + }, + { + Name: "react-dom", + Version: "18.2.0", + PURL: "pkg:npm/react-dom@18.2.0", + Locations: locationSet, + Language: pkg.JavaScript, + Type: pkg.NpmPkg, + }, + { + Name: "web-vitals", + Version: "2.1.4", + PURL: "pkg:npm/web-vitals@2.1.4", + Locations: locationSet, + Language: pkg.JavaScript, + Type: pkg.NpmPkg, + }, + { + Name: "@babel/core", + Version: "7.21.4", + PURL: "pkg:npm/%40babel/core@7.21.4", + Locations: locationSet, + Language: pkg.JavaScript, + Type: pkg.NpmPkg, + }, + { + Name: "@types/eslint", + Version: "8.37.0", + PURL: "pkg:npm/%40types/eslint@8.37.0", + Locations: locationSet, + Language: pkg.JavaScript, + Type: pkg.NpmPkg, + }, + { + Name: "read-cache", + Version: "1.0.0", + PURL: "pkg:npm/read-cache@1.0.0", + Locations: locationSet, + Language: pkg.JavaScript, + Type: pkg.NpmPkg, + }, + { + Name: "schema-utils", + Version: "3.1.2", + PURL: "pkg:npm/schema-utils@3.1.2", + Locations: locationSet, + Language: pkg.JavaScript, + Type: pkg.NpmPkg, + }, + } + + pkgtest.TestFileParser(t, fixture, parsePnpmLock, expectedPkgs, expectedRelationships) +} diff --git a/syft/pkg/cataloger/javascript/parse_yarn_lock.go b/syft/pkg/cataloger/javascript/parse_yarn_lock.go index 048f8f05c76..a90392fe2c1 
100644 --- a/syft/pkg/cataloger/javascript/parse_yarn_lock.go +++ b/syft/pkg/cataloger/javascript/parse_yarn_lock.go @@ -7,9 +7,9 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // integrity check @@ -42,7 +42,7 @@ const ( noVersion = "" ) -func parseYarnLock(resolver source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseYarnLock(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { // in the case we find yarn.lock files in the node_modules directories, skip those // as the whole purpose of the lock file is for the specific dependencies of the project if pathContainsNodeModulesDirectory(reader.AccessPath()) { diff --git a/syft/pkg/cataloger/javascript/parse_yarn_lock_test.go b/syft/pkg/cataloger/javascript/parse_yarn_lock_test.go index ded8850b1f4..cb2dacc407c 100644 --- a/syft/pkg/cataloger/javascript/parse_yarn_lock_test.go +++ b/syft/pkg/cataloger/javascript/parse_yarn_lock_test.go @@ -6,15 +6,15 @@ import ( "github.com/stretchr/testify/assert" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseYarnBerry(t *testing.T) { var expectedRelationships []artifact.Relationship fixture := "test-fixtures/yarn-berry/yarn.lock" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { @@ -98,7 +98,7 @@ func TestParseYarnBerry(t *testing.T) { func TestParseYarnLock(t *testing.T) { var expectedRelationships []artifact.Relationship 
fixture := "test-fixtures/yarn/yarn.lock" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { diff --git a/syft/pkg/cataloger/javascript/test-fixtures/pnpm-v6/pnpm-lock.yaml b/syft/pkg/cataloger/javascript/test-fixtures/pnpm-v6/pnpm-lock.yaml new file mode 100644 index 00000000000..5098519b66e --- /dev/null +++ b/syft/pkg/cataloger/javascript/test-fixtures/pnpm-v6/pnpm-lock.yaml @@ -0,0 +1,127 @@ +lockfileVersion: '6.0' + +dependencies: + '@testing-library/jest-dom': + specifier: ^5.16.5 + version: 5.16.5 + '@testing-library/react': + specifier: ^13.4.0 + version: 13.4.0(react-dom@18.2.0)(react@18.2.0) + '@testing-library/user-event': + specifier: ^13.5.0 + version: 13.5.0(@testing-library/dom@9.2.0) + react: + specifier: ^18.2.0 + version: 18.2.0 + react-dom: + specifier: ^18.2.0 + version: 18.2.0(react@18.2.0) + web-vitals: + specifier: ^2.1.4 + version: 2.1.4 + +packages: + /@babel/core@7.21.4: + resolution: {integrity: sha512-qt/YV149Jman/6AfmlxJ04LMIu8bMoyl3RB91yTFrxQmgbrSvQMy7cI8Q62FHx1t8wJ8B5fu0UDoLwHAhUo1QA==} + engines: {node: '>=6.9.0'} + dependencies: + '@ampproject/remapping': 2.2.1 + '@babel/code-frame': 7.21.4 + '@babel/generator': 7.21.4 + '@babel/helper-compilation-targets': 7.21.4(@babel/core@7.21.4) + '@babel/helper-module-transforms': 7.21.2 + '@babel/helpers': 7.21.0 + '@babel/parser': 7.21.4 + '@babel/template': 7.20.7 + '@babel/traverse': 7.21.4 + '@babel/types': 7.21.4 + convert-source-map: 1.9.0 + debug: 4.3.4 + gensync: 1.0.0-beta.2 + json5: 2.2.3 + semver: 6.3.0 + transitivePeerDependencies: + - supports-color + dev: false + + /@testing-library/jest-dom@5.16.5: + resolution: {integrity: sha512-N5ixQ2qKpi5OLYfwQmUb/5mSV9LneAcaUfp32pn4yCnpb8r/Yz0pXFPck21dIicKmi+ta5WRAknkZCfA8refMA==} + engines: {node: '>=8', npm: '>=6', yarn: '>=1'} + dependencies: + '@adobe/css-tools': 4.2.0 + '@babel/runtime': 7.21.0 + 
'@types/testing-library__jest-dom': 5.14.5 + aria-query: 5.1.3 + chalk: 3.0.0 + css.escape: 1.5.1 + dom-accessibility-api: 0.5.16 + lodash: 4.17.21 + redent: 3.0.0 + dev: false + + /@testing-library/react@13.4.0(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-sXOGON+WNTh3MLE9rve97ftaZukN3oNf2KjDy7YTx6hcTO2uuLHuCGynMDhFwGw/jYf4OJ2Qk0i4i79qMNNkyw==} + engines: {node: '>=12'} + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + dependencies: + '@babel/runtime': 7.21.0 + '@testing-library/dom': 8.20.0 + '@types/react-dom': 18.2.1 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + dev: false + + /@testing-library/user-event@13.5.0(@testing-library/dom@9.2.0): + resolution: {integrity: sha512-5Kwtbo3Y/NowpkbRuSepbyMFkZmHgD+vPzYB/RJ4oxt5Gj/avFFBYjhw27cqSVPVw/3a67NK1PbiIr9k4Gwmdg==} + engines: {node: '>=10', npm: '>=6'} + peerDependencies: + '@testing-library/dom': '>=7.21.4' + dependencies: + '@babel/runtime': 7.21.0 + '@testing-library/dom': 9.2.0 + dev: false + + /@types/eslint@8.37.0: + resolution: {integrity: sha512-Piet7dG2JBuDIfohBngQ3rCt7MgO9xCO4xIMKxBThCq5PNRB91IjlJ10eJVwfoNtvTErmxLzwBZ7rHZtbOMmFQ==} + dependencies: + '@types/estree': 1.0.1 + '@types/json-schema': 7.0.11 + dev: false + + /react-dom@18.2.0(react@18.2.0): + resolution: {integrity: sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==} + peerDependencies: + react: ^18.2.0 + dependencies: + loose-envify: 1.4.0 + react: 18.2.0 + scheduler: 0.23.0 + dev: false + + /react@18.2.0: + resolution: {integrity: sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==} + engines: {node: '>=0.10.0'} + dependencies: + loose-envify: 1.4.0 + dev: false + + /read-cache@1.0.0: + resolution: {integrity: sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==} + dependencies: + pify: 2.3.0 + dev: false + + /schema-utils@3.1.2: + resolution: {integrity: 
sha512-pvjEHOgWc9OWA/f/DE3ohBWTD6EleVLf7iFUkoSwAxttdBhB9QUebQgxER2kWueOvRJXPHNnyrvvh9eZINB8Eg==} + engines: {node: '>= 10.13.0'} + dependencies: + '@types/json-schema': 7.0.11 + ajv: 6.12.6 + ajv-keywords: 3.5.2(ajv@6.12.6) + dev: false + + /web-vitals@2.1.4: + resolution: {integrity: sha512-sVWcwhU5mX6crfI5Vd2dC4qchyTqxV8URinzt25XqVh+bHEPGH4C3NPrNionCP7Obx59wrYEbNlw4Z8sjALzZg==} + dev: false diff --git a/syft/pkg/cataloger/kernel/cataloger.go b/syft/pkg/cataloger/kernel/cataloger.go index 492c2043364..67c5bb5b727 100644 --- a/syft/pkg/cataloger/kernel/cataloger.go +++ b/syft/pkg/cataloger/kernel/cataloger.go @@ -8,9 +8,9 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ pkg.Cataloger = (*LinuxKernelCataloger)(nil) @@ -53,7 +53,7 @@ func (l LinuxKernelCataloger) Name() string { return "linux-kernel-cataloger" } -func (l LinuxKernelCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { +func (l LinuxKernelCataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { var allPackages []pkg.Package var allRelationships []artifact.Relationship var errs error diff --git a/syft/pkg/cataloger/kernel/cataloger_test.go b/syft/pkg/cataloger/kernel/cataloger_test.go index b223acf1a49..0557c4bd865 100644 --- a/syft/pkg/cataloger/kernel/cataloger_test.go +++ b/syft/pkg/cataloger/kernel/cataloger_test.go @@ -4,9 +4,9 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func Test_KernelCataloger(t *testing.T) { @@ -14,8 +14,8 @@ func Test_KernelCataloger(t *testing.T) { Name: "linux-kernel", Version: 
"6.0.7-301.fc37.x86_64", FoundBy: "linux-kernel-cataloger", - Locations: source.NewLocationSet( - source.NewVirtualLocation( + Locations: file.NewLocationSet( + file.NewVirtualLocation( "/lib/modules/6.0.7-301.fc37.x86_64/vmlinuz", "/lib/modules/6.0.7-301.fc37.x86_64/vmlinuz", ), @@ -42,14 +42,14 @@ func Test_KernelCataloger(t *testing.T) { Name: "ttynull", Version: "", FoundBy: "linux-kernel-cataloger", - Locations: source.NewLocationSet( - source.NewVirtualLocation("/lib/modules/6.0.7-301.fc37.x86_64/kernel/drivers/tty/ttynull.ko", + Locations: file.NewLocationSet( + file.NewVirtualLocation("/lib/modules/6.0.7-301.fc37.x86_64/kernel/drivers/tty/ttynull.ko", "/lib/modules/6.0.7-301.fc37.x86_64/kernel/drivers/tty/ttynull.ko", ), ), Licenses: pkg.NewLicenseSet( pkg.NewLicenseFromLocations("GPL v2", - source.NewVirtualLocation( + file.NewVirtualLocation( "/lib/modules/6.0.7-301.fc37.x86_64/kernel/drivers/tty/ttynull.ko", "/lib/modules/6.0.7-301.fc37.x86_64/kernel/drivers/tty/ttynull.ko", ), diff --git a/syft/pkg/cataloger/kernel/package.go b/syft/pkg/cataloger/kernel/package.go index 3ea60668827..92dcb5ef14a 100644 --- a/syft/pkg/cataloger/kernel/package.go +++ b/syft/pkg/cataloger/kernel/package.go @@ -4,17 +4,17 @@ import ( "strings" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) const linuxKernelPackageName = "linux-kernel" -func newLinuxKernelPackage(metadata pkg.LinuxKernelMetadata, archiveLocation source.Location) pkg.Package { +func newLinuxKernelPackage(metadata pkg.LinuxKernelMetadata, archiveLocation file.Location) pkg.Package { p := pkg.Package{ Name: linuxKernelPackageName, Version: metadata.Version, - Locations: source.NewLocationSet(archiveLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(archiveLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), PURL: 
packageURL(linuxKernelPackageName, metadata.Version), Type: pkg.LinuxKernelPkg, MetadataType: pkg.LinuxKernelMetadataType, @@ -26,11 +26,11 @@ func newLinuxKernelPackage(metadata pkg.LinuxKernelMetadata, archiveLocation sou return p } -func newLinuxKernelModulePackage(metadata pkg.LinuxKernelModuleMetadata, kmLocation source.Location) pkg.Package { +func newLinuxKernelModulePackage(metadata pkg.LinuxKernelModuleMetadata, kmLocation file.Location) pkg.Package { p := pkg.Package{ Name: metadata.Name, Version: metadata.Version, - Locations: source.NewLocationSet(kmLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(kmLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), Licenses: pkg.NewLicenseSet(pkg.NewLicensesFromLocation(kmLocation, metadata.License)...), PURL: packageURL(metadata.Name, metadata.Version), Type: pkg.LinuxKernelModulePkg, diff --git a/syft/pkg/cataloger/kernel/parse_linux_kernel_file.go b/syft/pkg/cataloger/kernel/parse_linux_kernel_file.go index 0be32c5b658..54c26eb4297 100644 --- a/syft/pkg/cataloger/kernel/parse_linux_kernel_file.go +++ b/syft/pkg/cataloger/kernel/parse_linux_kernel_file.go @@ -9,15 +9,15 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" "github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader" - "github.com/anchore/syft/syft/source" ) const linuxKernelMagicName = "Linux kernel" -func parseLinuxKernelFile(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseLinuxKernelFile(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { unionReader, err := unionreader.GetUnionReader(reader) if err != nil { return nil, 
nil, fmt.Errorf("unable to get union reader for file: %w", err) diff --git a/syft/pkg/cataloger/kernel/parse_linux_kernel_module_file.go b/syft/pkg/cataloger/kernel/parse_linux_kernel_module_file.go index 3adeb5632f7..34974f6272c 100644 --- a/syft/pkg/cataloger/kernel/parse_linux_kernel_module_file.go +++ b/syft/pkg/cataloger/kernel/parse_linux_kernel_module_file.go @@ -6,15 +6,15 @@ import ( "strings" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" "github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader" - "github.com/anchore/syft/syft/source" ) const modinfoName = ".modinfo" -func parseLinuxKernelModuleFile(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseLinuxKernelModuleFile(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { unionReader, err := unionreader.GetUnionReader(reader) if err != nil { return nil, nil, fmt.Errorf("unable to get union reader for file: %w", err) diff --git a/syft/pkg/cataloger/nix/cataloger.go b/syft/pkg/cataloger/nix/cataloger.go index b4b440c2687..5d920f2300c 100644 --- a/syft/pkg/cataloger/nix/cataloger.go +++ b/syft/pkg/cataloger/nix/cataloger.go @@ -7,8 +7,8 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) const ( @@ -27,10 +27,10 @@ func (c *StoreCataloger) Name() string { return catalogerName } -func (c *StoreCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { +func (c *StoreCataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { // we want to search for only directories, which isn't possible via the 
stereoscope API, so we need to apply the glob manually on all returned paths var pkgs []pkg.Package - var filesByPath = make(map[string]*source.LocationSet) + var filesByPath = make(map[string]*file.LocationSet) for location := range resolver.AllLocations() { matchesStorePath, err := doublestar.Match(nixStoreGlob, location.RealPath) if err != nil { @@ -40,7 +40,7 @@ func (c *StoreCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, [ parentStorePath := findParentNixStorePath(location.RealPath) if parentStorePath != "" { if _, ok := filesByPath[parentStorePath]; !ok { - s := source.NewLocationSet() + s := file.NewLocationSet() filesByPath[parentStorePath] = &s } filesByPath[parentStorePath].Add(location) @@ -80,7 +80,7 @@ func (c *StoreCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, [ return pkgs, nil, nil } -func appendFiles(p *pkg.Package, location ...source.Location) { +func appendFiles(p *pkg.Package, location ...file.Location) { metadata, ok := p.Metadata.(pkg.NixStoreMetadata) if !ok { log.WithFields("package", p.Name).Warn("nix package metadata missing") diff --git a/syft/pkg/cataloger/nix/cataloger_test.go b/syft/pkg/cataloger/nix/cataloger_test.go index 10b544fc056..f43babde93b 100644 --- a/syft/pkg/cataloger/nix/cataloger_test.go +++ b/syft/pkg/cataloger/nix/cataloger_test.go @@ -4,9 +4,9 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestCataloger_Catalog(t *testing.T) { @@ -23,7 +23,7 @@ func TestCataloger_Catalog(t *testing.T) { Name: "glibc", Version: "2.34-210", PURL: "pkg:nix/glibc@2.34-210?output=bin&outputhash=h0cnbmfcn93xm5dg2x27ixhag1cwndga", - Locations: source.NewLocationSet(source.NewLocation("nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34-210-bin")), + Locations: 
file.NewLocationSet(file.NewLocation("nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34-210-bin")), FoundBy: catalogerName, Type: pkg.NixPkg, MetadataType: pkg.NixStoreMetadataType, diff --git a/syft/pkg/cataloger/nix/package.go b/syft/pkg/cataloger/nix/package.go index 6e473d6fdcd..090dfe1379c 100644 --- a/syft/pkg/cataloger/nix/package.go +++ b/syft/pkg/cataloger/nix/package.go @@ -2,16 +2,16 @@ package nix import ( "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newNixStorePackage(storePath nixStorePath, locations ...source.Location) pkg.Package { +func newNixStorePackage(storePath nixStorePath, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: storePath.name, Version: storePath.version, FoundBy: catalogerName, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), Type: pkg.NixPkg, PURL: packageURL(storePath), MetadataType: pkg.NixStoreMetadataType, diff --git a/syft/pkg/cataloger/php/package.go b/syft/pkg/cataloger/php/package.go index 507fd26dab2..7255d58d53e 100644 --- a/syft/pkg/cataloger/php/package.go +++ b/syft/pkg/cataloger/php/package.go @@ -4,15 +4,15 @@ import ( "strings" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newComposerLockPackage(m parsedData, indexLocation source.Location) pkg.Package { +func newComposerLockPackage(m parsedData, indexLocation file.Location) pkg.Package { p := pkg.Package{ Name: m.Name, Version: m.Version, - Locations: source.NewLocationSet(indexLocation), + Locations: file.NewLocationSet(indexLocation), Licenses: pkg.NewLicenseSet(pkg.NewLicensesFromLocation(indexLocation, m.License...)...), PURL: packageURL(m), Language: pkg.PHP, diff --git a/syft/pkg/cataloger/php/parse_composer_lock.go b/syft/pkg/cataloger/php/parse_composer_lock.go index 
248b7519eb8..836befe138f 100644 --- a/syft/pkg/cataloger/php/parse_composer_lock.go +++ b/syft/pkg/cataloger/php/parse_composer_lock.go @@ -7,9 +7,9 @@ import ( "io" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseComposerLock @@ -25,7 +25,7 @@ type composerLock struct { } // parseComposerLock is a parser function for Composer.lock contents, returning "Default" php packages discovered. -func parseComposerLock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseComposerLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { pkgs := make([]pkg.Package, 0) dec := json.NewDecoder(reader) diff --git a/syft/pkg/cataloger/php/parse_composer_lock_test.go b/syft/pkg/cataloger/php/parse_composer_lock_test.go index ad7814a97d5..f1038a5d4c3 100644 --- a/syft/pkg/cataloger/php/parse_composer_lock_test.go +++ b/syft/pkg/cataloger/php/parse_composer_lock_test.go @@ -4,15 +4,15 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseComposerFileLock(t *testing.T) { var expectedRelationships []artifact.Relationship fixture := "test-fixtures/composer.lock" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { Name: "adoy/fastcgi-client", @@ -20,7 +20,7 @@ func TestParseComposerFileLock(t *testing.T) { PURL: "pkg:composer/adoy/fastcgi-client@1.0.2", Locations: locations, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", 
source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("MIT", file.NewLocation(fixture)), ), Language: pkg.PHP, Type: pkg.PhpComposerPkg, @@ -61,7 +61,7 @@ func TestParseComposerFileLock(t *testing.T) { PURL: "pkg:composer/alcaeus/mongo-php-adapter@1.1.11", Language: pkg.PHP, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("MIT", file.NewLocation(fixture)), ), Type: pkg.PhpComposerPkg, MetadataType: pkg.PhpComposerJSONMetadataType, diff --git a/syft/pkg/cataloger/php/parse_installed_json.go b/syft/pkg/cataloger/php/parse_installed_json.go index 8c1213200f4..060e01903e9 100644 --- a/syft/pkg/cataloger/php/parse_installed_json.go +++ b/syft/pkg/cataloger/php/parse_installed_json.go @@ -7,9 +7,9 @@ import ( "io" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseComposerLock @@ -41,7 +41,7 @@ func (w *installedJSONComposerV2) UnmarshalJSON(data []byte) error { } // parseInstalledJSON is a parser function for Composer.lock contents, returning "Default" php packages discovered. 
-func parseInstalledJSON(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseInstalledJSON(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package dec := json.NewDecoder(reader) diff --git a/syft/pkg/cataloger/php/parse_installed_json_test.go b/syft/pkg/cataloger/php/parse_installed_json_test.go index dde72021eaf..984856ed497 100644 --- a/syft/pkg/cataloger/php/parse_installed_json_test.go +++ b/syft/pkg/cataloger/php/parse_installed_json_test.go @@ -4,9 +4,9 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseInstalledJsonComposerV1(t *testing.T) { @@ -130,7 +130,7 @@ func TestParseInstalledJsonComposerV1(t *testing.T) { for _, fixture := range fixtures { t.Run(fixture, func(t *testing.T) { - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) for i := range expectedPkgs { expectedPkgs[i].Locations = locations locationLicenses := pkg.NewLicenseSet() diff --git a/syft/pkg/cataloger/portage/cataloger_test.go b/syft/pkg/cataloger/portage/cataloger_test.go index b2ff5f26d17..c556c940a14 100644 --- a/syft/pkg/cataloger/portage/cataloger_test.go +++ b/syft/pkg/cataloger/portage/cataloger_test.go @@ -7,20 +7,19 @@ import ( "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestPortageCataloger(t *testing.T) { - expectedLicenseLocation := source.NewLocation("var/db/pkg/app-containers/skopeo-1.5.1/LICENSE") + expectedLicenseLocation := 
file.NewLocation("var/db/pkg/app-containers/skopeo-1.5.1/LICENSE") expectedPkgs := []pkg.Package{ { Name: "app-containers/skopeo", Version: "1.5.1", FoundBy: "portage-cataloger", PURL: "pkg:ebuild/app-containers/skopeo@1.5.1", - Locations: source.NewLocationSet( - source.NewLocation("var/db/pkg/app-containers/skopeo-1.5.1/CONTENTS"), - source.NewLocation("var/db/pkg/app-containers/skopeo-1.5.1/SIZE"), + Locations: file.NewLocationSet( + file.NewLocation("var/db/pkg/app-containers/skopeo-1.5.1/CONTENTS"), + file.NewLocation("var/db/pkg/app-containers/skopeo-1.5.1/SIZE"), expectedLicenseLocation, ), Licenses: pkg.NewLicenseSet(pkg.NewLicensesFromLocation(expectedLicenseLocation, "Apache-2.0", "BSD", "BSD-2", "CC-BY-SA-4.0", "ISC", "MIT")...), diff --git a/syft/pkg/cataloger/portage/parse_portage_contents.go b/syft/pkg/cataloger/portage/parse_portage_contents.go index ac93c6ea05e..941cce394bb 100644 --- a/syft/pkg/cataloger/portage/parse_portage_contents.go +++ b/syft/pkg/cataloger/portage/parse_portage_contents.go @@ -15,7 +15,6 @@ import ( "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var ( @@ -23,7 +22,7 @@ var ( _ generic.Parser = parsePortageContents ) -func parsePortageContents(resolver source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parsePortageContents(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { cpvMatch := cpvRe.FindStringSubmatch(reader.Location.RealPath) if cpvMatch == nil { return nil, nil, fmt.Errorf("failed to match package and version in %s", reader.Location.RealPath) @@ -39,7 +38,7 @@ func parsePortageContents(resolver source.FileResolver, _ *generic.Environment, Name: name, Version: version, PURL: packageURL(name, version), - Locations: source.NewLocationSet( + 
Locations: file.NewLocationSet( reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), ), Type: pkg.PortagePkg, @@ -58,7 +57,7 @@ func parsePortageContents(resolver source.FileResolver, _ *generic.Environment, return []pkg.Package{p}, nil, nil } -func addFiles(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) { +func addFiles(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) { contentsReader, err := resolver.FileContentsByLocation(dbLocation) if err != nil { log.WithFields("path", dbLocation.RealPath).Warnf("failed to fetch portage contents (package=%s): %+v", p.Name, err) @@ -91,7 +90,7 @@ func addFiles(resolver source.FileResolver, dbLocation source.Location, p *pkg.P p.Locations.Add(dbLocation) } -func addLicenses(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) { +func addLicenses(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) { parentPath := filepath.Dir(dbLocation.RealPath) location := resolver.RelativeFileByPath(dbLocation, path.Join(parentPath, "LICENSE")) @@ -121,7 +120,7 @@ func addLicenses(resolver source.FileResolver, dbLocation source.Location, p *pk p.Locations.Add(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation)) } -func addSize(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) { +func addSize(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) { parentPath := filepath.Dir(dbLocation.RealPath) location := resolver.RelativeFileByPath(dbLocation, path.Join(parentPath, "SIZE")) diff --git a/syft/pkg/cataloger/python/cataloger_test.go b/syft/pkg/cataloger/python/cataloger_test.go index 10522f21524..da15f299313 100644 --- a/syft/pkg/cataloger/python/cataloger_test.go +++ b/syft/pkg/cataloger/python/cataloger_test.go @@ -5,9 +5,9 @@ import ( "github.com/stretchr/testify/require" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" 
"github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func Test_PackageCataloger(t *testing.T) { @@ -46,7 +46,7 @@ func Test_PackageCataloger(t *testing.T) { Type: pkg.PythonPkg, Language: pkg.Python, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("Apache 2.0", source.NewLocation("test-fixtures/egg-info/PKG-INFO")), + pkg.NewLicenseFromLocations("Apache 2.0", file.NewLocation("test-fixtures/egg-info/PKG-INFO")), ), FoundBy: "python-package-cataloger", MetadataType: pkg.PythonPackageMetadataType, @@ -84,7 +84,7 @@ func Test_PackageCataloger(t *testing.T) { Type: pkg.PythonPkg, Language: pkg.Python, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("BSD License", source.NewLocation("test-fixtures/dist-info/METADATA")), + pkg.NewLicenseFromLocations("BSD License", file.NewLocation("test-fixtures/dist-info/METADATA")), ), FoundBy: "python-package-cataloger", MetadataType: pkg.PythonPackageMetadataType, @@ -122,7 +122,7 @@ func Test_PackageCataloger(t *testing.T) { Type: pkg.PythonPkg, Language: pkg.Python, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("BSD License", source.NewLocation("test-fixtures/malformed-record/dist-info/METADATA")), + pkg.NewLicenseFromLocations("BSD License", file.NewLocation("test-fixtures/malformed-record/dist-info/METADATA")), ), FoundBy: "python-package-cataloger", MetadataType: pkg.PythonPackageMetadataType, @@ -154,7 +154,7 @@ func Test_PackageCataloger(t *testing.T) { Type: pkg.PythonPkg, Language: pkg.Python, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("BSD License", source.NewLocation("test-fixtures/partial.dist-info/METADATA")), + pkg.NewLicenseFromLocations("BSD License", file.NewLocation("test-fixtures/partial.dist-info/METADATA")), ), FoundBy: "python-package-cataloger", MetadataType: pkg.PythonPackageMetadataType, @@ -178,7 +178,7 @@ func Test_PackageCataloger(t *testing.T) { Type: pkg.PythonPkg, Language: pkg.Python, Licenses: 
pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("Apache 2.0", source.NewLocation("test-fixtures/test.egg-info")), + pkg.NewLicenseFromLocations("Apache 2.0", file.NewLocation("test-fixtures/test.egg-info")), ), FoundBy: "python-package-cataloger", MetadataType: pkg.PythonPackageMetadataType, @@ -196,12 +196,12 @@ func Test_PackageCataloger(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - resolver := source.NewMockResolverForPaths(test.fixtures...) + resolver := file.NewMockResolverForPaths(test.fixtures...) locations, err := resolver.FilesByPath(test.fixtures...) require.NoError(t, err) - test.expectedPackage.Locations = source.NewLocationSet(locations...) + test.expectedPackage.Locations = file.NewLocationSet(locations...) pkgtest.NewCatalogTester(). WithResolver(resolver). @@ -225,7 +225,7 @@ func Test_PackageCataloger_IgnorePackage(t *testing.T) { for _, test := range tests { t.Run(test.MetadataFixture, func(t *testing.T) { - resolver := source.NewMockResolverForPaths(test.MetadataFixture) + resolver := file.NewMockResolverForPaths(test.MetadataFixture) actual, _, err := NewPythonPackageCataloger().Catalog(resolver) require.NoError(t, err) diff --git a/syft/pkg/cataloger/python/package.go b/syft/pkg/cataloger/python/package.go index 68f7f1dccfe..e20f878601f 100644 --- a/syft/pkg/cataloger/python/package.go +++ b/syft/pkg/cataloger/python/package.go @@ -4,15 +4,15 @@ import ( "fmt" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackageForIndex(name, version string, locations ...source.Location) pkg.Package { +func newPackageForIndex(name, version string, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: name, Version: version, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(name, version, nil), Language: pkg.Python, Type: 
pkg.PythonPkg, @@ -23,11 +23,11 @@ func newPackageForIndex(name, version string, locations ...source.Location) pkg. return p } -func newPackageForIndexWithMetadata(name, version string, metadata pkg.PythonPipfileLockMetadata, locations ...source.Location) pkg.Package { +func newPackageForIndexWithMetadata(name, version string, metadata pkg.PythonPipfileLockMetadata, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: name, Version: version, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(name, version, nil), Language: pkg.Python, Type: pkg.PythonPkg, @@ -40,11 +40,11 @@ func newPackageForIndexWithMetadata(name, version string, metadata pkg.PythonPip return p } -func newPackageForRequirementsWithMetadata(name, version string, metadata pkg.PythonRequirementsMetadata, locations ...source.Location) pkg.Package { +func newPackageForRequirementsWithMetadata(name, version string, metadata pkg.PythonRequirementsMetadata, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: name, Version: version, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(name, version, nil), Language: pkg.Python, Type: pkg.PythonPkg, @@ -57,12 +57,12 @@ func newPackageForRequirementsWithMetadata(name, version string, metadata pkg.Py return p } -func newPackageForPackage(m parsedData, sources ...source.Location) pkg.Package { +func newPackageForPackage(m parsedData, sources ...file.Location) pkg.Package { p := pkg.Package{ Name: m.Name, Version: m.Version, PURL: packageURL(m.Name, m.Version, &m.PythonPackageMetadata), - Locations: source.NewLocationSet(sources...), + Locations: file.NewLocationSet(sources...), Licenses: pkg.NewLicenseSet(pkg.NewLicensesFromLocation(m.LicenseLocation, m.Licenses)...), Language: pkg.Python, Type: pkg.PythonPkg, diff --git a/syft/pkg/cataloger/python/parse_pipfile_lock.go 
b/syft/pkg/cataloger/python/parse_pipfile_lock.go index c957405a647..77c8cd4fe8a 100644 --- a/syft/pkg/cataloger/python/parse_pipfile_lock.go +++ b/syft/pkg/cataloger/python/parse_pipfile_lock.go @@ -8,9 +8,9 @@ import ( "strings" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) type pipfileLock struct { @@ -41,7 +41,7 @@ type Dependency struct { var _ generic.Parser = parsePipfileLock // parsePipfileLock is a parser function for Pipfile.lock contents, returning "Default" python packages discovered. -func parsePipfileLock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parsePipfileLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { pkgs := make([]pkg.Package, 0) dec := json.NewDecoder(reader) diff --git a/syft/pkg/cataloger/python/parse_pipfile_lock_test.go b/syft/pkg/cataloger/python/parse_pipfile_lock_test.go index 15b327845db..783c7dfd0ec 100644 --- a/syft/pkg/cataloger/python/parse_pipfile_lock_test.go +++ b/syft/pkg/cataloger/python/parse_pipfile_lock_test.go @@ -4,15 +4,15 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParsePipFileLock(t *testing.T) { fixture := "test-fixtures/pipfile-lock/Pipfile.lock" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { Name: "aio-pika", diff --git a/syft/pkg/cataloger/python/parse_poetry_lock.go b/syft/pkg/cataloger/python/parse_poetry_lock.go index 0e29de0178c..4bc929cbee4 100644 --- 
a/syft/pkg/cataloger/python/parse_poetry_lock.go +++ b/syft/pkg/cataloger/python/parse_poetry_lock.go @@ -6,9 +6,9 @@ import ( "github.com/pelletier/go-toml" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // integrity check @@ -25,7 +25,7 @@ type poetryMetadata struct { } // parsePoetryLock is a parser function for poetry.lock contents, returning all python packages discovered. -func parsePoetryLock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parsePoetryLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { tree, err := toml.LoadReader(reader) if err != nil { return nil, nil, fmt.Errorf("unable to load poetry.lock for parsing: %w", err) diff --git a/syft/pkg/cataloger/python/parse_poetry_lock_test.go b/syft/pkg/cataloger/python/parse_poetry_lock_test.go index 0a3478e1bdf..fd6d1bdc805 100644 --- a/syft/pkg/cataloger/python/parse_poetry_lock_test.go +++ b/syft/pkg/cataloger/python/parse_poetry_lock_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParsePoetryLock(t *testing.T) { fixture := "test-fixtures/poetry/poetry.lock" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { Name: "added-value", diff --git a/syft/pkg/cataloger/python/parse_requirements.go b/syft/pkg/cataloger/python/parse_requirements.go index c2b5a122a0a..33e1371b07b 100644 --- a/syft/pkg/cataloger/python/parse_requirements.go +++ 
b/syft/pkg/cataloger/python/parse_requirements.go @@ -9,9 +9,9 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseRequirementsTxt @@ -23,7 +23,7 @@ var ( // parseRequirementsTxt takes a Python requirements.txt file, returning all Python packages that are locked to a // specific version. -func parseRequirementsTxt(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseRequirementsTxt(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var packages []pkg.Package scanner := bufio.NewScanner(reader) diff --git a/syft/pkg/cataloger/python/parse_requirements_test.go b/syft/pkg/cataloger/python/parse_requirements_test.go index b25179c5070..b38cae3d306 100644 --- a/syft/pkg/cataloger/python/parse_requirements_test.go +++ b/syft/pkg/cataloger/python/parse_requirements_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseRequirementsTxt(t *testing.T) { fixture := "test-fixtures/requires/requirements.txt" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { Name: "flask", diff --git a/syft/pkg/cataloger/python/parse_setup.go b/syft/pkg/cataloger/python/parse_setup.go index ee91f6ada2a..e5150b2743c 100644 --- a/syft/pkg/cataloger/python/parse_setup.go +++ b/syft/pkg/cataloger/python/parse_setup.go @@ -7,9 +7,9 @@ import ( "github.com/anchore/syft/internal/log" 
"github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // integrity check @@ -22,7 +22,7 @@ var _ generic.Parser = parseSetup // " mypy2 == v0.770", ' mypy3== v0.770', --> match(name=mypy2 version=v0.770), match(name=mypy3, version=v0.770) var pinnedDependency = regexp.MustCompile(`['"]\W?(\w+\W?==\W?[\w.]*)`) -func parseSetup(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseSetup(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var packages []pkg.Package scanner := bufio.NewScanner(reader) diff --git a/syft/pkg/cataloger/python/parse_setup_test.go b/syft/pkg/cataloger/python/parse_setup_test.go index a3fdfd85b33..66500729631 100644 --- a/syft/pkg/cataloger/python/parse_setup_test.go +++ b/syft/pkg/cataloger/python/parse_setup_test.go @@ -6,9 +6,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseSetup(t *testing.T) { @@ -65,7 +65,7 @@ func TestParseSetup(t *testing.T) { for _, tt := range tests { t.Run(tt.fixture, func(t *testing.T) { - locations := source.NewLocationSet(source.NewLocation(tt.fixture)) + locations := file.NewLocationSet(file.NewLocation(tt.fixture)) for i := range tt.expected { tt.expected[i].Locations = locations } diff --git a/syft/pkg/cataloger/python/parse_wheel_egg.go b/syft/pkg/cataloger/python/parse_wheel_egg.go index 911e7801ca5..f3fc20ead54 100644 --- a/syft/pkg/cataloger/python/parse_wheel_egg.go +++ b/syft/pkg/cataloger/python/parse_wheel_egg.go @@ -10,13 +10,13 @@ import ( 
"github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // parseWheelOrEgg takes the primary metadata file reference and returns the python package it represents. -func parseWheelOrEgg(resolver source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseWheelOrEgg(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { pd, sources, err := assembleEggOrWheelMetadata(resolver, reader.Location) if err != nil { return nil, nil, err @@ -37,7 +37,7 @@ func parseWheelOrEgg(resolver source.FileResolver, _ *generic.Environment, reade } // fetchRecordFiles finds a corresponding installed-files.txt file for the given python package metadata file and returns the set of file records contained. -func fetchInstalledFiles(resolver source.FileResolver, metadataLocation source.Location, sitePackagesRootPath string) (files []pkg.PythonFileRecord, sources []source.Location, err error) { +func fetchInstalledFiles(resolver file.Resolver, metadataLocation file.Location, sitePackagesRootPath string) (files []pkg.PythonFileRecord, sources []file.Location, err error) { // we've been given a file reference to a specific wheel METADATA file. note: this may be for a directory // or for an image... for an image the METADATA file may be present within multiple layers, so it is important // to reconcile the installed-files.txt path to the same layer (or the next adjacent lower layer). 
@@ -68,7 +68,7 @@ func fetchInstalledFiles(resolver source.FileResolver, metadataLocation source.L } // fetchRecordFiles finds a corresponding RECORD file for the given python package metadata file and returns the set of file records contained. -func fetchRecordFiles(resolver source.FileResolver, metadataLocation source.Location) (files []pkg.PythonFileRecord, sources []source.Location, err error) { +func fetchRecordFiles(resolver file.Resolver, metadataLocation file.Location) (files []pkg.PythonFileRecord, sources []file.Location, err error) { // we've been given a file reference to a specific wheel METADATA file. note: this may be for a directory // or for an image... for an image the METADATA file may be present within multiple layers, so it is important // to reconcile the RECORD path to the same layer (or the next adjacent lower layer). @@ -95,7 +95,7 @@ func fetchRecordFiles(resolver source.FileResolver, metadataLocation source.Loca } // fetchTopLevelPackages finds a corresponding top_level.txt file for the given python package metadata file and returns the set of package names contained. 
-func fetchTopLevelPackages(resolver source.FileResolver, metadataLocation source.Location) (pkgs []string, sources []source.Location, err error) { +func fetchTopLevelPackages(resolver file.Resolver, metadataLocation file.Location) (pkgs []string, sources []file.Location, err error) { // a top_level.txt file specifies the python top-level packages (provided by this python package) installed into site-packages parentDir := filepath.Dir(metadataLocation.RealPath) topLevelPath := filepath.Join(parentDir, "top_level.txt") @@ -125,7 +125,7 @@ func fetchTopLevelPackages(resolver source.FileResolver, metadataLocation source return pkgs, sources, nil } -func fetchDirectURLData(resolver source.FileResolver, metadataLocation source.Location) (d *pkg.PythonDirectURLOriginInfo, sources []source.Location, err error) { +func fetchDirectURLData(resolver file.Resolver, metadataLocation file.Location) (d *pkg.PythonDirectURLOriginInfo, sources []file.Location, err error) { parentDir := filepath.Dir(metadataLocation.RealPath) directURLPath := filepath.Join(parentDir, "direct_url.json") directURLLocation := resolver.RelativeFileByPath(metadataLocation, directURLPath) @@ -160,8 +160,8 @@ func fetchDirectURLData(resolver source.FileResolver, metadataLocation source.Lo } // assembleEggOrWheelMetadata discovers and accumulates python package metadata from multiple file sources and returns a single metadata object as well as a list of files where the metadata was derived from. 
-func assembleEggOrWheelMetadata(resolver source.FileResolver, metadataLocation source.Location) (*parsedData, []source.Location, error) { - var sources = []source.Location{ +func assembleEggOrWheelMetadata(resolver file.Resolver, metadataLocation file.Location) (*parsedData, []file.Location, error) { + var sources = []file.Location{ metadataLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), } diff --git a/syft/pkg/cataloger/python/parse_wheel_egg_metadata.go b/syft/pkg/cataloger/python/parse_wheel_egg_metadata.go index 55ac924f002..e8d2cafafbf 100644 --- a/syft/pkg/cataloger/python/parse_wheel_egg_metadata.go +++ b/syft/pkg/cataloger/python/parse_wheel_egg_metadata.go @@ -9,15 +9,15 @@ import ( "github.com/mitchellh/mapstructure" - "github.com/anchore/syft/internal/file" + intFile "github.com/anchore/syft/internal/file" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) type parsedData struct { Licenses string `mapstructure:"License"` - LicenseLocation source.Location + LicenseLocation file.Location pkg.PythonPackageMetadata `mapstructure:",squash"` } @@ -81,7 +81,7 @@ func parseWheelOrEggMetadata(path string, reader io.Reader) (parsedData, error) pd.SitePackagesRootPath = determineSitePackagesRootPath(path) if pd.Licenses != "" { - pd.LicenseLocation = source.NewLocation(path) + pd.LicenseLocation = file.NewLocation(path) } return pd, nil @@ -91,7 +91,7 @@ func parseWheelOrEggMetadata(path string, reader io.Reader) (parsedData, error) // of egg metadata (as opposed to a directory that contains more metadata // files). 
func isEggRegularFile(path string) bool { - return file.GlobMatch(eggInfoGlob, path) + return intFile.GlobMatch(eggInfoGlob, path) } // determineSitePackagesRootPath returns the path of the site packages root, diff --git a/syft/pkg/cataloger/python/parse_wheel_egg_metadata_test.go b/syft/pkg/cataloger/python/parse_wheel_egg_metadata_test.go index cb776b66937..e9db5446667 100644 --- a/syft/pkg/cataloger/python/parse_wheel_egg_metadata_test.go +++ b/syft/pkg/cataloger/python/parse_wheel_egg_metadata_test.go @@ -6,8 +6,8 @@ import ( "github.com/go-test/deep" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) func TestParseWheelEggMetadata(t *testing.T) { @@ -19,7 +19,7 @@ func TestParseWheelEggMetadata(t *testing.T) { Fixture: "test-fixtures/egg-info/PKG-INFO", ExpectedMetadata: parsedData{ "Apache 2.0", - source.NewLocation("test-fixtures/egg-info/PKG-INFO"), + file.NewLocation("test-fixtures/egg-info/PKG-INFO"), pkg.PythonPackageMetadata{ Name: "requests", Version: "2.22.0", @@ -34,7 +34,7 @@ func TestParseWheelEggMetadata(t *testing.T) { Fixture: "test-fixtures/dist-info/METADATA", ExpectedMetadata: parsedData{ "BSD License", - source.NewLocation("test-fixtures/dist-info/METADATA"), + file.NewLocation("test-fixtures/dist-info/METADATA"), pkg.PythonPackageMetadata{ Name: "Pygments", Version: "2.6.1", @@ -135,7 +135,7 @@ func TestParseWheelEggMetadataInvalid(t *testing.T) { Fixture: "test-fixtures/egg-info/PKG-INFO-INVALID", ExpectedMetadata: parsedData{ "", - source.Location{}, + file.Location{}, pkg.PythonPackageMetadata{ Name: "mxnet", Version: "1.8.0", diff --git a/syft/pkg/cataloger/r/cataloger_test.go b/syft/pkg/cataloger/r/cataloger_test.go index 1581e8dc657..0e2a193d6e0 100644 --- a/syft/pkg/cataloger/r/cataloger_test.go +++ b/syft/pkg/cataloger/r/cataloger_test.go @@ -4,9 +4,9 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" 
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestRPackageCataloger(t *testing.T) { @@ -15,7 +15,7 @@ func TestRPackageCataloger(t *testing.T) { Name: "base", Version: "4.3.0", FoundBy: "r-package-cataloger", - Locations: source.NewLocationSet(source.NewLocation("base/DESCRIPTION")), + Locations: file.NewLocationSet(file.NewLocation("base/DESCRIPTION")), Licenses: pkg.NewLicenseSet([]pkg.License{pkg.NewLicense("Part of R 4.3.0")}...), Language: pkg.R, Type: pkg.Rpkg, @@ -34,7 +34,7 @@ func TestRPackageCataloger(t *testing.T) { Name: "stringr", Version: "1.5.0.9000", FoundBy: "r-package-cataloger", - Locations: source.NewLocationSet(source.NewLocation("stringr/DESCRIPTION")), + Locations: file.NewLocationSet(file.NewLocation("stringr/DESCRIPTION")), Licenses: pkg.NewLicenseSet([]pkg.License{pkg.NewLicense("MIT")}...), Language: pkg.R, Type: pkg.Rpkg, diff --git a/syft/pkg/cataloger/r/package.go b/syft/pkg/cataloger/r/package.go index b916cc9da69..9fc45d3e26a 100644 --- a/syft/pkg/cataloger/r/package.go +++ b/syft/pkg/cataloger/r/package.go @@ -4,12 +4,12 @@ import ( "strings" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackage(pd parseData, locations ...source.Location) pkg.Package { - locationSet := source.NewLocationSet() +func newPackage(pd parseData, locations ...file.Location) pkg.Package { + locationSet := file.NewLocationSet() for _, loc := range locations { locationSet.Add(loc.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)) } @@ -45,7 +45,7 @@ func packageURL(m parseData) string { // Multiple licences can be specified separated by ‘|’ // (surrounded by spaces) in which case the user can choose any of the above cases. 
// https://cran.rstudio.com/doc/manuals/r-devel/R-exts.html#Licensing -func parseLicenseData(license string, locations ...source.Location) []pkg.License { +func parseLicenseData(license string, locations ...file.Location) []pkg.License { licenses := make([]pkg.License, 0) // check if multiple licenses are separated by | diff --git a/syft/pkg/cataloger/r/parse_description.go b/syft/pkg/cataloger/r/parse_description.go index b062b039559..182cd4bde2f 100644 --- a/syft/pkg/cataloger/r/parse_description.go +++ b/syft/pkg/cataloger/r/parse_description.go @@ -7,9 +7,9 @@ import ( "strings" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) /* some examples of license strings found in DESCRIPTION files: @@ -28,10 +28,10 @@ License: Part of R 4.3.0 License: Unlimited */ -func parseDescriptionFile(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseDescriptionFile(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { values := extractFieldsFromDescriptionFile(reader) m := parseDataFromDescriptionMap(values) - p := newPackage(m, []source.Location{reader.Location}...) + p := newPackage(m, []file.Location{reader.Location}...) 
if p.Name == "" || p.Version == "" { return nil, nil, nil } diff --git a/syft/pkg/cataloger/r/parse_description_test.go b/syft/pkg/cataloger/r/parse_description_test.go index 4263995240d..483c54adbac 100644 --- a/syft/pkg/cataloger/r/parse_description_test.go +++ b/syft/pkg/cataloger/r/parse_description_test.go @@ -8,8 +8,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) func Test_parseDescriptionFile(t *testing.T) { @@ -53,8 +53,8 @@ func Test_parseDescriptionFile(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { f, err := os.Open(tt.fixture) - input := source.LocationReadCloser{ - Location: source.NewLocation(tt.fixture), + input := file.LocationReadCloser{ + Location: file.NewLocation(tt.fixture), ReadCloser: f, } got, _, err := parseDescriptionFile(nil, nil, input) diff --git a/syft/pkg/cataloger/rpm/package.go b/syft/pkg/cataloger/rpm/package.go index 53c0925b906..136af9f5755 100644 --- a/syft/pkg/cataloger/rpm/package.go +++ b/syft/pkg/cataloger/rpm/package.go @@ -8,18 +8,18 @@ import ( rpmdb "github.com/knqyf263/go-rpmdb/pkg" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackage(dbOrRpmLocation source.Location, pd parsedData, distro *linux.Release) pkg.Package { +func newPackage(dbOrRpmLocation file.Location, pd parsedData, distro *linux.Release) pkg.Package { p := pkg.Package{ Name: pd.Name, Version: toELVersion(pd.RpmMetadata), Licenses: pkg.NewLicenseSet(pd.Licenses...), PURL: packageURL(pd.RpmMetadata, distro), - Locations: source.NewLocationSet(dbOrRpmLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(dbOrRpmLocation.WithAnnotation(pkg.EvidenceAnnotationKey, 
pkg.PrimaryEvidenceAnnotation)), Type: pkg.RpmPkg, MetadataType: pkg.RpmMetadataType, Metadata: pd.RpmMetadata, @@ -34,7 +34,7 @@ type parsedData struct { pkg.RpmMetadata } -func newParsedDataFromEntry(licenseLocation source.Location, entry rpmdb.PackageInfo, files []pkg.RpmdbFileRecord) parsedData { +func newParsedDataFromEntry(licenseLocation file.Location, entry rpmdb.PackageInfo, files []pkg.RpmdbFileRecord) parsedData { return parsedData{ Licenses: pkg.NewLicensesFromLocation(licenseLocation, entry.License), RpmMetadata: pkg.RpmMetadata{ diff --git a/syft/pkg/cataloger/rpm/parse_rpm.go b/syft/pkg/cataloger/rpm/parse_rpm.go index 6e866c5cabb..06c5f61451b 100644 --- a/syft/pkg/cataloger/rpm/parse_rpm.go +++ b/syft/pkg/cataloger/rpm/parse_rpm.go @@ -11,11 +11,10 @@ import ( "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // parseRpm parses a single RPM -func parseRpm(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseRpm(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { rpm, err := rpmutils.ReadRpm(reader) if err != nil { return nil, nil, fmt.Errorf("RPM file found but unable to read: %s (%w)", reader.Location.RealPath, err) diff --git a/syft/pkg/cataloger/rpm/parse_rpm_db.go b/syft/pkg/cataloger/rpm/parse_rpm_db.go index ee4d64b4f02..02106f62c35 100644 --- a/syft/pkg/cataloger/rpm/parse_rpm_db.go +++ b/syft/pkg/cataloger/rpm/parse_rpm_db.go @@ -14,11 +14,10 @@ import ( "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // parseRpmDb parses an "Packages" RPM DB and returns the Packages listed within it. 
-func parseRpmDB(resolver source.FileResolver, env *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseRpmDB(resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { f, err := os.CreateTemp("", internal.ApplicationName+"-rpmdb") if err != nil { return nil, nil, fmt.Errorf("failed to create temp rpmdb file: %w", err) @@ -90,7 +89,7 @@ func toELVersion(metadata pkg.RpmMetadata) string { return fmt.Sprintf("%s-%s", metadata.Version, metadata.Release) } -func extractRpmdbFileRecords(resolver source.FilePathResolver, entry rpmdb.PackageInfo) []pkg.RpmdbFileRecord { +func extractRpmdbFileRecords(resolver file.PathResolver, entry rpmdb.PackageInfo) []pkg.RpmdbFileRecord { var records = make([]pkg.RpmdbFileRecord, 0) files, err := entry.InstalledFiles() diff --git a/syft/pkg/cataloger/rpm/parse_rpm_db_test.go b/syft/pkg/cataloger/rpm/parse_rpm_db_test.go index b58a01744a5..dea087880e5 100644 --- a/syft/pkg/cataloger/rpm/parse_rpm_db_test.go +++ b/syft/pkg/cataloger/rpm/parse_rpm_db_test.go @@ -10,36 +10,35 @@ import ( "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) -var _ source.FileResolver = (*rpmdbTestFileResolverMock)(nil) +var _ file.Resolver = (*rpmdbTestFileResolverMock)(nil) type rpmdbTestFileResolverMock struct { ignorePaths bool } -func (r rpmdbTestFileResolverMock) FilesByExtension(extensions ...string) ([]source.Location, error) { +func (r rpmdbTestFileResolverMock) FilesByExtension(extensions ...string) ([]file.Location, error) { panic("not implemented") } -func (r rpmdbTestFileResolverMock) FilesByBasename(filenames ...string) ([]source.Location, error) { +func (r rpmdbTestFileResolverMock) FilesByBasename(filenames ...string) ([]file.Location, error) { panic("not implemented") } -func (r 
rpmdbTestFileResolverMock) FilesByBasenameGlob(globs ...string) ([]source.Location, error) { +func (r rpmdbTestFileResolverMock) FilesByBasenameGlob(globs ...string) ([]file.Location, error) { panic("not implemented") } -func (r rpmdbTestFileResolverMock) FileContentsByLocation(location source.Location) (io.ReadCloser, error) { +func (r rpmdbTestFileResolverMock) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { panic("not implemented") } -func (r rpmdbTestFileResolverMock) AllLocations() <-chan source.Location { +func (r rpmdbTestFileResolverMock) AllLocations() <-chan file.Location { panic("not implemented") } -func (r rpmdbTestFileResolverMock) FileMetadataByLocation(location source.Location) (source.FileMetadata, error) { +func (r rpmdbTestFileResolverMock) FileMetadataByLocation(location file.Location) (file.Metadata, error) { panic("not implemented") } @@ -53,34 +52,34 @@ func (r rpmdbTestFileResolverMock) HasPath(path string) bool { return !r.ignorePaths } -func (r *rpmdbTestFileResolverMock) FilesByPath(paths ...string) ([]source.Location, error) { +func (r *rpmdbTestFileResolverMock) FilesByPath(paths ...string) ([]file.Location, error) { if r.ignorePaths { // act as if no paths exist return nil, nil } // act as if all files exist - var locations = make([]source.Location, len(paths)) + var locations = make([]file.Location, len(paths)) for i, p := range paths { - locations[i] = source.NewLocation(p) + locations[i] = file.NewLocation(p) } return locations, nil } -func (r *rpmdbTestFileResolverMock) FilesByGlob(...string) ([]source.Location, error) { +func (r *rpmdbTestFileResolverMock) FilesByGlob(...string) ([]file.Location, error) { return nil, fmt.Errorf("not implemented") } -func (r *rpmdbTestFileResolverMock) RelativeFileByPath(source.Location, string) *source.Location { +func (r *rpmdbTestFileResolverMock) RelativeFileByPath(file.Location, string) *file.Location { panic(fmt.Errorf("not implemented")) return nil } -func (r 
*rpmdbTestFileResolverMock) FilesByMIMEType(...string) ([]source.Location, error) { +func (r *rpmdbTestFileResolverMock) FilesByMIMEType(...string) ([]file.Location, error) { return nil, fmt.Errorf("not implemented") } func TestParseRpmDB(t *testing.T) { - packagesLocation := source.NewLocation("test-fixtures/Packages") + packagesLocation := file.NewLocation("test-fixtures/Packages") tests := []struct { fixture string expected []pkg.Package @@ -95,7 +94,7 @@ func TestParseRpmDB(t *testing.T) { Name: "dive", Version: "0.9.2-1", PURL: "pkg:rpm/dive@0.9.2-1?arch=x86_64&upstream=dive-0.9.2-1.src.rpm", - Locations: source.NewLocationSet(packagesLocation), + Locations: file.NewLocationSet(file.NewLocation("test-fixtures/Packages")), Type: pkg.RpmPkg, MetadataType: pkg.RpmMetadataType, Licenses: pkg.NewLicenseSet( @@ -124,7 +123,7 @@ func TestParseRpmDB(t *testing.T) { Name: "dive", Version: "0.9.2-1", PURL: "pkg:rpm/dive@0.9.2-1?arch=x86_64&upstream=dive-0.9.2-1.src.rpm", - Locations: source.NewLocationSet(packagesLocation), + Locations: file.NewLocationSet(packagesLocation), Type: pkg.RpmPkg, MetadataType: pkg.RpmMetadataType, Licenses: pkg.NewLicenseSet( diff --git a/syft/pkg/cataloger/rpm/parse_rpm_manifest.go b/syft/pkg/cataloger/rpm/parse_rpm_manifest.go index ee8de71c6ba..c8110d6dab4 100644 --- a/syft/pkg/cataloger/rpm/parse_rpm_manifest.go +++ b/syft/pkg/cataloger/rpm/parse_rpm_manifest.go @@ -8,13 +8,13 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // Parses an RPM manifest file, as used in Mariner distroless containers, and returns the Packages listed -func parseRpmManifest(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseRpmManifest(_ file.Resolver, _ 
*generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { r := bufio.NewReader(reader) allPkgs := make([]pkg.Package, 0) @@ -52,7 +52,7 @@ func parseRpmManifest(_ source.FileResolver, _ *generic.Environment, reader sour // Each line is the output of : // rpm --query --all --query-format "%{NAME}\t%{VERSION}-%{RELEASE}\t%{INSTALLTIME}\t%{BUILDTIME}\t%{VENDOR}\t%{EPOCH}\t%{SIZE}\t%{ARCH}\t%{EPOCHNUM}\t%{SOURCERPM}\n" // https://github.com/microsoft/CBL-Mariner/blob/3df18fac373aba13a54bd02466e64969574f13af/toolkit/docs/how_it_works/5_misc.md?plain=1#L150 -func parseRpmManifestEntry(entry string, location source.Location) (*pkg.Package, error) { +func parseRpmManifestEntry(entry string, location file.Location) (*pkg.Package, error) { metadata, err := newMetadataFromManifestLine(entry) if err != nil { return nil, err diff --git a/syft/pkg/cataloger/rpm/parse_rpm_manifest_test.go b/syft/pkg/cataloger/rpm/parse_rpm_manifest_test.go index 64cca390aab..5f2c3e3b7b3 100644 --- a/syft/pkg/cataloger/rpm/parse_rpm_manifest_test.go +++ b/syft/pkg/cataloger/rpm/parse_rpm_manifest_test.go @@ -3,20 +3,20 @@ package rpm import ( "testing" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseRpmManifest(t *testing.T) { fixture := "test-fixtures/container-manifest-2" - location := source.NewLocation(fixture) + location := file.NewLocation(fixture) expected := []pkg.Package{ { Name: "mariner-release", Version: "2.0-12.cm2", PURL: "pkg:rpm/mariner-release@2.0-12.cm2?arch=noarch&upstream=mariner-release-2.0-12.cm2.src.rpm", - Locations: source.NewLocationSet(location), + Locations: file.NewLocationSet(location), Type: pkg.RpmPkg, MetadataType: pkg.RpmMetadataType, Metadata: pkg.RpmMetadata{ @@ -34,7 +34,7 @@ func TestParseRpmManifest(t *testing.T) { Name: "filesystem", Version: "1.1-9.cm2", PURL: 
"pkg:rpm/filesystem@1.1-9.cm2?arch=x86_64&upstream=filesystem-1.1-9.cm2.src.rpm", - Locations: source.NewLocationSet(location), + Locations: file.NewLocationSet(location), Type: pkg.RpmPkg, MetadataType: pkg.RpmMetadataType, Metadata: pkg.RpmMetadata{ @@ -52,7 +52,7 @@ func TestParseRpmManifest(t *testing.T) { Name: "glibc", Version: "2.35-2.cm2", PURL: "pkg:rpm/glibc@2.35-2.cm2?arch=x86_64&upstream=glibc-2.35-2.cm2.src.rpm", - Locations: source.NewLocationSet(location), + Locations: file.NewLocationSet(location), Type: pkg.RpmPkg, MetadataType: pkg.RpmMetadataType, Metadata: pkg.RpmMetadata{ @@ -70,7 +70,7 @@ func TestParseRpmManifest(t *testing.T) { Name: "openssl-libs", Version: "1.1.1k-15.cm2", PURL: "pkg:rpm/openssl-libs@1.1.1k-15.cm2?arch=x86_64&upstream=openssl-1.1.1k-15.cm2.src.rpm", - Locations: source.NewLocationSet(location), + Locations: file.NewLocationSet(location), Type: pkg.RpmPkg, MetadataType: pkg.RpmMetadataType, Metadata: pkg.RpmMetadata{ diff --git a/syft/pkg/cataloger/rpm/parse_rpm_test.go b/syft/pkg/cataloger/rpm/parse_rpm_test.go index 253d99f59dd..83e39528b6f 100644 --- a/syft/pkg/cataloger/rpm/parse_rpm_test.go +++ b/syft/pkg/cataloger/rpm/parse_rpm_test.go @@ -6,12 +6,11 @@ import ( "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseRpmFiles(t *testing.T) { - abcRpmLocation := source.NewLocation("abc-1.01-9.hg20160905.el7.x86_64.rpm") - zorkRpmLocation := source.NewLocation("zork-1.0.3-1.el7.x86_64.rpm") + abcRpmLocation := file.NewLocation("abc-1.01-9.hg20160905.el7.x86_64.rpm") + zorkRpmLocation := file.NewLocation("zork-1.0.3-1.el7.x86_64.rpm") tests := []struct { fixture string expected []pkg.Package @@ -23,7 +22,7 @@ func TestParseRpmFiles(t *testing.T) { Name: "abc", Version: "0:1.01-9.hg20160905.el7", PURL: 
"pkg:rpm/abc@1.01-9.hg20160905.el7?arch=x86_64&epoch=0&upstream=abc-1.01-9.hg20160905.el7.src.rpm", - Locations: source.NewLocationSet(abcRpmLocation), + Locations: file.NewLocationSet(file.NewLocation("abc-1.01-9.hg20160905.el7.x86_64.rpm")), FoundBy: "rpm-file-cataloger", Type: pkg.RpmPkg, MetadataType: pkg.RpmMetadataType, @@ -52,7 +51,7 @@ func TestParseRpmFiles(t *testing.T) { Name: "zork", Version: "0:1.0.3-1.el7", PURL: "pkg:rpm/zork@1.0.3-1.el7?arch=x86_64&epoch=0&upstream=zork-1.0.3-1.el7.src.rpm", - Locations: source.NewLocationSet(zorkRpmLocation), + Locations: file.NewLocationSet(zorkRpmLocation), FoundBy: "rpm-file-cataloger", Type: pkg.RpmPkg, MetadataType: pkg.RpmMetadataType, diff --git a/syft/pkg/cataloger/ruby/package.go b/syft/pkg/cataloger/ruby/package.go index 973d67350fb..86075274204 100644 --- a/syft/pkg/cataloger/ruby/package.go +++ b/syft/pkg/cataloger/ruby/package.go @@ -2,16 +2,16 @@ package ruby import ( "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newGemfileLockPackage(name, version string, locations ...source.Location) pkg.Package { +func newGemfileLockPackage(name, version string, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: name, Version: version, PURL: packageURL(name, version), - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), Language: pkg.Ruby, Type: pkg.GemPkg, } @@ -21,11 +21,11 @@ func newGemfileLockPackage(name, version string, locations ...source.Location) p return p } -func newGemspecPackage(m gemData, gemSpecLocation source.Location) pkg.Package { +func newGemspecPackage(m gemData, gemSpecLocation file.Location) pkg.Package { p := pkg.Package{ Name: m.Name, Version: m.Version, - Locations: source.NewLocationSet(gemSpecLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: 
file.NewLocationSet(gemSpecLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), Licenses: pkg.NewLicenseSet(pkg.NewLicensesFromLocation(gemSpecLocation, m.Licenses...)...), PURL: packageURL(m.Name, m.Version), Language: pkg.Ruby, diff --git a/syft/pkg/cataloger/ruby/parse_gemfile_lock.go b/syft/pkg/cataloger/ruby/parse_gemfile_lock.go index 884f1ea144b..f2bedb4b2a1 100644 --- a/syft/pkg/cataloger/ruby/parse_gemfile_lock.go +++ b/syft/pkg/cataloger/ruby/parse_gemfile_lock.go @@ -6,9 +6,9 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseGemFileLockEntries @@ -16,7 +16,7 @@ var _ generic.Parser = parseGemFileLockEntries var sectionsOfInterest = internal.NewStringSet("GEM", "GIT", "PATH", "PLUGIN SOURCE") // parseGemFileLockEntries is a parser function for Gemfile.lock contents, returning all Gems discovered. 
-func parseGemFileLockEntries(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseGemFileLockEntries(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package scanner := bufio.NewScanner(reader) diff --git a/syft/pkg/cataloger/ruby/parse_gemfile_lock_test.go b/syft/pkg/cataloger/ruby/parse_gemfile_lock_test.go index ef2a0378429..ad94283aae3 100644 --- a/syft/pkg/cataloger/ruby/parse_gemfile_lock_test.go +++ b/syft/pkg/cataloger/ruby/parse_gemfile_lock_test.go @@ -3,14 +3,14 @@ package ruby import ( "testing" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseGemfileLockEntries(t *testing.T) { fixture := "test-fixtures/Gemfile.lock" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) var expectedPkgs = []pkg.Package{ {Name: "actionmailer", Version: "4.1.1", PURL: "pkg:gem/actionmailer@4.1.1", Locations: locations, Language: pkg.Ruby, Type: pkg.GemPkg}, {Name: "actionpack", Version: "4.1.1", PURL: "pkg:gem/actionpack@4.1.1", Locations: locations, Language: pkg.Ruby, Type: pkg.GemPkg}, diff --git a/syft/pkg/cataloger/ruby/parse_gemspec.go b/syft/pkg/cataloger/ruby/parse_gemspec.go index 347caabbd01..97c2876bd81 100644 --- a/syft/pkg/cataloger/ruby/parse_gemspec.go +++ b/syft/pkg/cataloger/ruby/parse_gemspec.go @@ -11,9 +11,9 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseGemFileLockEntries @@ -64,7 +64,7 @@ func processList(s string) []string { 
return results } -func parseGemSpecEntries(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseGemSpecEntries(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package var fields = make(map[string]interface{}) scanner := bufio.NewScanner(reader) diff --git a/syft/pkg/cataloger/ruby/parse_gemspec_test.go b/syft/pkg/cataloger/ruby/parse_gemspec_test.go index 53cb59ecfa1..c320185cccc 100644 --- a/syft/pkg/cataloger/ruby/parse_gemspec_test.go +++ b/syft/pkg/cataloger/ruby/parse_gemspec_test.go @@ -3,15 +3,15 @@ package ruby import ( "testing" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseGemspec(t *testing.T) { fixture := "test-fixtures/bundler.gemspec" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) var expectedPkg = pkg.Package{ Name: "bundler", @@ -20,7 +20,7 @@ func TestParseGemspec(t *testing.T) { Locations: locations, Type: pkg.GemPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("MIT", file.NewLocation(fixture)), ), Language: pkg.Ruby, MetadataType: pkg.GemMetadataType, diff --git a/syft/pkg/cataloger/rust/cataloger_test.go b/syft/pkg/cataloger/rust/cataloger_test.go index 73b442c817b..303b88fd41d 100644 --- a/syft/pkg/cataloger/rust/cataloger_test.go +++ b/syft/pkg/cataloger/rust/cataloger_test.go @@ -3,9 +3,9 @@ package rust import ( "testing" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestNewAuditBinaryCataloger(t *testing.T) { @@ -16,7 +16,7 @@ 
func TestNewAuditBinaryCataloger(t *testing.T) { Version: "0.1.0", PURL: "pkg:cargo/auditable@0.1.0", FoundBy: "cargo-auditable-binary-cataloger", - Locations: source.NewLocationSet(source.NewVirtualLocation("/hello-auditable", "/hello-auditable")), + Locations: file.NewLocationSet(file.NewVirtualLocation("/hello-auditable", "/hello-auditable")), Language: pkg.Rust, Type: pkg.RustPkg, MetadataType: pkg.RustCargoPackageMetadataType, @@ -31,7 +31,7 @@ func TestNewAuditBinaryCataloger(t *testing.T) { Version: "0.1.0", PURL: "pkg:cargo/hello-auditable@0.1.0", FoundBy: "cargo-auditable-binary-cataloger", - Locations: source.NewLocationSet(source.NewVirtualLocation("/hello-auditable", "/hello-auditable")), + Locations: file.NewLocationSet(file.NewVirtualLocation("/hello-auditable", "/hello-auditable")), Language: pkg.Rust, Type: pkg.RustPkg, MetadataType: pkg.RustCargoPackageMetadataType, diff --git a/syft/pkg/cataloger/rust/package.go b/syft/pkg/cataloger/rust/package.go index 1d661bce3b5..8787c5153bf 100644 --- a/syft/pkg/cataloger/rust/package.go +++ b/syft/pkg/cataloger/rust/package.go @@ -4,16 +4,16 @@ import ( "github.com/microsoft/go-rustaudit" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) // Pkg returns the standard `pkg.Package` representation of the package referenced within the Cargo.lock metadata. 
-func newPackageFromCargoMetadata(m pkg.CargoPackageMetadata, locations ...source.Location) pkg.Package { +func newPackageFromCargoMetadata(m pkg.CargoPackageMetadata, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: m.Name, Version: m.Version, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(m.Name, m.Version), Language: pkg.Rust, Type: pkg.RustPkg, @@ -26,7 +26,7 @@ func newPackageFromCargoMetadata(m pkg.CargoPackageMetadata, locations ...source return p } -func newPackagesFromAudit(location source.Location, versionInfo rustaudit.VersionInfo) []pkg.Package { +func newPackagesFromAudit(location file.Location, versionInfo rustaudit.VersionInfo) []pkg.Package { var pkgs []pkg.Package for _, dep := range versionInfo.Packages { @@ -40,14 +40,14 @@ func newPackagesFromAudit(location source.Location, versionInfo rustaudit.Versio return pkgs } -func newPackageFromAudit(dep *rustaudit.Package, locations ...source.Location) pkg.Package { +func newPackageFromAudit(dep *rustaudit.Package, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: dep.Name, Version: dep.Version, PURL: packageURL(dep.Name, dep.Version), Language: pkg.Rust, Type: pkg.RustPkg, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), MetadataType: pkg.RustCargoPackageMetadataType, Metadata: pkg.CargoPackageMetadata{ Name: dep.Name, diff --git a/syft/pkg/cataloger/rust/parse_audit_binary.go b/syft/pkg/cataloger/rust/parse_audit_binary.go index 7c7e3ad54ec..de894006b56 100644 --- a/syft/pkg/cataloger/rust/parse_audit_binary.go +++ b/syft/pkg/cataloger/rust/parse_audit_binary.go @@ -7,14 +7,14 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" "github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader" 
- "github.com/anchore/syft/syft/source" ) // Catalog identifies executables then attempts to read Rust dependency information from them -func parseAuditBinary(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseAuditBinary(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package unionReader, err := unionreader.GetUnionReader(reader.ReadCloser) diff --git a/syft/pkg/cataloger/rust/parse_cargo_lock.go b/syft/pkg/cataloger/rust/parse_cargo_lock.go index 0e9d582a7ed..cd001728685 100644 --- a/syft/pkg/cataloger/rust/parse_cargo_lock.go +++ b/syft/pkg/cataloger/rust/parse_cargo_lock.go @@ -6,9 +6,9 @@ import ( "github.com/pelletier/go-toml" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseCargoLock @@ -18,7 +18,7 @@ type cargoLockFile struct { } // parseCargoLock is a parser function for Cargo.lock contents, returning all rust cargo crates discovered. 
-func parseCargoLock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseCargoLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { tree, err := toml.LoadReader(reader) if err != nil { return nil, nil, fmt.Errorf("unable to load Cargo.lock for parsing: %w", err) diff --git a/syft/pkg/cataloger/rust/parse_cargo_lock_test.go b/syft/pkg/cataloger/rust/parse_cargo_lock_test.go index d05f62d6245..fb4ed7427ab 100644 --- a/syft/pkg/cataloger/rust/parse_cargo_lock_test.go +++ b/syft/pkg/cataloger/rust/parse_cargo_lock_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseCargoLock(t *testing.T) { fixture := "test-fixtures/Cargo.lock" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { Name: "ansi_term", diff --git a/syft/pkg/cataloger/sbom/cataloger.go b/syft/pkg/cataloger/sbom/cataloger.go index c66c9940ab9..a08c2c2a942 100644 --- a/syft/pkg/cataloger/sbom/cataloger.go +++ b/syft/pkg/cataloger/sbom/cataloger.go @@ -3,10 +3,10 @@ package sbom import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/formats" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) const catalogerName = "sbom-cataloger" @@ -29,7 +29,7 @@ func NewSBOMCataloger() *generic.Cataloger { ) } -func parseSBOM(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func 
parseSBOM(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { s, _, err := formats.Decode(reader) if err != nil { return nil, nil, err @@ -47,7 +47,7 @@ func parseSBOM(_ source.FileResolver, _ *generic.Environment, reader source.Loca // Why not keep the original list of locations? Since the "locations" field is meant to capture // where there is evidence of this file, and the catalogers have not run against any file other than, // the SBOM, this is the only location that is relevant for this cataloger. - p.Locations = source.NewLocationSet( + p.Locations = file.NewLocationSet( reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), ) p.FoundBy = catalogerName diff --git a/syft/pkg/cataloger/sbom/cataloger_test.go b/syft/pkg/cataloger/sbom/cataloger_test.go index a2226f80e7c..46332a2a745 100644 --- a/syft/pkg/cataloger/sbom/cataloger_test.go +++ b/syft/pkg/cataloger/sbom/cataloger_test.go @@ -7,11 +7,11 @@ import ( "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/cpe" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/formats/syftjson" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" "github.com/anchore/syft/syft/sbom" - "github.com/anchore/syft/syft/source" ) func mustCPEs(s ...string) (c []cpe.CPE) { @@ -37,7 +37,7 @@ func Test_parseSBOM(t *testing.T) { Name: "alpine-baselayout", Version: "3.2.0-r23", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet(pkg.NewLicense("GPL-2.0-only")), FoundBy: "sbom-cataloger", PURL: "pkg:apk/alpine/alpine-baselayout@3.2.0-r23?arch=x86_64&upstream=alpine-baselayout&distro=alpine-3.16.3", @@ -54,7 +54,7 @@ func Test_parseSBOM(t *testing.T) { Name: "alpine-baselayout-data", Version: "3.2.0-r23", Type: "apk", - 
Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet(pkg.NewLicense("GPL-2.0-only")), FoundBy: "sbom-cataloger", PURL: "pkg:apk/alpine/alpine-baselayout-data@3.2.0-r23?arch=x86_64&upstream=alpine-baselayout&distro=alpine-3.16.3", @@ -75,7 +75,7 @@ func Test_parseSBOM(t *testing.T) { Name: "alpine-keys", Version: "2.4-r1", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet(pkg.NewLicense("MIT")), FoundBy: "sbom-cataloger", PURL: "pkg:apk/alpine/alpine-keys@2.4-r1?arch=x86_64&upstream=alpine-keys&distro=alpine-3.16.3", @@ -92,7 +92,7 @@ func Test_parseSBOM(t *testing.T) { Name: "apk-tools", Version: "2.12.9-r3", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet(pkg.NewLicense("GPL-2.0-only")), FoundBy: "sbom-cataloger", PURL: "pkg:apk/alpine/apk-tools@2.12.9-r3?arch=x86_64&upstream=apk-tools&distro=alpine-3.16.3", @@ -109,7 +109,7 @@ func Test_parseSBOM(t *testing.T) { Name: "busybox", Version: "1.35.0-r17", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet(pkg.NewLicense("GPL-2.0-only")), FoundBy: "sbom-cataloger", PURL: "pkg:apk/alpine/busybox@1.35.0-r17?arch=x86_64&upstream=busybox&distro=alpine-3.16.3", @@ -121,7 +121,7 @@ func Test_parseSBOM(t *testing.T) { Name: "ca-certificates-bundle", Version: "20220614-r0", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet( pkg.NewLicense("MPL-2.0"), pkg.NewLicense("MIT"), @@ 
-145,7 +145,7 @@ func Test_parseSBOM(t *testing.T) { Name: "libc-utils", Version: "0.7.2-r3", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet( pkg.NewLicense("BSD-2-Clause"), pkg.NewLicense("BSD-3-Clause"), @@ -165,7 +165,7 @@ func Test_parseSBOM(t *testing.T) { Name: "libcrypto1.1", Version: "1.1.1s-r0", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet(pkg.NewLicense("OpenSSL")), // SPDX expression is not set FoundBy: "sbom-cataloger", PURL: "pkg:apk/alpine/libcrypto1.1@1.1.1s-r0?arch=x86_64&upstream=openssl&distro=alpine-3.16.3", @@ -177,7 +177,7 @@ func Test_parseSBOM(t *testing.T) { Name: "libssl1.1", Version: "1.1.1s-r0", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet(pkg.NewLicense("OpenSSL")), // SPDX expression is not set FoundBy: "sbom-cataloger", PURL: "pkg:apk/alpine/libssl1.1@1.1.1s-r0?arch=x86_64&upstream=openssl&distro=alpine-3.16.3", @@ -189,7 +189,7 @@ func Test_parseSBOM(t *testing.T) { Name: "musl", Version: "1.2.3-r1", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet(pkg.NewLicense("MIT")), // SPDX expression is not set FoundBy: "sbom-cataloger", PURL: "pkg:apk/alpine/musl@1.2.3-r1?arch=x86_64&upstream=musl&distro=alpine-3.16.3", @@ -201,7 +201,7 @@ func Test_parseSBOM(t *testing.T) { Name: "musl-utils", Version: "1.2.3-r1", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet( 
pkg.NewLicense("MIT"), pkg.NewLicense("BSD"), @@ -222,7 +222,7 @@ func Test_parseSBOM(t *testing.T) { Name: "scanelf", Version: "1.3.4-r0", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet( pkg.NewLicense("GPL-2.0-only"), ), @@ -236,7 +236,7 @@ func Test_parseSBOM(t *testing.T) { Name: "ssl_client", Version: "1.35.0-r17", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet( pkg.NewLicense("GPL-2.0-only"), ), @@ -255,7 +255,7 @@ func Test_parseSBOM(t *testing.T) { Name: "zlib", Version: "1.2.12-r3", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet( pkg.NewLicense("Zlib"), ), @@ -267,9 +267,9 @@ func Test_parseSBOM(t *testing.T) { }, } - apkgdbLocation := source.NewLocationSet(source.Location{ - LocationData: source.LocationData{ - Coordinates: source.Coordinates{ + apkgdbLocation := file.NewLocationSet(file.Location{ + LocationData: file.LocationData{ + Coordinates: file.Coordinates{ RealPath: "/lib/apk/db/installed", FileSystemID: "sha256:e5e13b0c77cbb769548077189c3da2f0a764ceca06af49d8d558e759f5c232bd", }, @@ -359,7 +359,7 @@ func Test_parseSBOM(t *testing.T) { }, { From: libSSL, - To: source.Coordinates{ + To: file.Coordinates{ RealPath: "/lib/libssl.so.1.1", FileSystemID: "sha256:e5e13b0c77cbb769548077189c3da2f0a764ceca06af49d8d558e759f5c232bd", }, @@ -372,7 +372,7 @@ func Test_parseSBOM(t *testing.T) { }, { From: baseLayout, - To: source.Coordinates{ + To: file.Coordinates{ RealPath: "/etc/profile.d/color_prompt.sh.disabled", FileSystemID: "sha256:e5e13b0c77cbb769548077189c3da2f0a764ceca06af49d8d558e759f5c232bd", }, @@ -380,7 +380,7 @@ func Test_parseSBOM(t 
*testing.T) { }, { From: baseLayout, - To: source.Coordinates{ + To: file.Coordinates{ RealPath: "/etc/modprobe.d/kms.conf", FileSystemID: "sha256:e5e13b0c77cbb769548077189c3da2f0a764ceca06af49d8d558e759f5c232bd", }, @@ -396,7 +396,7 @@ func Test_parseSBOM(t *testing.T) { for _, p := range expectedPkgs { expectedRelationships = append(expectedRelationships, artifact.Relationship{ From: p, - To: source.Coordinates{ + To: file.Coordinates{ RealPath: "sbom.syft.json", }, Type: artifact.DescribedByRelationship, diff --git a/syft/pkg/cataloger/swift/package.go b/syft/pkg/cataloger/swift/package.go index 0e1c1ce4a0a..ad6416e64ae 100644 --- a/syft/pkg/cataloger/swift/package.go +++ b/syft/pkg/cataloger/swift/package.go @@ -2,16 +2,16 @@ package swift import ( "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackage(name, version, hash string, locations ...source.Location) pkg.Package { +func newPackage(name, version, hash string, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: name, Version: version, PURL: packageURL(name, version), - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), Type: pkg.CocoapodsPkg, Language: pkg.Swift, MetadataType: pkg.CocoapodsMetadataType, diff --git a/syft/pkg/cataloger/swift/parse_podfile_lock.go b/syft/pkg/cataloger/swift/parse_podfile_lock.go index afff41ae9b3..58a58c4643f 100644 --- a/syft/pkg/cataloger/swift/parse_podfile_lock.go +++ b/syft/pkg/cataloger/swift/parse_podfile_lock.go @@ -8,9 +8,9 @@ import ( "gopkg.in/yaml.v3" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parsePodfileLock @@ -25,7 +25,7 @@ type podfileLock struct { } // parsePodfileLock is a parser function for 
Podfile.lock contents, returning all cocoapods pods discovered. -func parsePodfileLock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parsePodfileLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { bytes, err := io.ReadAll(reader) if err != nil { return nil, nil, fmt.Errorf("unable to read file: %w", err) diff --git a/syft/pkg/cataloger/swift/parse_podfile_lock_test.go b/syft/pkg/cataloger/swift/parse_podfile_lock_test.go index ef4c7d2c444..53b6dfd12e4 100644 --- a/syft/pkg/cataloger/swift/parse_podfile_lock_test.go +++ b/syft/pkg/cataloger/swift/parse_podfile_lock_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParsePodfileLock(t *testing.T) { fixture := "test-fixtures/Podfile.lock" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { Name: "GlossButtonNode", diff --git a/syft/pkg/java_metadata.go b/syft/pkg/java_metadata.go index 2e134bf1125..12b9c5c534e 100644 --- a/syft/pkg/java_metadata.go +++ b/syft/pkg/java_metadata.go @@ -32,6 +32,7 @@ type PomProperties struct { GroupID string `mapstructure:"groupId" json:"groupId" cyclonedx:"groupID"` ArtifactID string `mapstructure:"artifactId" json:"artifactId" cyclonedx:"artifactID"` Version string `mapstructure:"version" json:"version"` + Scope string `mapstructure:"scope" json:"scope,omitempty"` Extra map[string]string `mapstructure:",remain" json:"extraFields,omitempty"` } diff --git a/syft/pkg/license.go b/syft/pkg/license.go index 0e0a3f04b99..6e681da6348 100644 --- a/syft/pkg/license.go +++ b/syft/pkg/license.go @@ -7,8 +7,8 
@@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/license" - "github.com/anchore/syft/syft/source" ) var _ sort.Interface = (*Licenses)(nil) @@ -27,7 +27,7 @@ type License struct { SPDXExpression string `json:"spdxExpression"` Type license.Type `json:"type"` URLs internal.StringSet `hash:"ignore"` - Locations source.LocationSet `hash:"ignore"` + Locations file.LocationSet `hash:"ignore"` } type Licenses []License @@ -62,7 +62,7 @@ func (l Licenses) Swap(i, j int) { func NewLicense(value string) License { spdxExpression, err := license.ParseExpression(value) if err != nil { - log.Trace("unable to parse license expression: %w", err) + log.Trace("unable to parse license expression for %q: %w", value, err) } return License{ @@ -70,7 +70,7 @@ func NewLicense(value string) License { SPDXExpression: spdxExpression, Type: license.Declared, URLs: internal.NewStringSet(), - Locations: source.NewLocationSet(), + Locations: file.NewLocationSet(), } } @@ -85,7 +85,7 @@ func NewLicenseFromType(value string, t license.Type) License { SPDXExpression: spdxExpression, Type: t, URLs: internal.NewStringSet(), - Locations: source.NewLocationSet(), + Locations: file.NewLocationSet(), } } @@ -96,7 +96,7 @@ func NewLicensesFromValues(values ...string) (licenses []License) { return } -func NewLicensesFromLocation(location source.Location, values ...string) (licenses []License) { +func NewLicensesFromLocation(location file.Location, values ...string) (licenses []License) { for _, v := range values { if v == "" { continue @@ -106,7 +106,7 @@ func NewLicensesFromLocation(location source.Location, values ...string) (licens return } -func NewLicenseFromLocations(value string, locations ...source.Location) License { +func NewLicenseFromLocations(value string, locations ...file.Location) License { l := NewLicense(value) for _, loc := range locations { 
l.Locations.Add(loc) diff --git a/syft/pkg/license_set_test.go b/syft/pkg/license_set_test.go index 16abd83a8a1..7125c5411b8 100644 --- a/syft/pkg/license_set_test.go +++ b/syft/pkg/license_set_test.go @@ -6,8 +6,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/anchore/syft/internal" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/license" - "github.com/anchore/syft/syft/source" ) func TestLicenseSet_Add(t *testing.T) { @@ -58,15 +58,15 @@ func TestLicenseSet_Add(t *testing.T) { { name: "deduplicate licenses with locations", licenses: []License{ - NewLicenseFromLocations("MIT", source.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "1"})), - NewLicenseFromLocations("MIT", source.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "1"})), - NewLicenseFromLocations("MIT", source.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "2"})), + NewLicenseFromLocations("MIT", file.NewLocationFromCoordinates(file.Coordinates{RealPath: "/place", FileSystemID: "1"})), + NewLicenseFromLocations("MIT", file.NewLocationFromCoordinates(file.Coordinates{RealPath: "/place", FileSystemID: "1"})), + NewLicenseFromLocations("MIT", file.NewLocationFromCoordinates(file.Coordinates{RealPath: "/place", FileSystemID: "2"})), }, want: []License{ NewLicenseFromLocations( "MIT", - source.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "1"}), - source.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "2"}), + file.NewLocationFromCoordinates(file.Coordinates{RealPath: "/place", FileSystemID: "1"}), + file.NewLocationFromCoordinates(file.Coordinates{RealPath: "/place", FileSystemID: "2"}), ), }, }, @@ -74,14 +74,14 @@ func TestLicenseSet_Add(t *testing.T) { name: "same licenses with different locations", licenses: []License{ NewLicense("MIT"), - NewLicenseFromLocations("MIT", 
source.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "2"})), - NewLicenseFromLocations("MIT", source.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "1"})), + NewLicenseFromLocations("MIT", file.NewLocationFromCoordinates(file.Coordinates{RealPath: "/place", FileSystemID: "2"})), + NewLicenseFromLocations("MIT", file.NewLocationFromCoordinates(file.Coordinates{RealPath: "/place", FileSystemID: "1"})), }, want: []License{ NewLicenseFromLocations( "MIT", - source.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "1"}), - source.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "2"}), + file.NewLocationFromCoordinates(file.Coordinates{RealPath: "/place", FileSystemID: "1"}), + file.NewLocationFromCoordinates(file.Coordinates{RealPath: "/place", FileSystemID: "2"}), ), }, }, @@ -89,7 +89,7 @@ func TestLicenseSet_Add(t *testing.T) { name: "same license from different sources", licenses: []License{ NewLicense("MIT"), - NewLicenseFromLocations("MIT", source.NewLocation("/place")), + NewLicenseFromLocations("MIT", file.NewLocation("/place")), NewLicenseFromURLs("MIT", "https://example.com"), }, want: []License{ @@ -98,7 +98,7 @@ func TestLicenseSet_Add(t *testing.T) { SPDXExpression: "MIT", Type: license.Declared, URLs: internal.NewStringSet("https://example.com"), - Locations: source.NewLocationSet(source.NewLocation("/place")), + Locations: file.NewLocationSet(file.NewLocation("/place")), }, }, }, @@ -107,7 +107,7 @@ func TestLicenseSet_Add(t *testing.T) { licenses: []License{ NewLicenseFromType("MIT", license.Concluded), NewLicenseFromType("MIT", license.Declared), - NewLicenseFromLocations("MIT", source.NewLocation("/place")), + NewLicenseFromLocations("MIT", file.NewLocation("/place")), NewLicenseFromURLs("MIT", "https://example.com"), }, want: []License{ @@ -116,14 +116,14 @@ func TestLicenseSet_Add(t *testing.T) { SPDXExpression: "MIT", Type: 
license.Concluded, URLs: internal.NewStringSet(), - Locations: source.NewLocationSet(), + Locations: file.NewLocationSet(), }, { Value: "MIT", SPDXExpression: "MIT", Type: license.Declared, URLs: internal.NewStringSet("https://example.com"), - Locations: source.NewLocationSet(source.NewLocation("/place")), + Locations: file.NewLocationSet(file.NewLocation("/place")), }, }, }, diff --git a/syft/pkg/license_test.go b/syft/pkg/license_test.go index f3456f5aa21..4e9e16d943f 100644 --- a/syft/pkg/license_test.go +++ b/syft/pkg/license_test.go @@ -8,14 +8,14 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/syft/syft/artifact" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) func Test_Hash(t *testing.T) { - loc1 := source.NewLocation("place!") + loc1 := file.NewLocation("place!") loc1.FileSystemID = "fs1" - loc2 := source.NewLocation("place!") + loc2 := file.NewLocation("place!") loc2.FileSystemID = "fs2" // important! there is a different file system ID lic1 := NewLicenseFromLocations("MIT", loc1) @@ -47,44 +47,44 @@ func Test_Sort(t *testing.T) { { name: "single", licenses: []License{ - NewLicenseFromLocations("MIT", source.NewLocation("place!")), + NewLicenseFromLocations("MIT", file.NewLocation("place!")), }, expected: []License{ - NewLicenseFromLocations("MIT", source.NewLocation("place!")), + NewLicenseFromLocations("MIT", file.NewLocation("place!")), }, }, { name: "multiple", licenses: []License{ - NewLicenseFromLocations("MIT", source.NewLocation("place!")), + NewLicenseFromLocations("MIT", file.NewLocation("place!")), NewLicenseFromURLs("MIT", "https://github.com/anchore/syft/blob/main/LICENSE"), - NewLicenseFromLocations("Apache", source.NewLocation("area!")), - NewLicenseFromLocations("gpl2+", source.NewLocation("area!")), + NewLicenseFromLocations("Apache", file.NewLocation("area!")), + NewLicenseFromLocations("gpl2+", file.NewLocation("area!")), }, expected: Licenses{ - NewLicenseFromLocations("Apache", 
source.NewLocation("area!")), + NewLicenseFromLocations("Apache", file.NewLocation("area!")), NewLicenseFromURLs("MIT", "https://github.com/anchore/syft/blob/main/LICENSE"), - NewLicenseFromLocations("MIT", source.NewLocation("place!")), - NewLicenseFromLocations("gpl2+", source.NewLocation("area!")), + NewLicenseFromLocations("MIT", file.NewLocation("place!")), + NewLicenseFromLocations("gpl2+", file.NewLocation("area!")), }, }, { name: "multiple with location variants", licenses: []License{ - NewLicenseFromLocations("MIT", source.NewLocation("place!")), - NewLicenseFromLocations("MIT", source.NewLocation("park!")), + NewLicenseFromLocations("MIT", file.NewLocation("place!")), + NewLicenseFromLocations("MIT", file.NewLocation("park!")), NewLicense("MIT"), NewLicense("AAL"), NewLicense("Adobe-2006"), - NewLicenseFromLocations("Apache", source.NewLocation("area!")), + NewLicenseFromLocations("Apache", file.NewLocation("area!")), }, expected: Licenses{ NewLicense("AAL"), NewLicense("Adobe-2006"), - NewLicenseFromLocations("Apache", source.NewLocation("area!")), + NewLicenseFromLocations("Apache", file.NewLocation("area!")), NewLicense("MIT"), - NewLicenseFromLocations("MIT", source.NewLocation("park!")), - NewLicenseFromLocations("MIT", source.NewLocation("place!")), + NewLicenseFromLocations("MIT", file.NewLocation("park!")), + NewLicenseFromLocations("MIT", file.NewLocation("place!")), }, }, } diff --git a/syft/pkg/package.go b/syft/pkg/package.go index 6d028f20c68..c72e57d34ae 100644 --- a/syft/pkg/package.go +++ b/syft/pkg/package.go @@ -11,25 +11,24 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/cpe" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) // Package represents an application or library that has been bundled into a distributable format. // TODO: if we ignore FoundBy for ID generation should we merge the field to show it was found in two places? 
-// TODO: should cyclonedx tags exist on the struct? Why don't we use the model.Package type? type Package struct { - id artifact.ID `hash:"ignore"` - Name string // the package name - Version string // the version of the package - FoundBy string `hash:"ignore" cyclonedx:"foundBy"` // the specific cataloger that discovered this package - Locations source.LocationSet // the locations that lead to the discovery of this package (note: this is not necessarily the locations that make up this package) - Licenses LicenseSet // licenses discovered with the package metadata - Language Language `hash:"ignore" cyclonedx:"language"` // the language ecosystem this package belongs to (e.g. JavaScript, Python, etc) - Type Type `cyclonedx:"type"` // the package type (e.g. Npm, Yarn, Python, Rpm, Deb, etc) - CPEs []cpe.CPE `hash:"ignore"` // all possible Common Platform Enumerators (note: this is NOT included in the definition of the ID since all fields on a CPE are derived from other fields) - PURL string `hash:"ignore"` // the Package URL (see https://github.com/package-url/purl-spec) - MetadataType MetadataType `cyclonedx:"metadataType"` // the shape of the additional data in the "metadata" field - Metadata interface{} // additional data found while parsing the package source + id artifact.ID `hash:"ignore"` + Name string // the package name + Version string // the version of the package + FoundBy string `hash:"ignore" cyclonedx:"foundBy"` // the specific cataloger that discovered this package + Locations file.LocationSet // the locations that lead to the discovery of this package (note: this is not necessarily the locations that make up this package) + Licenses LicenseSet // licenses discovered with the package metadata + Language Language `hash:"ignore" cyclonedx:"language"` // the language ecosystem this package belongs to (e.g. JavaScript, Python, etc) + Type Type `cyclonedx:"type"` // the package type (e.g. 
Npm, Yarn, Python, Rpm, Deb, etc) + CPEs []cpe.CPE `hash:"ignore"` // all possible Common Platform Enumerators (note: this is NOT included in the definition of the ID since all fields on a CPE are derived from other fields) + PURL string `hash:"ignore"` // the Package URL (see https://github.com/package-url/purl-spec) + MetadataType MetadataType `cyclonedx:"metadataType"` // the shape of the additional data in the "metadata" field + Metadata interface{} // additional data found while parsing the package source } func (p *Package) OverrideID(id artifact.ID) { diff --git a/syft/pkg/package_test.go b/syft/pkg/package_test.go index 7c461a680b8..24b8d37fd50 100644 --- a/syft/pkg/package_test.go +++ b/syft/pkg/package_test.go @@ -8,12 +8,12 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/syft/syft/cpe" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) func TestIDUniqueness(t *testing.T) { - originalLocation := source.NewVirtualLocationFromCoordinates( - source.Coordinates{ + originalLocation := file.NewVirtualLocationFromCoordinates( + file.Coordinates{ RealPath: "39.0742° N, 21.8243° E", FileSystemID: "Earth", }, @@ -24,7 +24,7 @@ func TestIDUniqueness(t *testing.T) { Name: "pi", Version: "3.14", FoundBy: "Archimedes", - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( originalLocation, ), Licenses: NewLicenseSet( @@ -101,8 +101,8 @@ func TestIDUniqueness(t *testing.T) { { name: "location is reflected", transform: func(pkg Package) Package { - locations := source.NewLocationSet(pkg.Locations.ToSlice()...) - locations.Add(source.NewLocation("/somewhere/new")) + locations := file.NewLocationSet(pkg.Locations.ToSlice()...) 
+ locations.Add(file.NewLocation("/somewhere/new")) pkg.Locations = locations return pkg }, @@ -122,7 +122,7 @@ func TestIDUniqueness(t *testing.T) { newLocation := originalLocation newLocation.FileSystemID = "Mars" - pkg.Locations = source.NewLocationSet(newLocation) + pkg.Locations = file.NewLocationSet(newLocation) return pkg }, expectedIDComparison: assert.Equal, @@ -133,7 +133,7 @@ func TestIDUniqueness(t *testing.T) { newLocation := originalLocation newLocation.FileSystemID = "Mars" - locations := source.NewLocationSet(pkg.Locations.ToSlice()...) + locations := file.NewLocationSet(pkg.Locations.ToSlice()...) locations.Add(newLocation, originalLocation) pkg.Locations = locations @@ -236,8 +236,8 @@ func TestIDUniqueness(t *testing.T) { } func TestPackage_Merge(t *testing.T) { - originalLocation := source.NewVirtualLocationFromCoordinates( - source.Coordinates{ + originalLocation := file.NewVirtualLocationFromCoordinates( + file.Coordinates{ RealPath: "39.0742° N, 21.8243° E", FileSystemID: "Earth", }, @@ -259,7 +259,7 @@ func TestPackage_Merge(t *testing.T) { Name: "pi", Version: "3.14", FoundBy: "Archimedes", - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( originalLocation, ), Language: "math", @@ -282,7 +282,7 @@ func TestPackage_Merge(t *testing.T) { Name: "pi", Version: "3.14", FoundBy: "Archimedes", - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( similarLocation, // NOTE: difference; we have a different layer but the same path ), Language: "math", @@ -305,7 +305,7 @@ func TestPackage_Merge(t *testing.T) { Name: "pi", Version: "3.14", FoundBy: "Archimedes", - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( originalLocation, similarLocation, // NOTE: merge! 
), @@ -333,7 +333,7 @@ func TestPackage_Merge(t *testing.T) { Name: "pi", Version: "3.14", FoundBy: "Archimedes", - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( originalLocation, ), Language: "math", @@ -356,7 +356,7 @@ func TestPackage_Merge(t *testing.T) { Name: "pi-DIFFERENT", // difference Version: "3.14", FoundBy: "Archimedes", - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( originalLocation, ), Language: "math", @@ -395,7 +395,7 @@ func TestPackage_Merge(t *testing.T) { if diff := cmp.Diff(*tt.expected, tt.subject, cmp.AllowUnexported(Package{}), cmp.Comparer( - func(x, y source.LocationSet) bool { + func(x, y file.LocationSet) bool { xs := x.ToSlice() ys := y.ToSlice() @@ -442,7 +442,7 @@ func licenseComparer(x, y License) bool { return cmp.Equal(x, y, cmp.Comparer(locationComparer)) } -func locationComparer(x, y source.Location) bool { +func locationComparer(x, y file.Location) bool { return cmp.Equal(x.Coordinates, y.Coordinates) && cmp.Equal(x.VirtualPath, y.VirtualPath) } diff --git a/syft/pkg/relationships_by_file_ownership_test.go b/syft/pkg/relationships_by_file_ownership_test.go index fdef2897171..f34cb8be4cb 100644 --- a/syft/pkg/relationships_by_file_ownership_test.go +++ b/syft/pkg/relationships_by_file_ownership_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/anchore/syft/syft/artifact" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) func TestOwnershipByFilesRelationship(t *testing.T) { @@ -19,9 +19,9 @@ func TestOwnershipByFilesRelationship(t *testing.T) { name: "owns-by-real-path", setup: func(t testing.TB) ([]Package, []artifact.Relationship) { parent := Package{ - Locations: source.NewLocationSet( - source.NewVirtualLocation("/a/path", "/another/path"), - source.NewVirtualLocation("/b/path", "/bee/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/a/path", "/another/path"), + file.NewVirtualLocation("/b/path", 
"/bee/path"), ), Type: RpmPkg, MetadataType: RpmMetadataType, @@ -36,9 +36,9 @@ func TestOwnershipByFilesRelationship(t *testing.T) { parent.SetID() child := Package{ - Locations: source.NewLocationSet( - source.NewVirtualLocation("/c/path", "/another/path"), - source.NewVirtualLocation("/d/path", "/another/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/c/path", "/another/path"), + file.NewVirtualLocation("/d/path", "/another/path"), ), Type: NpmPkg, } @@ -62,9 +62,9 @@ func TestOwnershipByFilesRelationship(t *testing.T) { name: "owns-by-virtual-path", setup: func(t testing.TB) ([]Package, []artifact.Relationship) { parent := Package{ - Locations: source.NewLocationSet( - source.NewVirtualLocation("/a/path", "/some/other/path"), - source.NewVirtualLocation("/b/path", "/bee/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/a/path", "/some/other/path"), + file.NewVirtualLocation("/b/path", "/bee/path"), ), Type: RpmPkg, MetadataType: RpmMetadataType, @@ -79,9 +79,9 @@ func TestOwnershipByFilesRelationship(t *testing.T) { parent.SetID() child := Package{ - Locations: source.NewLocationSet( - source.NewVirtualLocation("/c/path", "/another/path"), - source.NewLocation("/d/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/c/path", "/another/path"), + file.NewLocation("/d/path"), ), Type: NpmPkg, } @@ -104,9 +104,9 @@ func TestOwnershipByFilesRelationship(t *testing.T) { name: "ignore-empty-path", setup: func(t testing.TB) ([]Package, []artifact.Relationship) { parent := Package{ - Locations: source.NewLocationSet( - source.NewVirtualLocation("/a/path", "/some/other/path"), - source.NewVirtualLocation("/b/path", "/bee/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/a/path", "/some/other/path"), + file.NewVirtualLocation("/b/path", "/bee/path"), ), Type: RpmPkg, MetadataType: RpmMetadataType, @@ -122,9 +122,9 @@ func TestOwnershipByFilesRelationship(t *testing.T) { parent.SetID() child := 
Package{ - Locations: source.NewLocationSet( - source.NewVirtualLocation("/c/path", "/another/path"), - source.NewLocation("/d/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/c/path", "/another/path"), + file.NewLocation("/d/path"), ), Type: NpmPkg, } diff --git a/syft/pkg/relationships_evident_by_test.go b/syft/pkg/relationships_evident_by_test.go index f0a99a6ba1a..21e7801bfd5 100644 --- a/syft/pkg/relationships_evident_by_test.go +++ b/syft/pkg/relationships_evident_by_test.go @@ -7,45 +7,45 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/syft/syft/artifact" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) func TestRelationshipsEvidentBy(t *testing.T) { c := NewCollection() - coordA := source.Coordinates{ + coordA := file.Coordinates{ RealPath: "/somewhere/real", FileSystemID: "abc", } - coordC := source.Coordinates{ + coordC := file.Coordinates{ RealPath: "/somewhere/real", FileSystemID: "abc", } - coordD := source.Coordinates{ + coordD := file.Coordinates{ RealPath: "/somewhere/real", FileSystemID: "abc", } pkgA := Package{ - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( // added! - source.NewLocationFromCoordinates(coordA).WithAnnotation(EvidenceAnnotationKey, PrimaryEvidenceAnnotation), + file.NewLocationFromCoordinates(coordA).WithAnnotation(EvidenceAnnotationKey, PrimaryEvidenceAnnotation), // ignored... - source.NewLocationFromCoordinates(coordC).WithAnnotation(EvidenceAnnotationKey, SupportingEvidenceAnnotation), - source.NewLocationFromCoordinates(coordD), + file.NewLocationFromCoordinates(coordC).WithAnnotation(EvidenceAnnotationKey, SupportingEvidenceAnnotation), + file.NewLocationFromCoordinates(coordD), ), } pkgA.SetID() c.Add(pkgA) - coordB := source.Coordinates{ + coordB := file.Coordinates{ RealPath: "/somewhere-else/real", FileSystemID: "def", } pkgB := Package{ - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( // added! 
- source.NewLocationFromCoordinates(coordB).WithAnnotation(EvidenceAnnotationKey, PrimaryEvidenceAnnotation), + file.NewLocationFromCoordinates(coordB).WithAnnotation(EvidenceAnnotationKey, PrimaryEvidenceAnnotation), ), } pkgB.SetID() diff --git a/syft/sbom/sbom.go b/syft/sbom/sbom.go index 7770027182b..0bc8feb0cfa 100644 --- a/syft/sbom/sbom.go +++ b/syft/sbom/sbom.go @@ -21,11 +21,11 @@ type SBOM struct { type Artifacts struct { Packages *pkg.Collection - FileMetadata map[source.Coordinates]source.FileMetadata - FileDigests map[source.Coordinates][]file.Digest - FileContents map[source.Coordinates]string - FileLicenses map[source.Coordinates][]file.License - Secrets map[source.Coordinates][]file.SearchResult + FileMetadata map[file.Coordinates]file.Metadata + FileDigests map[file.Coordinates][]file.Digest + FileContents map[file.Coordinates]string + FileLicenses map[file.Coordinates][]file.License + Secrets map[file.Coordinates][]file.SearchResult LinuxDistribution *linux.Release } @@ -49,8 +49,8 @@ func (s SBOM) RelationshipsSorted() []artifact.Relationship { return relationships } -func (s SBOM) AllCoordinates() []source.Coordinates { - set := source.NewCoordinateSet() +func (s SBOM) AllCoordinates() []file.Coordinates { + set := file.NewCoordinateSet() for coordinates := range s.Artifacts.FileMetadata { set.Add(coordinates) } @@ -89,8 +89,8 @@ func (s SBOM) RelationshipsForPackage(p pkg.Package, rt ...artifact.Relationship // CoordinatesForPackage returns all coordinates for the provided package for provided relationship types // If no types are provided, all relationship types are considered. -func (s SBOM) CoordinatesForPackage(p pkg.Package, rt ...artifact.RelationshipType) []source.Coordinates { - var coordinates []source.Coordinates +func (s SBOM) CoordinatesForPackage(p pkg.Package, rt ...artifact.RelationshipType) []file.Coordinates { + var coordinates []file.Coordinates for _, relationship := range s.RelationshipsForPackage(p, rt...) 
{ cords := extractCoordinates(relationship) coordinates = append(coordinates, cords...) @@ -98,12 +98,12 @@ func (s SBOM) CoordinatesForPackage(p pkg.Package, rt ...artifact.RelationshipTy return coordinates } -func extractCoordinates(relationship artifact.Relationship) (results []source.Coordinates) { - if coordinates, exists := relationship.From.(source.Coordinates); exists { +func extractCoordinates(relationship artifact.Relationship) (results []file.Coordinates) { + if coordinates, exists := relationship.From.(file.Coordinates); exists { results = append(results, coordinates) } - if coordinates, exists := relationship.To.(source.Coordinates); exists { + if coordinates, exists := relationship.To.(file.Coordinates); exists { results = append(results, coordinates) } diff --git a/syft/source/deferred_resolver.go b/syft/source/deferred_resolver.go deleted file mode 100644 index 7ca9b90eab6..00000000000 --- a/syft/source/deferred_resolver.go +++ /dev/null @@ -1,108 +0,0 @@ -package source - -import ( - "io" - - "github.com/anchore/syft/internal/log" -) - -func NewDeferredResolverFromSource(creator func() (Source, error)) *DeferredResolver { - return NewDeferredResolver(func() (FileResolver, error) { - s, err := creator() - if err != nil { - return nil, err - } - - return s.FileResolver(SquashedScope) - }) -} - -func NewDeferredResolver(creator func() (FileResolver, error)) *DeferredResolver { - return &DeferredResolver{ - creator: creator, - } -} - -type DeferredResolver struct { - creator func() (FileResolver, error) - resolver FileResolver -} - -func (d *DeferredResolver) getResolver() (FileResolver, error) { - if d.resolver == nil { - resolver, err := d.creator() - if err != nil { - return nil, err - } - d.resolver = resolver - } - return d.resolver, nil -} - -func (d *DeferredResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) { - r, err := d.getResolver() - if err != nil { - return nil, err - } - return r.FileContentsByLocation(location) 
-} - -func (d *DeferredResolver) HasPath(s string) bool { - r, err := d.getResolver() - if err != nil { - log.Debug("unable to get resolver: %v", err) - return false - } - return r.HasPath(s) -} - -func (d *DeferredResolver) FilesByPath(paths ...string) ([]Location, error) { - r, err := d.getResolver() - if err != nil { - return nil, err - } - return r.FilesByPath(paths...) -} - -func (d *DeferredResolver) FilesByGlob(patterns ...string) ([]Location, error) { - r, err := d.getResolver() - if err != nil { - return nil, err - } - return r.FilesByGlob(patterns...) -} - -func (d *DeferredResolver) FilesByMIMEType(types ...string) ([]Location, error) { - r, err := d.getResolver() - if err != nil { - return nil, err - } - return r.FilesByMIMEType(types...) -} - -func (d *DeferredResolver) RelativeFileByPath(location Location, path string) *Location { - r, err := d.getResolver() - if err != nil { - return nil - } - return r.RelativeFileByPath(location, path) -} - -func (d *DeferredResolver) AllLocations() <-chan Location { - r, err := d.getResolver() - if err != nil { - log.Debug("unable to get resolver: %v", err) - return nil - } - return r.AllLocations() -} - -func (d *DeferredResolver) FileMetadataByLocation(location Location) (FileMetadata, error) { - r, err := d.getResolver() - if err != nil { - return FileMetadata{}, err - } - return r.FileMetadataByLocation(location) -} - -var _ FileResolver = (*DeferredResolver)(nil) diff --git a/syft/source/deprecated.go b/syft/source/deprecated.go new file mode 100644 index 00000000000..4b7e35cf11a --- /dev/null +++ b/syft/source/deprecated.go @@ -0,0 +1,119 @@ +package source + +import ( + "io" + + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" + "github.com/anchore/stereoscope/pkg/image" + "github.com/anchore/syft/syft/file" +) + +// Deprecated: use file.Metadata instead +type FileMetadata = file.Metadata + +type ( + // Deprecated: use file.Coordinates instead + Coordinates = file.Coordinates + + // Deprecated: use 
file.CoordinateSet instead + CoordinateSet = file.CoordinateSet + + // Deprecated: use file.Resolver instead + FileResolver = file.Resolver + + // Deprecated: use file.ContentResolver instead + FileContentResolver = file.ContentResolver + + // Deprecated: use file.PathResolver instead + FilePathResolver = file.PathResolver + + // Deprecated: use file.LocationResolver instead + FileLocationResolver = file.LocationResolver + + // Deprecated: use file.MetadataResolver instead + FileMetadataResolver = file.MetadataResolver + + // Deprecated: use file.WritableResolver instead + WritableFileResolver = file.WritableResolver + + // Deprecated: use file.MockResolver instead + MockResolver = file.MockResolver + + // Deprecated: use file.Location instead + Location = file.Location + + // Deprecated: use file.LocationData instead + LocationData = file.LocationData + + // Deprecated: use file.LocationMetadata instead + LocationMetadata = file.LocationMetadata + + // Deprecated: use file.LocationSet instead + LocationSet = file.LocationSet + + // Deprecated: use file.Locations instead + Locations = file.Locations + + // Deprecated: use file.LocationReadCloser instead + LocationReadCloser = file.LocationReadCloser +) + +// Deprecated: use file.NewCoordinateSet instead +func NewCoordinateSet(coordinates ...file.Coordinates) file.CoordinateSet { + return file.NewCoordinateSet(coordinates...) +} + +// Deprecated: use file.NewLocationSet instead +func NewLocationSet(locations ...file.Location) file.LocationSet { + return file.NewLocationSet(locations...) 
+} + +// Deprecated: use file.NewLocation instead +func NewLocation(realPath string) file.Location { + return file.NewLocation(realPath) +} + +// Deprecated: use file.NewVirtualLocation instead +func NewVirtualLocation(realPath, virtualPath string) file.Location { + return file.NewVirtualLocation(realPath, virtualPath) +} + +// Deprecated: use file.NewLocationFromCoordinates instead +func NewLocationFromCoordinates(coordinates file.Coordinates) file.Location { + return file.NewLocationFromCoordinates(coordinates) +} + +// Deprecated: use file.NewVirtualLocationFromCoordinates instead +func NewVirtualLocationFromCoordinates(coordinates file.Coordinates, virtualPath string) file.Location { + return file.NewVirtualLocationFromCoordinates(coordinates, virtualPath) +} + +// Deprecated: use file.NewLocationFromImage instead +func NewLocationFromImage(virtualPath string, ref stereoscopeFile.Reference, img *image.Image) file.Location { + return file.NewLocationFromImage(virtualPath, ref, img) +} + +// Deprecated: use file.NewLocationFromDirectory instead +func NewLocationFromDirectory(responsePath string, ref stereoscopeFile.Reference) file.Location { + return file.NewLocationFromDirectory(responsePath, ref) +} + +// Deprecated: use file.NewVirtualLocationFromDirectory instead +func NewVirtualLocationFromDirectory(responsePath, virtualResponsePath string, ref stereoscopeFile.Reference) file.Location { + return file.NewVirtualLocationFromDirectory(responsePath, virtualResponsePath, ref) +} + +// Deprecated: use file.NewLocationReadCloser instead +func NewLocationReadCloser(location file.Location, reader io.ReadCloser) file.LocationReadCloser { + return file.NewLocationReadCloser(location, reader) +} + +// Deprecated: use file.NewMockResolverForPaths instead +func NewMockResolverForPaths(paths ...string) *file.MockResolver { + return file.NewMockResolverForPaths(paths...) 
+} + +// Deprecated: use file.NewMockResolverForPathsWithMetadata instead +func NewMockResolverForPathsWithMetadata(metadata map[file.Coordinates]file.Metadata) *file.MockResolver { + return file.NewMockResolverForPathsWithMetadata(metadata) +} diff --git a/syft/source/directory_resolver_test.go b/syft/source/directory_resolver_test.go deleted file mode 100644 index 0f38f3e017d..00000000000 --- a/syft/source/directory_resolver_test.go +++ /dev/null @@ -1,994 +0,0 @@ -//go:build !windows -// +build !windows - -package source - -import ( - "io" - "io/fs" - "os" - "path/filepath" - "sort" - "strings" - "testing" - "time" - - "github.com/google/go-cmp/cmp" - "github.com/scylladb/go-set/strset" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/anchore/stereoscope/pkg/file" -) - -func TestDirectoryResolver_FilesByPath_relativeRoot(t *testing.T) { - cases := []struct { - name string - relativeRoot string - input string - expected []string - }{ - { - name: "should find a file from an absolute input", - relativeRoot: "./test-fixtures/", - input: "/image-symlinks/file-1.txt", - expected: []string{ - "image-symlinks/file-1.txt", - }, - }, - { - name: "should find a file from a relative path", - relativeRoot: "./test-fixtures/", - input: "image-symlinks/file-1.txt", - expected: []string{ - "image-symlinks/file-1.txt", - }, - }, - { - name: "should find a file from a relative path (root above cwd)", - relativeRoot: "../", - input: "sbom/sbom.go", - expected: []string{ - "sbom/sbom.go", - }, - }, - } - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - resolver, err := newDirectoryResolver(c.relativeRoot, "") - assert.NoError(t, err) - - refs, err := resolver.FilesByPath(c.input) - require.NoError(t, err) - assert.Len(t, refs, len(c.expected)) - s := strset.New() - for _, actual := range refs { - s.Add(actual.RealPath) - } - assert.ElementsMatch(t, c.expected, s.List()) - }) - } -} - -func 
TestDirectoryResolver_FilesByPath_absoluteRoot(t *testing.T) { - cases := []struct { - name string - relativeRoot string - input string - expected []string - }{ - { - name: "should find a file from an absolute input", - relativeRoot: "./test-fixtures/", - input: "/image-symlinks/file-1.txt", - expected: []string{ - "image-symlinks/file-1.txt", - }, - }, - { - name: "should find a file from a relative path", - relativeRoot: "./test-fixtures/", - input: "image-symlinks/file-1.txt", - expected: []string{ - "image-symlinks/file-1.txt", - }, - }, - { - name: "should find a file from a relative path (root above cwd)", - relativeRoot: "../", - input: "sbom/sbom.go", - expected: []string{ - "sbom/sbom.go", - }, - }, - } - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - // note: this test is all about asserting correct functionality when the given analysis path - // is an absolute path - absRoot, err := filepath.Abs(c.relativeRoot) - require.NoError(t, err) - - resolver, err := newDirectoryResolver(absRoot, "") - assert.NoError(t, err) - - refs, err := resolver.FilesByPath(c.input) - require.NoError(t, err) - assert.Len(t, refs, len(c.expected)) - s := strset.New() - for _, actual := range refs { - s.Add(actual.RealPath) - } - assert.ElementsMatch(t, c.expected, s.List()) - }) - } -} - -func TestDirectoryResolver_FilesByPath(t *testing.T) { - cases := []struct { - name string - root string - input string - expected string - refCount int - forcePositiveHasPath bool - }{ - { - name: "finds a file (relative)", - root: "./test-fixtures/", - input: "image-symlinks/file-1.txt", - expected: "image-symlinks/file-1.txt", - refCount: 1, - }, - { - name: "finds a file with relative indirection", - root: "./test-fixtures/../test-fixtures", - input: "image-symlinks/file-1.txt", - expected: "image-symlinks/file-1.txt", - refCount: 1, - }, - { - name: "managed non-existing files (relative)", - root: "./test-fixtures/", - input: "test-fixtures/image-symlinks/bogus.txt", - 
refCount: 0, - }, - { - name: "finds a file (absolute)", - root: "./test-fixtures/", - input: "/image-symlinks/file-1.txt", - expected: "image-symlinks/file-1.txt", - refCount: 1, - }, - { - name: "directories ignored", - root: "./test-fixtures/", - input: "/image-symlinks", - refCount: 0, - forcePositiveHasPath: true, - }, - } - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - resolver, err := newDirectoryResolver(c.root, "") - assert.NoError(t, err) - - hasPath := resolver.HasPath(c.input) - if !c.forcePositiveHasPath { - if c.refCount != 0 && !hasPath { - t.Errorf("expected HasPath() to indicate existence, but did not") - } else if c.refCount == 0 && hasPath { - t.Errorf("expected HasPath() to NOT indicate existence, but does") - } - } else if !hasPath { - t.Errorf("expected HasPath() to indicate existence, but did not (force path)") - } - - refs, err := resolver.FilesByPath(c.input) - require.NoError(t, err) - assert.Len(t, refs, c.refCount) - for _, actual := range refs { - assert.Equal(t, c.expected, actual.RealPath) - } - }) - } -} - -func TestDirectoryResolver_MultipleFilesByPath(t *testing.T) { - cases := []struct { - name string - input []string - refCount int - }{ - { - name: "finds multiple files", - input: []string{"image-symlinks/file-1.txt", "image-symlinks/file-2.txt"}, - refCount: 2, - }, - { - name: "skips non-existing files", - input: []string{"image-symlinks/bogus.txt", "image-symlinks/file-1.txt"}, - refCount: 1, - }, - { - name: "does not return anything for non-existing directories", - input: []string{"non-existing/bogus.txt", "non-existing/file-1.txt"}, - refCount: 0, - }, - } - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures", "") - assert.NoError(t, err) - refs, err := resolver.FilesByPath(c.input...) 
- assert.NoError(t, err) - - if len(refs) != c.refCount { - t.Errorf("unexpected number of refs: %d != %d", len(refs), c.refCount) - } - }) - } -} - -func TestDirectoryResolver_FilesByGlobMultiple(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures", "") - assert.NoError(t, err) - refs, err := resolver.FilesByGlob("**/image-symlinks/file*") - assert.NoError(t, err) - - assert.Len(t, refs, 2) -} - -func TestDirectoryResolver_FilesByGlobRecursive(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/image-symlinks", "") - assert.NoError(t, err) - refs, err := resolver.FilesByGlob("**/*.txt") - assert.NoError(t, err) - assert.Len(t, refs, 6) -} - -func TestDirectoryResolver_FilesByGlobSingle(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures", "") - assert.NoError(t, err) - refs, err := resolver.FilesByGlob("**/image-symlinks/*1.txt") - assert.NoError(t, err) - - assert.Len(t, refs, 1) - assert.Equal(t, "image-symlinks/file-1.txt", refs[0].RealPath) -} - -func TestDirectoryResolver_FilesByPath_ResolvesSymlinks(t *testing.T) { - - tests := []struct { - name string - fixture string - }{ - { - name: "one degree", - fixture: "link_to_new_readme", - }, - { - name: "two degrees", - fixture: "link_to_link_to_new_readme", - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/symlinks-simple", "") - assert.NoError(t, err) - - refs, err := resolver.FilesByPath(test.fixture) - require.NoError(t, err) - assert.Len(t, refs, 1) - - reader, err := resolver.FileContentsByLocation(refs[0]) - require.NoError(t, err) - - actual, err := io.ReadAll(reader) - require.NoError(t, err) - - expected, err := os.ReadFile("test-fixtures/symlinks-simple/readme") - require.NoError(t, err) - - assert.Equal(t, string(expected), string(actual)) - }) - } -} - -func TestDirectoryResolverDoesNotIgnoreRelativeSystemPaths(t *testing.T) { - // let's make certain 
that "dev/place" is not ignored, since it is not "/dev/place" - resolver, err := newDirectoryResolver("test-fixtures/system_paths/target", "") - assert.NoError(t, err) - - // all paths should be found (non filtering matches a path) - locations, err := resolver.FilesByGlob("**/place") - assert.NoError(t, err) - // 4: within target/ - // 1: target/link --> relative path to "place" // NOTE: this is filtered out since it not unique relative to outside_root/link_target/place - // 1: outside_root/link_target/place - assert.Len(t, locations, 5) - - // ensure that symlink indexing outside of root worked - testLocation := "test-fixtures/system_paths/outside_root/link_target/place" - ok := false - for _, location := range locations { - if strings.HasSuffix(location.RealPath, testLocation) { - ok = true - } - } - - if !ok { - t.Fatalf("could not find test location=%q", testLocation) - } -} - -var _ fs.FileInfo = (*testFileInfo)(nil) - -type testFileInfo struct { - mode os.FileMode -} - -func (t testFileInfo) Name() string { - panic("implement me") -} - -func (t testFileInfo) Size() int64 { - panic("implement me") -} - -func (t testFileInfo) Mode() fs.FileMode { - return t.mode -} - -func (t testFileInfo) ModTime() time.Time { - panic("implement me") -} - -func (t testFileInfo) IsDir() bool { - panic("implement me") -} - -func (t testFileInfo) Sys() interface{} { - panic("implement me") -} - -func Test_isUnallowableFileType(t *testing.T) { - tests := []struct { - name string - info os.FileInfo - expected error - }{ - { - name: "regular file", - info: testFileInfo{ - mode: 0, - }, - }, - { - name: "dir", - info: testFileInfo{ - mode: os.ModeDir, - }, - }, - { - name: "symlink", - info: testFileInfo{ - mode: os.ModeSymlink, - }, - }, - { - name: "socket", - info: testFileInfo{ - mode: os.ModeSocket, - }, - expected: errSkipPath, - }, - { - name: "named pipe", - info: testFileInfo{ - mode: os.ModeNamedPipe, - }, - expected: errSkipPath, - }, - { - name: "char device", - info: 
testFileInfo{ - mode: os.ModeCharDevice, - }, - expected: errSkipPath, - }, - { - name: "block device", - info: testFileInfo{ - mode: os.ModeDevice, - }, - expected: errSkipPath, - }, - { - name: "irregular", - info: testFileInfo{ - mode: os.ModeIrregular, - }, - expected: errSkipPath, - }, - } - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - assert.Equal(t, test.expected, disallowByFileType("dont/care", test.info, nil)) - }) - } -} - -func Test_directoryResolver_FilesByMIMEType(t *testing.T) { - tests := []struct { - fixturePath string - mimeType string - expectedPaths *strset.Set - }{ - { - fixturePath: "./test-fixtures/image-simple", - mimeType: "text/plain", - expectedPaths: strset.New("file-1.txt", "file-2.txt", "target/really/nested/file-3.txt", "Dockerfile"), - }, - } - for _, test := range tests { - t.Run(test.fixturePath, func(t *testing.T) { - resolver, err := newDirectoryResolver(test.fixturePath, "") - assert.NoError(t, err) - locations, err := resolver.FilesByMIMEType(test.mimeType) - assert.NoError(t, err) - assert.Equal(t, test.expectedPaths.Size(), len(locations)) - for _, l := range locations { - assert.True(t, test.expectedPaths.Has(l.RealPath), "does not have path %q", l.RealPath) - } - }) - } -} - -func Test_IndexingNestedSymLinks(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/symlinks-simple", "") - require.NoError(t, err) - - // check that we can get the real path - locations, err := resolver.FilesByPath("./readme") - require.NoError(t, err) - assert.Len(t, locations, 1) - - // check that we can access the same file via 1 symlink - locations, err = resolver.FilesByPath("./link_to_new_readme") - require.NoError(t, err) - require.Len(t, locations, 1) - assert.Equal(t, "readme", locations[0].RealPath) - assert.Equal(t, "link_to_new_readme", locations[0].VirtualPath) - - // check that we can access the same file via 2 symlinks - locations, err = resolver.FilesByPath("./link_to_link_to_new_readme") 
- require.NoError(t, err) - require.Len(t, locations, 1) - assert.Equal(t, "readme", locations[0].RealPath) - assert.Equal(t, "link_to_link_to_new_readme", locations[0].VirtualPath) - - // check that we can access the same file via 2 symlinks - locations, err = resolver.FilesByGlob("**/link_*") - require.NoError(t, err) - require.Len(t, locations, 1) // you would think this is 2, however, they point to the same file, and glob only returns unique files - - // returned locations can be in any order - expectedVirtualPaths := []string{ - "link_to_link_to_new_readme", - //"link_to_new_readme", // we filter out this one because the first symlink resolves to the same file - } - - expectedRealPaths := []string{ - "readme", - } - - actualRealPaths := strset.New() - actualVirtualPaths := strset.New() - for _, a := range locations { - actualVirtualPaths.Add(a.VirtualPath) - actualRealPaths.Add(a.RealPath) - } - - assert.ElementsMatch(t, expectedVirtualPaths, actualVirtualPaths.List()) - assert.ElementsMatch(t, expectedRealPaths, actualRealPaths.List()) -} - -func Test_IndexingNestedSymLinks_ignoredIndexes(t *testing.T) { - filterFn := func(path string, _ os.FileInfo, _ error) error { - if strings.HasSuffix(path, string(filepath.Separator)+"readme") { - return errSkipPath - } - return nil - } - - resolver, err := newDirectoryResolver("./test-fixtures/symlinks-simple", "", filterFn) - require.NoError(t, err) - - // the path to the real file is PRUNED from the index, so we should NOT expect a location returned - locations, err := resolver.FilesByPath("./readme") - require.NoError(t, err) - assert.Empty(t, locations) - - // check that we cannot access the file even via symlink - locations, err = resolver.FilesByPath("./link_to_new_readme") - require.NoError(t, err) - assert.Empty(t, locations) - - // check that we still cannot access the same file via 2 symlinks - locations, err = resolver.FilesByPath("./link_to_link_to_new_readme") - require.NoError(t, err) - assert.Empty(t, 
locations) -} - -func Test_IndexingNestedSymLinksOutsideOfRoot(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/symlinks-multiple-roots/root", "") - require.NoError(t, err) - - // check that we can get the real path - locations, err := resolver.FilesByPath("./readme") - require.NoError(t, err) - assert.Len(t, locations, 1) - - // check that we can access the same file via 2 symlinks (link_to_link_to_readme -> link_to_readme -> readme) - locations, err = resolver.FilesByPath("./link_to_link_to_readme") - require.NoError(t, err) - assert.Len(t, locations, 1) - - // something looks wrong here - t.Failed() -} - -func Test_RootViaSymlink(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/symlinked-root/nested/link-root", "") - require.NoError(t, err) - - locations, err := resolver.FilesByPath("./file1.txt") - require.NoError(t, err) - assert.Len(t, locations, 1) - - locations, err = resolver.FilesByPath("./nested/file2.txt") - require.NoError(t, err) - assert.Len(t, locations, 1) - - locations, err = resolver.FilesByPath("./nested/linked-file1.txt") - require.NoError(t, err) - assert.Len(t, locations, 1) -} - -func Test_directoryResolver_FileContentsByLocation(t *testing.T) { - cwd, err := os.Getwd() - require.NoError(t, err) - - r, err := newDirectoryResolver(".", "") - require.NoError(t, err) - - exists, existingPath, err := r.tree.File(file.Path(filepath.Join(cwd, "test-fixtures/image-simple/file-1.txt"))) - require.True(t, exists) - require.NoError(t, err) - require.True(t, existingPath.HasReference()) - - tests := []struct { - name string - location Location - expects string - err bool - }{ - { - name: "use file reference for content requests", - location: NewLocationFromDirectory("some/place", *existingPath.Reference), - expects: "this file has contents", - }, - { - name: "error on empty file reference", - location: NewLocationFromDirectory("doesn't matter", file.Reference{}), - err: true, - }, - } - for _, test := 
range tests { - t.Run(test.name, func(t *testing.T) { - - actual, err := r.FileContentsByLocation(test.location) - if test.err { - require.Error(t, err) - return - } - - require.NoError(t, err) - if test.expects != "" { - b, err := io.ReadAll(actual) - require.NoError(t, err) - assert.Equal(t, test.expects, string(b)) - } - }) - } -} - -func Test_isUnixSystemRuntimePath(t *testing.T) { - tests := []struct { - path string - expected error - }{ - { - path: "proc/place", - }, - { - path: "/proc/place", - expected: fs.SkipDir, - }, - { - path: "/proc", - expected: fs.SkipDir, - }, - { - path: "/pro/c", - }, - { - path: "/pro", - }, - { - path: "/dev", - expected: fs.SkipDir, - }, - { - path: "/sys", - expected: fs.SkipDir, - }, - { - path: "/something/sys", - }, - } - for _, test := range tests { - t.Run(test.path, func(t *testing.T) { - assert.Equal(t, test.expected, disallowUnixSystemRuntimePath(test.path, nil, nil)) - }) - } -} - -func Test_SymlinkLoopWithGlobsShouldResolve(t *testing.T) { - test := func(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/symlinks-loop", "") - require.NoError(t, err) - - locations, err := resolver.FilesByGlob("**/file.target") - require.NoError(t, err) - - require.Len(t, locations, 1) - assert.Equal(t, "devices/loop0/file.target", locations[0].RealPath) - } - - testWithTimeout(t, 5*time.Second, test) -} - -func testWithTimeout(t *testing.T, timeout time.Duration, test func(*testing.T)) { - done := make(chan bool) - go func() { - test(t) - done <- true - }() - - select { - case <-time.After(timeout): - t.Fatal("test timed out") - case <-done: - } -} - -func TestDirectoryResolver_FilesByPath_baseRoot(t *testing.T) { - cases := []struct { - name string - root string - input string - expected []string - }{ - { - name: "should find the base file", - root: "./test-fixtures/symlinks-base/", - input: "./base", - expected: []string{ - "/base", - }, - }, - { - name: "should follow a link with a pivoted root", - root: 
"./test-fixtures/symlinks-base/", - input: "./foo", - expected: []string{ - "/base", - }, - }, - { - name: "should follow a relative link with extra parents", - root: "./test-fixtures/symlinks-base/", - input: "./bar", - expected: []string{ - "/base", - }, - }, - { - name: "should follow an absolute link with extra parents", - root: "./test-fixtures/symlinks-base/", - input: "./baz", - expected: []string{ - "/base", - }, - }, - { - name: "should follow an absolute link with extra parents", - root: "./test-fixtures/symlinks-base/", - input: "./sub/link", - expected: []string{ - "/sub/item", - }, - }, - { - name: "should follow chained pivoted link", - root: "./test-fixtures/symlinks-base/", - input: "./chain", - expected: []string{ - "/base", - }, - }, - } - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - resolver, err := newDirectoryResolver(c.root, c.root) - assert.NoError(t, err) - - refs, err := resolver.FilesByPath(c.input) - require.NoError(t, err) - assert.Len(t, refs, len(c.expected)) - s := strset.New() - for _, actual := range refs { - s.Add(actual.RealPath) - } - assert.ElementsMatch(t, c.expected, s.List()) - }) - } - -} - -func Test_directoryResolver_resolvesLinks(t *testing.T) { - tests := []struct { - name string - runner func(FileResolver) []Location - expected []Location - }{ - { - name: "by mimetype", - runner: func(resolver FileResolver) []Location { - // links should not show up when searching mimetype - actualLocations, err := resolver.FilesByMIMEType("text/plain") - assert.NoError(t, err) - return actualLocations - }, - expected: []Location{ - NewLocation("file-1.txt"), // note: missing virtual path "file-1.txt" - NewLocation("file-3.txt"), // note: missing virtual path "file-3.txt" - NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt" - NewLocation("parent/file-4.txt"), // note: missing virtual path "file-4.txt" - }, - }, - { - name: "by glob to links", - runner: func(resolver FileResolver) []Location { - // 
links are searched, but resolve to the real files - // for that reason we need to place **/ in front (which is not the same for other resolvers) - actualLocations, err := resolver.FilesByGlob("**/*ink-*") - assert.NoError(t, err) - return actualLocations - }, - expected: []Location{ - NewVirtualLocation("file-1.txt", "link-1"), - NewVirtualLocation("file-2.txt", "link-2"), - // we already have this real file path via another link, so only one is returned - //NewVirtualLocation("file-2.txt", "link-indirect"), - NewVirtualLocation("file-3.txt", "link-within"), - }, - }, - { - name: "by basename", - runner: func(resolver FileResolver) []Location { - // links are searched, but resolve to the real files - actualLocations, err := resolver.FilesByGlob("**/file-2.txt") - assert.NoError(t, err) - return actualLocations - }, - expected: []Location{ - // this has two copies in the base image, which overwrites the same location - NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt", - }, - }, - { - name: "by basename glob", - runner: func(resolver FileResolver) []Location { - // links are searched, but resolve to the real files - actualLocations, err := resolver.FilesByGlob("**/file-?.txt") - assert.NoError(t, err) - return actualLocations - }, - expected: []Location{ - NewLocation("file-1.txt"), // note: missing virtual path "file-1.txt" - NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt" - NewLocation("file-3.txt"), // note: missing virtual path "file-3.txt" - NewLocation("parent/file-4.txt"), // note: missing virtual path "parent/file-4.txt" - }, - }, - { - name: "by basename glob to links", - runner: func(resolver FileResolver) []Location { - actualLocations, err := resolver.FilesByGlob("**/link-*") - assert.NoError(t, err) - return actualLocations - }, - expected: []Location{ - { - LocationData: LocationData{ - Coordinates: Coordinates{ - RealPath: "file-1.txt", - }, - VirtualPath: "link-1", - ref: file.Reference{RealPath: 
"file-1.txt"}, - }, - }, - { - LocationData: LocationData{ - Coordinates: Coordinates{ - RealPath: "file-2.txt", - }, - VirtualPath: "link-2", - ref: file.Reference{RealPath: "file-2.txt"}, - }, - }, - // we already have this real file path via another link, so only one is returned - //{ - // LocationData: LocationData{ - // Coordinates: Coordinates{ - // RealPath: "file-2.txt", - // }, - // VirtualPath: "link-indirect", - // ref: file.Reference{RealPath: "file-2.txt"}, - // }, - //}, - { - LocationData: LocationData{ - Coordinates: Coordinates{ - RealPath: "file-3.txt", - }, - VirtualPath: "link-within", - ref: file.Reference{RealPath: "file-3.txt"}, - }, - }, - }, - }, - { - name: "by extension", - runner: func(resolver FileResolver) []Location { - // links are searched, but resolve to the real files - actualLocations, err := resolver.FilesByGlob("**/*.txt") - assert.NoError(t, err) - return actualLocations - }, - expected: []Location{ - NewLocation("file-1.txt"), // note: missing virtual path "file-1.txt" - NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt" - NewLocation("file-3.txt"), // note: missing virtual path "file-3.txt" - NewLocation("parent/file-4.txt"), // note: missing virtual path "parent/file-4.txt" - }, - }, - { - name: "by path to degree 1 link", - runner: func(resolver FileResolver) []Location { - // links resolve to the final file - actualLocations, err := resolver.FilesByPath("/link-2") - assert.NoError(t, err) - return actualLocations - }, - expected: []Location{ - // we have multiple copies across layers - NewVirtualLocation("file-2.txt", "link-2"), - }, - }, - { - name: "by path to degree 2 link", - runner: func(resolver FileResolver) []Location { - // multiple links resolves to the final file - actualLocations, err := resolver.FilesByPath("/link-indirect") - assert.NoError(t, err) - return actualLocations - }, - expected: []Location{ - // we have multiple copies across layers - NewVirtualLocation("file-2.txt", 
"link-indirect"), - }, - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/symlinks-from-image-symlinks-fixture", "") - require.NoError(t, err) - assert.NoError(t, err) - - actual := test.runner(resolver) - - compareLocations(t, test.expected, actual) - }) - } -} - -func TestDirectoryResolver_DoNotAddVirtualPathsToTree(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/symlinks-prune-indexing", "") - require.NoError(t, err) - - var allRealPaths []file.Path - for l := range resolver.AllLocations() { - allRealPaths = append(allRealPaths, file.Path(l.RealPath)) - } - pathSet := file.NewPathSet(allRealPaths...) - - assert.False(t, - pathSet.Contains("before-path/file.txt"), - "symlink destinations should only be indexed at their real path, not through their virtual (symlinked) path", - ) - - assert.False(t, - pathSet.Contains("a-path/file.txt"), - "symlink destinations should only be indexed at their real path, not through their virtual (symlinked) path", - ) - -} - -func TestDirectoryResolver_FilesContents_errorOnDirRequest(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/system_paths", "") - assert.NoError(t, err) - - var dirLoc *Location - for loc := range resolver.AllLocations() { - entry, err := resolver.index.Get(loc.ref) - require.NoError(t, err) - if entry.Metadata.IsDir { - dirLoc = &loc - break - } - } - - require.NotNil(t, dirLoc) - - reader, err := resolver.FileContentsByLocation(*dirLoc) - require.Error(t, err) - require.Nil(t, reader) -} - -func TestDirectoryResolver_AllLocations(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/symlinks-from-image-symlinks-fixture", "") - assert.NoError(t, err) - - paths := strset.New() - for loc := range resolver.AllLocations() { - if strings.HasPrefix(loc.RealPath, "/") { - // ignore outside of the fixture root for now - continue - } - paths.Add(loc.RealPath) - } - 
expected := []string{ - "file-1.txt", - "file-2.txt", - "file-3.txt", - "link-1", - "link-2", - "link-dead", - "link-indirect", - "link-within", - "parent", - "parent-link", - "parent/file-4.txt", - } - - pathsList := paths.List() - sort.Strings(pathsList) - - assert.ElementsMatchf(t, expected, pathsList, "expected all paths to be indexed, but found different paths: \n%s", cmp.Diff(expected, paths.List())) -} diff --git a/syft/source/empty_resolver.go b/syft/source/empty_resolver.go deleted file mode 100644 index 72c9331dd9d..00000000000 --- a/syft/source/empty_resolver.go +++ /dev/null @@ -1,45 +0,0 @@ -package source - -import ( - "io" -) - -type EmptyResolver struct{} - -func (e EmptyResolver) FileContentsByLocation(_ Location) (io.ReadCloser, error) { - return nil, nil -} - -func (e EmptyResolver) HasPath(_ string) bool { - return false -} - -func (e EmptyResolver) FilesByPath(_ ...string) ([]Location, error) { - return nil, nil -} - -func (e EmptyResolver) FilesByGlob(_ ...string) ([]Location, error) { - return nil, nil -} - -func (e EmptyResolver) FilesByMIMEType(_ ...string) ([]Location, error) { - return nil, nil -} - -func (e EmptyResolver) RelativeFileByPath(_ Location, _ string) *Location { - return nil -} - -func (e EmptyResolver) AllLocations() <-chan Location { - return nil -} - -func (e EmptyResolver) FileMetadataByLocation(_ Location) (FileMetadata, error) { - return FileMetadata{}, nil -} - -func (e EmptyResolver) Write(_ Location, _ io.Reader) error { - return nil -} - -var _ WritableFileResolver = (*EmptyResolver)(nil) diff --git a/syft/source/file_details.go b/syft/source/file_details.go deleted file mode 100644 index f034057ba5f..00000000000 --- a/syft/source/file_details.go +++ /dev/null @@ -1,21 +0,0 @@ -//go:build linux || darwin || netbsd -// +build linux darwin netbsd - -package source - -import ( - "os" - "syscall" -) - -// GetXid is the UID GID system info for unix -func GetXid(info os.FileInfo) (uid, gid int) { - uid = -1 - gid = -1 - 
if stat, ok := info.Sys().(*syscall.Stat_t); ok { - uid = int(stat.Uid) - gid = int(stat.Gid) - } - - return uid, gid -} diff --git a/syft/source/file_details_win.go b/syft/source/file_details_win.go deleted file mode 100644 index 31fd05063e7..00000000000 --- a/syft/source/file_details_win.go +++ /dev/null @@ -1,13 +0,0 @@ -//go:build windows -// +build windows - -package source - -import ( - "os" -) - -// GetXid is a placeholder for windows file information -func GetXid(info os.FileInfo) (uid, gid int) { - return -1, -1 -} diff --git a/syft/source/file_metadata.go b/syft/source/file_metadata.go deleted file mode 100644 index 0763564d0fb..00000000000 --- a/syft/source/file_metadata.go +++ /dev/null @@ -1,17 +0,0 @@ -package source - -import ( - "github.com/anchore/stereoscope/pkg/file" - "github.com/anchore/stereoscope/pkg/image" -) - -type FileMetadata = file.Metadata - -func fileMetadataByLocation(img *image.Image, location Location) (file.Metadata, error) { - entry, err := img.FileCatalog.Get(location.ref) - if err != nil { - return FileMetadata{}, err - } - - return entry.Metadata, nil -} diff --git a/syft/source/metadata.go b/syft/source/metadata.go index 1d29973b4ec..ecbad4f1dd8 100644 --- a/syft/source/metadata.go +++ b/syft/source/metadata.go @@ -8,4 +8,5 @@ type Metadata struct { Path string // the root path to be cataloged (directory only) Base string // the base path to be cataloged (directory only) Name string + Version string } diff --git a/syft/source/source.go b/syft/source/source.go index 8433ed31613..4ff747ae297 100644 --- a/syft/source/source.go +++ b/syft/source/source.go @@ -22,6 +22,8 @@ import ( "github.com/anchore/stereoscope/pkg/image" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/internal/fileresolver" ) // Source is an object that captures the data source to be cataloged, configuration, and a specific resolver used @@ -30,7 +32,7 @@ type 
Source struct { id artifact.ID `hash:"ignore"` Image *image.Image `hash:"ignore"` // the image object to be cataloged (image only) Metadata Metadata - directoryResolver *directoryResolver `hash:"ignore"` + directoryResolver *fileresolver.Directory `hash:"ignore"` path string base string mutex *sync.Mutex @@ -46,6 +48,7 @@ type Input struct { Location string Platform string Name string + Version string } // ParseInput generates a source Input that can be used as an argument to generate a new source @@ -57,6 +60,12 @@ func ParseInput(userInput string, platform string) (*Input, error) { // ParseInputWithName generates a source Input that can be used as an argument to generate a new source // from specific providers including a registry, with an explicit name. func ParseInputWithName(userInput string, platform, name, defaultImageSource string) (*Input, error) { + return ParseInputWithNameVersion(userInput, platform, name, "", defaultImageSource) +} + +// ParseInputWithNameVersion generates a source Input that can be used as an argument to generate a new source +// from specific providers including a registry, with an explicit name and version. 
+func ParseInputWithNameVersion(userInput, platform, name, version, defaultImageSource string) (*Input, error) { fs := afero.NewOsFs() scheme, source, location, err := DetectScheme(fs, image.DetectSource, userInput) if err != nil { @@ -95,6 +104,7 @@ func ParseInputWithName(userInput string, platform, name, defaultImageSource str Location: location, Platform: platform, Name: name, + Version: version, }, nil } @@ -152,7 +162,7 @@ func generateImageSource(in Input, registryOptions *image.RegistryOptions) (*Sou return nil, cleanup, fmt.Errorf("could not fetch image %q: %w", in.Location, err) } - s, err := NewFromImageWithName(img, in.Location, in.Name) + s, err := NewFromImageWithNameVersion(img, in.Location, in.Name, in.Version) if err != nil { return nil, cleanup, fmt.Errorf("could not populate source with image: %w", err) } @@ -216,13 +226,27 @@ func getImageWithRetryStrategy(in Input, registryOptions *image.RegistryOptions) // We need to determine the image source again, such that this determination // doesn't take scheme parsing into account. in.ImageSource = image.DetermineDefaultImagePullSource(in.UserInput) - img, err = stereoscope.GetImageFromSource(ctx, in.UserInput, in.ImageSource, opts...) + img, userInputErr := stereoscope.GetImageFromSource(ctx, in.UserInput, in.ImageSource, opts...) cleanup = func() { if err := img.Cleanup(); err != nil { log.Warnf("unable to cleanup image=%q: %w", in.UserInput, err) } } - return img, cleanup, err + if userInputErr != nil { + // Image retrieval failed on both tries, we will want to return both errors. 
+ return nil, nil, fmt.Errorf( + "scheme %q specified; "+ + "image retrieval using scheme parsing (%s) was unsuccessful: %v; "+ + "image retrieval without scheme parsing (%s) was unsuccessful: %v", + scheme, + in.Location, + err, + in.UserInput, + userInputErr, + ) + } + + return img, cleanup, nil } func generateDirectorySource(fs afero.Fs, in Input) (*Source, func(), error) { @@ -235,7 +259,7 @@ func generateDirectorySource(fs afero.Fs, in Input) (*Source, func(), error) { return nil, func() {}, fmt.Errorf("given path is not a directory (path=%q): %w", in.Location, err) } - s, err := NewFromDirectoryWithName(in.Location, in.Name) + s, err := NewFromDirectoryWithNameVersion(in.Location, in.Name, in.Version) if err != nil { return nil, func() {}, fmt.Errorf("could not populate source from path=%q: %w", in.Location, err) } @@ -253,7 +277,7 @@ func generateFileSource(fs afero.Fs, in Input) (*Source, func(), error) { return nil, func() {}, fmt.Errorf("given path is not a directory (path=%q): %w", in.Location, err) } - s, cleanupFn := NewFromFileWithName(in.Location, in.Name) + s, cleanupFn := NewFromFileWithNameVersion(in.Location, in.Name, in.Version) return &s, cleanupFn, nil } @@ -263,19 +287,20 @@ func NewFromDirectory(path string) (Source, error) { return NewFromDirectoryWithName(path, "") } -// NewFromDirectory creates a new source object tailored to catalog a given filesystem directory recursively. -func NewFromDirectoryRoot(path string) (Source, error) { - return NewFromDirectoryRootWithName(path, "") -} - // NewFromDirectoryWithName creates a new source object tailored to catalog a given filesystem directory recursively, with an explicitly provided name. func NewFromDirectoryWithName(path string, name string) (Source, error) { + return NewFromDirectoryWithNameVersion(path, name, "") +} + +// NewFromDirectoryWithNameVersion creates a new source object tailored to catalog a given filesystem directory recursively, with an explicitly provided name. 
+func NewFromDirectoryWithNameVersion(path string, name string, version string) (Source, error) { s := Source{ mutex: &sync.Mutex{}, Metadata: Metadata{ - Name: name, - Scheme: DirectoryScheme, - Path: path, + Name: name, + Version: version, + Scheme: DirectoryScheme, + Path: path, }, path: path, } @@ -283,15 +308,26 @@ func NewFromDirectoryWithName(path string, name string) (Source, error) { return s, nil } +// NewFromDirectoryRoot creates a new source object tailored to catalog a given filesystem directory recursively. +func NewFromDirectoryRoot(path string) (Source, error) { + return NewFromDirectoryRootWithName(path, "") +} + // NewFromDirectoryRootWithName creates a new source object tailored to catalog a given filesystem directory recursively, with an explicitly provided name. func NewFromDirectoryRootWithName(path string, name string) (Source, error) { + return NewFromDirectoryRootWithNameVersion(path, name, "") +} + +// NewFromDirectoryRootWithNameVersion creates a new source object tailored to catalog a given filesystem directory recursively, with an explicitly provided name. +func NewFromDirectoryRootWithNameVersion(path string, name string, version string) (Source, error) { s := Source{ mutex: &sync.Mutex{}, Metadata: Metadata{ - Name: name, - Scheme: DirectoryScheme, - Path: path, - Base: path, + Name: name, + Version: version, + Scheme: DirectoryScheme, + Path: path, + Base: path, }, path: path, base: path, @@ -307,14 +343,20 @@ func NewFromFile(path string) (Source, func()) { // NewFromFileWithName creates a new source object tailored to catalog a file, with an explicitly provided name. func NewFromFileWithName(path string, name string) (Source, func()) { + return NewFromFileWithNameVersion(path, name, "") +} + +// NewFromFileWithNameVersion creates a new source object tailored to catalog a file, with an explicitly provided name and version. 
+func NewFromFileWithNameVersion(path string, name string, version string) (Source, func()) { analysisPath, cleanupFn := fileAnalysisPath(path) s := Source{ mutex: &sync.Mutex{}, Metadata: Metadata{ - Name: name, - Scheme: FileScheme, - Path: path, + Name: name, + Version: version, + Scheme: FileScheme, + Path: path, }, path: analysisPath, } @@ -364,6 +406,12 @@ func NewFromImage(img *image.Image, userImageStr string) (Source, error) { // NewFromImageWithName creates a new source object tailored to catalog a given container image, relative to the // option given (e.g. all-layers, squashed, etc), with an explicit name. func NewFromImageWithName(img *image.Image, userImageStr string, name string) (Source, error) { + return NewFromImageWithNameVersion(img, userImageStr, name, "") +} + +// NewFromImageWithNameVersion creates a new source object tailored to catalog a given container image, relative to the +// option given (e.g. all-layers, squashed, etc), with an explicit name and version. +func NewFromImageWithNameVersion(img *image.Image, userImageStr string, name string, version string) (Source, error) { if img == nil { return Source{}, fmt.Errorf("no image given") } @@ -372,6 +420,7 @@ func NewFromImageWithName(img *image.Image, userImageStr string, name string) (S Image: img, Metadata: Metadata{ Name: name, + Version: version, Scheme: ImageScheme, ImageMetadata: NewImageMetadata(img, userImageStr), }, @@ -452,7 +501,7 @@ func chain(chainID string, layers []LayerMetadata) string { return chain(chainID, layers[1:]) } -func (s *Source) FileResolver(scope Scope) (FileResolver, error) { +func (s *Source) FileResolver(scope Scope) (file.Resolver, error) { switch s.Metadata.Scheme { case DirectoryScheme, FileScheme: s.mutex.Lock() @@ -462,21 +511,21 @@ func (s *Source) FileResolver(scope Scope) (FileResolver, error) { if err != nil { return nil, err } - resolver, err := newDirectoryResolver(s.path, s.base, exclusionFunctions...) 
+ res, err := fileresolver.NewFromDirectory(s.path, s.base, exclusionFunctions...) if err != nil { return nil, fmt.Errorf("unable to create directory resolver: %w", err) } - s.directoryResolver = resolver + s.directoryResolver = res } return s.directoryResolver, nil case ImageScheme: - var resolver FileResolver + var res file.Resolver var err error switch scope { case SquashedScope: - resolver, err = newImageSquashResolver(s.Image) + res, err = fileresolver.NewFromContainerImageSquash(s.Image) case AllLayersScope: - resolver, err = newAllLayersResolver(s.Image) + res, err = fileresolver.NewFromContainerImageAllLayers(s.Image) default: return nil, fmt.Errorf("bad image scope provided: %+v", scope) } @@ -485,9 +534,9 @@ func (s *Source) FileResolver(scope Scope) (FileResolver, error) { } // image tree contains all paths, so we filter out the excluded entries afterwards if len(s.Exclusions) > 0 { - resolver = NewExcludingResolver(resolver, getImageExclusionFunction(s.Exclusions)) + res = fileresolver.NewExcluding(res, getImageExclusionFunction(s.Exclusions)) } - return resolver, nil + return res, nil } return nil, fmt.Errorf("unable to determine FilePathResolver with current scheme=%q", s.Metadata.Scheme) } @@ -529,12 +578,12 @@ func getImageExclusionFunction(exclusions []string) func(string) bool { } } -func getDirectoryExclusionFunctions(root string, exclusions []string) ([]pathIndexVisitor, error) { +func getDirectoryExclusionFunctions(root string, exclusions []string) ([]fileresolver.PathIndexVisitor, error) { if len(exclusions) == 0 { return nil, nil } - // this is what directoryResolver.indexTree is doing to get the absolute path: + // this is what Directory.indexTree is doing to get the absolute path: root, err := filepath.Abs(root) if err != nil { return nil, err @@ -562,7 +611,7 @@ func getDirectoryExclusionFunctions(root string, exclusions []string) ([]pathInd return nil, fmt.Errorf("invalid exclusion pattern(s): '%s' (must start with one of: './', '*/', or 
'**/')", strings.Join(errors, "', '")) } - return []pathIndexVisitor{ + return []fileresolver.PathIndexVisitor{ func(path string, info os.FileInfo, _ error) error { for _, exclusion := range exclusions { // this is required to handle Windows filepaths @@ -575,7 +624,7 @@ func getDirectoryExclusionFunctions(root string, exclusions []string) ([]pathInd if info != nil && info.IsDir() { return filepath.SkipDir } - return errSkipPath + return fileresolver.ErrSkipPath } } return nil diff --git a/syft/source/source_test.go b/syft/source/source_test.go index df550c595c4..4b8e2369320 100644 --- a/syft/source/source_test.go +++ b/syft/source/source_test.go @@ -23,6 +23,7 @@ import ( "github.com/anchore/stereoscope/pkg/image" "github.com/anchore/stereoscope/pkg/imagetest" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/internal/fileresolver" ) func TestParseInput(t *testing.T) { @@ -124,7 +125,7 @@ func TestSetID(t *testing.T) { Path: "test-fixtures/image-simple", }, }, - expected: artifact.ID("1b0dc351e6577b01"), + expected: artifact.ID("9ee9e786412d6ae5"), }, } @@ -190,7 +191,7 @@ func TestNewFromDirectory(t *testing.T) { require.NoError(t, err) assert.Equal(t, test.input, src.Metadata.Path) - resolver, err := src.FileResolver(SquashedScope) + res, err := src.FileResolver(SquashedScope) if test.expectedErr { if err == nil { t.Fatal("expected an error when making the resolver but got none") @@ -200,7 +201,7 @@ func TestNewFromDirectory(t *testing.T) { require.NoError(t, err) } - refs, err := resolver.FilesByPath(test.inputPaths...) + refs, err := res.FilesByPath(test.inputPaths...) 
if err != nil { t.Errorf("FilesByPath call produced an error: %+v", err) } @@ -238,10 +239,10 @@ func TestNewFromFile(t *testing.T) { assert.Equal(t, test.input, src.Metadata.Path) assert.Equal(t, src.Metadata.Path, src.path) - resolver, err := src.FileResolver(SquashedScope) + res, err := src.FileResolver(SquashedScope) require.NoError(t, err) - refs, err := resolver.FilesByPath(test.inputPaths...) + refs, err := res.FilesByPath(test.inputPaths...) require.NoError(t, err) assert.Len(t, refs, test.expRefs) @@ -286,15 +287,15 @@ func TestNewFromFile_WithArchive(t *testing.T) { assert.Equal(t, archivePath, src.Metadata.Path) assert.NotEqual(t, src.Metadata.Path, src.path) - resolver, err := src.FileResolver(SquashedScope) + res, err := src.FileResolver(SquashedScope) require.NoError(t, err) - refs, err := resolver.FilesByPath(test.inputPaths...) + refs, err := res.FilesByPath(test.inputPaths...) require.NoError(t, err) assert.Len(t, refs, test.expRefs) if test.contents != "" { - reader, err := resolver.FileContentsByLocation(refs[0]) + reader, err := res.FileContentsByLocation(refs[0]) require.NoError(t, err) data, err := io.ReadAll(reader) @@ -353,10 +354,10 @@ func TestNewFromDirectoryShared(t *testing.T) { assert.NoError(t, err) src.Metadata.Path = test.notExist - resolver2, err := src.FileResolver(SquashedScope) + resolver, err := src.FileResolver(SquashedScope) assert.NoError(t, err) - refs, err := resolver2.FilesByPath(test.inputPaths...) + refs, err := resolver.FilesByPath(test.inputPaths...) 
if err != nil { t.Errorf("FilesByPath call produced an error: %+v", err) } @@ -388,11 +389,11 @@ func TestFilesByPathDoesNotExist(t *testing.T) { if err != nil { t.Errorf("could not create NewDirScope: %+v", err) } - resolver, err := src.FileResolver(SquashedScope) + res, err := src.FileResolver(SquashedScope) if err != nil { t.Errorf("could not get resolver error: %+v", err) } - refs, err := resolver.FilesByPath(test.path) + refs, err := res.FilesByPath(test.path) if err != nil { t.Errorf("could not get file references from path: %s, %v", test.path, err) } @@ -437,11 +438,11 @@ func TestFilesByGlob(t *testing.T) { if err != nil { t.Errorf("could not create NewDirScope: %+v", err) } - resolver, err := src.FileResolver(SquashedScope) + res, err := src.FileResolver(SquashedScope) if err != nil { t.Errorf("could not get resolver error: %+v", err) } - contents, err := resolver.FilesByGlob(test.glob) + contents, err := res.FilesByGlob(test.glob) if err != nil { t.Errorf("could not get files by glob: %s+v", err) } @@ -611,11 +612,11 @@ func TestDirectoryExclusions(t *testing.T) { if err != nil { t.Errorf("could not create NewDirScope: %+v", err) } - resolver, err := src.FileResolver(SquashedScope) + res, err := src.FileResolver(SquashedScope) if err != nil { t.Errorf("could not get resolver error: %+v", err) } - locations, err := resolver.FilesByGlob(test.glob) + locations, err := res.FilesByGlob(test.glob) if err != nil { t.Errorf("could not get files by glob: %s+v", err) } @@ -703,11 +704,11 @@ func TestImageExclusions(t *testing.T) { if err != nil { t.Errorf("could not create NewDirScope: %+v", err) } - resolver, err := src.FileResolver(SquashedScope) + res, err := src.FileResolver(SquashedScope) if err != nil { t.Errorf("could not get resolver error: %+v", err) } - contents, err := resolver.FilesByGlob(test.glob) + contents, err := res.FilesByGlob(test.glob) if err != nil { t.Errorf("could not get files by glob: %s+v", err) } @@ -773,7 +774,7 @@ func 
Test_crossPlatformExclusions(t *testing.T) { root: "/", path: "/usr/var/lib", exclude: "**/var/lib", - walkHint: errSkipPath, + walkHint: fileresolver.ErrSkipPath, }, // linux specific tests... { @@ -782,7 +783,7 @@ func Test_crossPlatformExclusions(t *testing.T) { path: "/usr/var/lib/etc.txt", exclude: "**/*.txt", finfo: dummyInfo{isDir: false}, - walkHint: errSkipPath, + walkHint: fileresolver.ErrSkipPath, }, { desc: "linux relative", @@ -791,7 +792,7 @@ func Test_crossPlatformExclusions(t *testing.T) { exclude: "./*.txt", finfo: dummyInfo{isDir: false}, - walkHint: errSkipPath, + walkHint: fileresolver.ErrSkipPath, }, { desc: "linux one level", @@ -813,7 +814,7 @@ func Test_crossPlatformExclusions(t *testing.T) { path: "/C:/User/stuff/thing.txt", exclude: "**/*.txt", finfo: dummyInfo{isDir: false}, - walkHint: errSkipPath, + walkHint: fileresolver.ErrSkipPath, }, { desc: "windows relative", @@ -821,7 +822,7 @@ func Test_crossPlatformExclusions(t *testing.T) { path: "/C:/User/stuff/thing.txt", exclude: "./*.txt", finfo: dummyInfo{isDir: false}, - walkHint: errSkipPath, + walkHint: fileresolver.ErrSkipPath, }, { desc: "windows one level", @@ -897,8 +898,18 @@ func createArchive(t testing.TB, sourceDirPath, destinationArchivePath string, l func setupArchiveTest(t testing.TB, sourceDirPath string, layer2 bool) string { t.Helper() - archivePrefix := path.Join(t.TempDir(),"syft-archive-TEST-") - destinationArchiveFilePath := archivePrefix + ".tar" + archivePrefix, err := os.CreateTemp(t.TempDir(), "syft-archive-TEST-") + require.NoError(t, err) + + t.Cleanup( + assertNoError(t, + func() error { + return os.Remove(archivePrefix.Name()) + }, + ), + ) + + destinationArchiveFilePath := archivePrefix.Name() + ".tar" t.Logf("archive path: %s", destinationArchiveFilePath) createArchive(t, sourceDirPath, destinationArchiveFilePath, layer2) diff --git a/syft/source/test-fixtures/system_paths/target/link/a-symlink/place 
b/syft/source/test-fixtures/system_paths/target/link/a-symlink/place new file mode 100644 index 00000000000..476e93d5714 --- /dev/null +++ b/syft/source/test-fixtures/system_paths/target/link/a-symlink/place @@ -0,0 +1 @@ +good \ No newline at end of file diff --git a/syft/source/unindexed_directory_resolver_test.go b/syft/source/unindexed_directory_resolver_test.go deleted file mode 100644 index f6b1586718d..00000000000 --- a/syft/source/unindexed_directory_resolver_test.go +++ /dev/null @@ -1,744 +0,0 @@ -//go:build !windows -// +build !windows - -package source - -import ( - "io" - "os" - "path" - "path/filepath" - "sort" - "strings" - "testing" - "time" - - "github.com/google/go-cmp/cmp" - "github.com/scylladb/go-set/strset" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/anchore/stereoscope/pkg/file" -) - -func Test_UnindexedDirectoryResolver_Basic(t *testing.T) { - wd, err := os.Getwd() - require.NoError(t, err) - - r := NewUnindexedDirectoryResolver(path.Join(wd, "test-fixtures")) - locations, err := r.FilesByGlob("image-symlinks/*") - require.NoError(t, err) - require.Len(t, locations, 5) -} - -func Test_UnindexedDirectoryResolver_FilesByPath_relativeRoot(t *testing.T) { - cases := []struct { - name string - relativeRoot string - input string - expected []string - }{ - { - name: "should find a file from an absolute input", - relativeRoot: "./test-fixtures/", - input: "/image-symlinks/file-1.txt", - expected: []string{ - "image-symlinks/file-1.txt", - }, - }, - { - name: "should find a file from a relative path", - relativeRoot: "./test-fixtures/", - input: "image-symlinks/file-1.txt", - expected: []string{ - "image-symlinks/file-1.txt", - }, - }, - { - name: "should find a file from a relative path (root above cwd)", - relativeRoot: "../", - input: "sbom/sbom.go", - expected: []string{ - "sbom/sbom.go", - }, - }, - } - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - resolver := 
NewUnindexedDirectoryResolver(c.relativeRoot) - - refs, err := resolver.FilesByPath(c.input) - require.NoError(t, err) - assert.Len(t, refs, len(c.expected)) - s := strset.New() - for _, actual := range refs { - s.Add(actual.RealPath) - } - assert.ElementsMatch(t, c.expected, s.List()) - }) - } -} - -func Test_UnindexedDirectoryResolver_FilesByPath_absoluteRoot(t *testing.T) { - cases := []struct { - name string - relativeRoot string - input string - expected []string - }{ - { - name: "should find a file from an absolute input", - relativeRoot: "./test-fixtures/", - input: "/image-symlinks/file-1.txt", - expected: []string{ - "image-symlinks/file-1.txt", - }, - }, - { - name: "should find a file from a relative path", - relativeRoot: "./test-fixtures/", - input: "image-symlinks/file-1.txt", - expected: []string{ - "image-symlinks/file-1.txt", - }, - }, - { - name: "should find a file from a relative path (root above cwd)", - relativeRoot: "../", - input: "sbom/sbom.go", - expected: []string{ - "sbom/sbom.go", - }, - }, - } - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - // note: this test is all about asserting correct functionality when the given analysis path - // is an absolute path - absRoot, err := filepath.Abs(c.relativeRoot) - require.NoError(t, err) - - resolver := NewUnindexedDirectoryResolver(absRoot) - assert.NoError(t, err) - - refs, err := resolver.FilesByPath(c.input) - require.NoError(t, err) - assert.Len(t, refs, len(c.expected)) - s := strset.New() - for _, actual := range refs { - s.Add(actual.RealPath) - } - assert.ElementsMatch(t, c.expected, s.List()) - }) - } -} - -func Test_UnindexedDirectoryResolver_FilesByPath(t *testing.T) { - cases := []struct { - name string - root string - input string - expected string - refCount int - forcePositiveHasPath bool - }{ - { - name: "finds a file (relative)", - root: "./test-fixtures/", - input: "image-symlinks/file-1.txt", - expected: "image-symlinks/file-1.txt", - refCount: 1, - }, - { 
- name: "finds a file with relative indirection", - root: "./test-fixtures/../test-fixtures", - input: "image-symlinks/file-1.txt", - expected: "image-symlinks/file-1.txt", - refCount: 1, - }, - { - name: "managed non-existing files (relative)", - root: "./test-fixtures/", - input: "test-fixtures/image-symlinks/bogus.txt", - refCount: 0, - }, - { - name: "finds a file (absolute)", - root: "./test-fixtures/", - input: "/image-symlinks/file-1.txt", - expected: "image-symlinks/file-1.txt", - refCount: 1, - }, - { - name: "directories ignored", - root: "./test-fixtures/", - input: "/image-symlinks", - refCount: 0, - forcePositiveHasPath: true, - }, - } - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - resolver := NewUnindexedDirectoryResolver(c.root) - - hasPath := resolver.HasPath(c.input) - if !c.forcePositiveHasPath { - if c.refCount != 0 && !hasPath { - t.Errorf("expected HasPath() to indicate existence, but did not") - } else if c.refCount == 0 && hasPath { - t.Errorf("expected HasPath() to NOT indicate existence, but does") - } - } else if !hasPath { - t.Errorf("expected HasPath() to indicate existence, but did not (force path)") - } - - refs, err := resolver.FilesByPath(c.input) - require.NoError(t, err) - assert.Len(t, refs, c.refCount) - for _, actual := range refs { - assert.Equal(t, c.expected, actual.RealPath) - } - }) - } -} - -func Test_UnindexedDirectoryResolver_MultipleFilesByPath(t *testing.T) { - cases := []struct { - name string - input []string - refCount int - }{ - { - name: "finds multiple files", - input: []string{"image-symlinks/file-1.txt", "image-symlinks/file-2.txt"}, - refCount: 2, - }, - { - name: "skips non-existing files", - input: []string{"image-symlinks/bogus.txt", "image-symlinks/file-1.txt"}, - refCount: 1, - }, - { - name: "does not return anything for non-existing directories", - input: []string{"non-existing/bogus.txt", "non-existing/file-1.txt"}, - refCount: 0, - }, - } - for _, c := range cases { - t.Run(c.name, 
func(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures") - refs, err := resolver.FilesByPath(c.input...) - assert.NoError(t, err) - - if len(refs) != c.refCount { - t.Errorf("unexpected number of refs: %d != %d", len(refs), c.refCount) - } - }) - } -} - -func Test_UnindexedDirectoryResolver_FilesByGlobMultiple(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures") - refs, err := resolver.FilesByGlob("**/image-symlinks/file*") - assert.NoError(t, err) - - assert.Len(t, refs, 2) -} - -func Test_UnindexedDirectoryResolver_FilesByGlobRecursive(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/image-symlinks") - refs, err := resolver.FilesByGlob("**/*.txt") - assert.NoError(t, err) - assert.Len(t, refs, 6) -} - -func Test_UnindexedDirectoryResolver_FilesByGlobSingle(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures") - refs, err := resolver.FilesByGlob("**/image-symlinks/*1.txt") - assert.NoError(t, err) - - assert.Len(t, refs, 1) - assert.Equal(t, "image-symlinks/file-1.txt", refs[0].RealPath) -} - -func Test_UnindexedDirectoryResolver_FilesByPath_ResolvesSymlinks(t *testing.T) { - - tests := []struct { - name string - fixture string - }{ - { - name: "one degree", - fixture: "link_to_new_readme", - }, - { - name: "two degrees", - fixture: "link_to_link_to_new_readme", - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/symlinks-simple") - - refs, err := resolver.FilesByPath(test.fixture) - require.NoError(t, err) - require.Len(t, refs, 1) - - reader, err := resolver.FileContentsByLocation(refs[0]) - require.NoError(t, err) - - actual, err := io.ReadAll(reader) - require.NoError(t, err) - - expected, err := os.ReadFile("test-fixtures/symlinks-simple/readme") - require.NoError(t, err) - - require.Equal(t, string(expected), string(actual)) - }) - } -} - -func 
Test_UnindexedDirectoryResolverDoesNotIgnoreRelativeSystemPaths(t *testing.T) { - // let's make certain that "dev/place" is not ignored, since it is not "/dev/place" - resolver := NewUnindexedDirectoryResolver("test-fixtures/system_paths/target") - - // all paths should be found (non filtering matches a path) - locations, err := resolver.FilesByGlob("**/place") - assert.NoError(t, err) - // 4: within target/ - // 1: target/link --> relative path to "place" // NOTE: this is filtered out since it not unique relative to outside_root/link_target/place - // 1: outside_root/link_target/place - assert.Len(t, locations, 5) - - // ensure that symlink indexing outside of root worked - testLocation := "../outside_root/link_target/place" - ok := false - for _, location := range locations { - if strings.HasSuffix(location.RealPath, testLocation) { - ok = true - } - } - - if !ok { - t.Fatalf("could not find test location=%q", testLocation) - } -} - -func Test_UnindexedDirectoryResover_IndexingNestedSymLinks(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/symlinks-simple") - - // check that we can get the real path - locations, err := resolver.FilesByPath("./readme") - require.NoError(t, err) - assert.Len(t, locations, 1) - - // check that we can access the same file via 1 symlink - locations, err = resolver.FilesByPath("./link_to_new_readme") - require.NoError(t, err) - require.Len(t, locations, 1) - assert.Equal(t, "readme", locations[0].RealPath) - assert.Equal(t, "link_to_new_readme", locations[0].VirtualPath) - - // check that we can access the same file via 2 symlinks - locations, err = resolver.FilesByPath("./link_to_link_to_new_readme") - require.NoError(t, err) - require.Len(t, locations, 1) - assert.Equal(t, "readme", locations[0].RealPath) - assert.Equal(t, "link_to_link_to_new_readme", locations[0].VirtualPath) - - // check that we can access the same file via 2 symlinks - locations, err = resolver.FilesByGlob("**/link_*") - 
require.NoError(t, err) - require.Len(t, locations, 1) // you would think this is 2, however, they point to the same file, and glob only returns unique files - - // returned locations can be in any order - expectedVirtualPaths := []string{ - "link_to_link_to_new_readme", - //"link_to_new_readme", // we filter out this one because the first symlink resolves to the same file - } - - expectedRealPaths := []string{ - "readme", - } - - actualRealPaths := strset.New() - actualVirtualPaths := strset.New() - for _, a := range locations { - actualVirtualPaths.Add(a.VirtualPath) - actualRealPaths.Add(a.RealPath) - } - - assert.ElementsMatch(t, expectedVirtualPaths, actualVirtualPaths.List()) - assert.ElementsMatch(t, expectedRealPaths, actualRealPaths.List()) -} - -func Test_UnindexedDirectoryResover_IndexingNestedSymLinksOutsideOfRoot(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/symlinks-multiple-roots/root") - - // check that we can get the real path - locations, err := resolver.FilesByPath("./readme") - require.NoError(t, err) - assert.Len(t, locations, 1) - - // check that we can access the same file via 2 symlinks (link_to_link_to_readme -> link_to_readme -> readme) - locations, err = resolver.FilesByPath("./link_to_link_to_readme") - require.NoError(t, err) - assert.Len(t, locations, 1) - - // something looks wrong here - t.Failed() -} - -func Test_UnindexedDirectoryResover_RootViaSymlink(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/symlinked-root/nested/link-root") - - locations, err := resolver.FilesByPath("./file1.txt") - require.NoError(t, err) - assert.Len(t, locations, 1) - - locations, err = resolver.FilesByPath("./nested/file2.txt") - require.NoError(t, err) - assert.Len(t, locations, 1) - - locations, err = resolver.FilesByPath("./nested/linked-file1.txt") - require.NoError(t, err) - assert.Len(t, locations, 1) -} - -func Test_UnindexedDirectoryResolver_FileContentsByLocation(t *testing.T) { - 
cwd, err := os.Getwd() - require.NoError(t, err) - - r := NewUnindexedDirectoryResolver(path.Join(cwd, "test-fixtures/image-simple")) - require.NoError(t, err) - - tests := []struct { - name string - location Location - expects string - err bool - }{ - { - name: "use file reference for content requests", - location: NewLocation("file-1.txt"), - expects: "this file has contents", - }, - { - name: "error on empty file reference", - location: NewLocationFromDirectory("doesn't matter", file.Reference{}), - err: true, - }, - } - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - - actual, err := r.FileContentsByLocation(test.location) - if test.err { - require.Error(t, err) - return - } - - require.NoError(t, err) - if test.expects != "" { - b, err := io.ReadAll(actual) - require.NoError(t, err) - assert.Equal(t, test.expects, string(b)) - } - }) - } -} - -func Test_UnindexedDirectoryResover_SymlinkLoopWithGlobsShouldResolve(t *testing.T) { - test := func(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/symlinks-loop") - - locations, err := resolver.FilesByGlob("**/file.target") - require.NoError(t, err) - - require.Len(t, locations, 1) - assert.Equal(t, "devices/loop0/file.target", locations[0].RealPath) - } - - testWithTimeout(t, 5*time.Second, test) -} - -func Test_UnindexedDirectoryResolver_FilesByPath_baseRoot(t *testing.T) { - cases := []struct { - name string - root string - input string - expected []string - }{ - { - name: "should find the base file", - root: "./test-fixtures/symlinks-base/", - input: "./base", - expected: []string{ - "base", - }, - }, - { - name: "should follow a link with a pivoted root", - root: "./test-fixtures/symlinks-base/", - input: "./foo", - expected: []string{ - "base", - }, - }, - { - name: "should follow a relative link with extra parents", - root: "./test-fixtures/symlinks-base/", - input: "./bar", - expected: []string{ - "base", - }, - }, - { - name: "should follow an absolute link 
with extra parents", - root: "./test-fixtures/symlinks-base/", - input: "./baz", - expected: []string{ - "base", - }, - }, - { - name: "should follow an absolute link with extra parents", - root: "./test-fixtures/symlinks-base/", - input: "./sub/link", - expected: []string{ - "sub/item", - }, - }, - { - name: "should follow chained pivoted link", - root: "./test-fixtures/symlinks-base/", - input: "./chain", - expected: []string{ - "base", - }, - }, - } - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - resolver := NewUnindexedDirectoryResolverRooted(c.root, c.root) - - refs, err := resolver.FilesByPath(c.input) - require.NoError(t, err) - assert.Len(t, refs, len(c.expected)) - s := strset.New() - for _, actual := range refs { - s.Add(actual.RealPath) - } - assert.ElementsMatch(t, c.expected, s.List()) - }) - } - -} - -func Test_UnindexedDirectoryResolver_resolvesLinks(t *testing.T) { - tests := []struct { - name string - runner func(FileResolver) []Location - expected []Location - }{ - { - name: "by glob to links", - runner: func(resolver FileResolver) []Location { - // links are searched, but resolve to the real files - // for that reason we need to place **/ in front (which is not the same for other resolvers) - actualLocations, err := resolver.FilesByGlob("**/*ink-*") - assert.NoError(t, err) - return actualLocations - }, - expected: []Location{ - NewVirtualLocation("file-1.txt", "link-1"), - NewVirtualLocation("file-2.txt", "link-2"), - // we already have this real file path via another link, so only one is returned - // NewVirtualLocation("file-2.txt", "link-indirect"), - NewVirtualLocation("file-3.txt", "link-within"), - }, - }, - { - name: "by basename", - runner: func(resolver FileResolver) []Location { - // links are searched, but resolve to the real files - actualLocations, err := resolver.FilesByGlob("**/file-2.txt") - assert.NoError(t, err) - return actualLocations - }, - expected: []Location{ - // this has two copies in the base image, 
which overwrites the same location - NewLocation("file-2.txt"), - }, - }, - { - name: "by basename glob", - runner: func(resolver FileResolver) []Location { - // links are searched, but resolve to the real files - actualLocations, err := resolver.FilesByGlob("**/file-?.txt") - assert.NoError(t, err) - return actualLocations - }, - expected: []Location{ - NewLocation("file-1.txt"), - NewLocation("file-2.txt"), - NewLocation("file-3.txt"), - NewLocation("parent/file-4.txt"), - }, - }, - { - name: "by basename glob to links", - runner: func(resolver FileResolver) []Location { - actualLocations, err := resolver.FilesByGlob("**/link-*") - assert.NoError(t, err) - return actualLocations - }, - expected: []Location{ - NewVirtualLocationFromDirectory("file-1.txt", "link-1", file.Reference{RealPath: "file-1.txt"}), - NewVirtualLocationFromDirectory("file-2.txt", "link-2", file.Reference{RealPath: "file-2.txt"}), - // we already have this real file path via another link, so only one is returned - //NewVirtualLocationFromDirectory("file-2.txt", "link-indirect", file.Reference{RealPath: "file-2.txt"}), - NewVirtualLocationFromDirectory("file-3.txt", "link-within", file.Reference{RealPath: "file-3.txt"}), - }, - }, - { - name: "by extension", - runner: func(resolver FileResolver) []Location { - // links are searched, but resolve to the real files - actualLocations, err := resolver.FilesByGlob("**/*.txt") - assert.NoError(t, err) - return actualLocations - }, - expected: []Location{ - NewLocation("file-1.txt"), - NewLocation("file-2.txt"), - NewLocation("file-3.txt"), - NewLocation("parent/file-4.txt"), - }, - }, - { - name: "by path to degree 1 link", - runner: func(resolver FileResolver) []Location { - // links resolve to the final file - actualLocations, err := resolver.FilesByPath("/link-2") - assert.NoError(t, err) - return actualLocations - }, - expected: []Location{ - // we have multiple copies across layers - NewVirtualLocation("file-2.txt", "link-2"), - }, - }, - { - 
name: "by path to degree 2 link", - runner: func(resolver FileResolver) []Location { - // multiple links resolves to the final file - actualLocations, err := resolver.FilesByPath("/link-indirect") - assert.NoError(t, err) - return actualLocations - }, - expected: []Location{ - // we have multiple copies across layers - NewVirtualLocation("file-2.txt", "link-indirect"), - }, - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/symlinks-from-image-symlinks-fixture") - - actual := test.runner(resolver) - - compareLocations(t, test.expected, actual) - }) - } -} - -func Test_UnindexedDirectoryResolver_DoNotAddVirtualPathsToTree(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/symlinks-prune-indexing") - - allLocations := resolver.AllLocations() - var allRealPaths []file.Path - for l := range allLocations { - allRealPaths = append(allRealPaths, file.Path(l.RealPath)) - } - pathSet := file.NewPathSet(allRealPaths...) 
- - assert.False(t, - pathSet.Contains("before-path/file.txt"), - "symlink destinations should only be indexed at their real path, not through their virtual (symlinked) path", - ) - - assert.False(t, - pathSet.Contains("a-path/file.txt"), - "symlink destinations should only be indexed at their real path, not through their virtual (symlinked) path", - ) -} - -func Test_UnindexedDirectoryResolver_FilesContents_errorOnDirRequest(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/system_paths") - - dirLoc := NewLocation("arg/foo") - - reader, err := resolver.FileContentsByLocation(dirLoc) - require.Error(t, err) - require.Nil(t, reader) -} - -func Test_UnindexedDirectoryResolver_AllLocations(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/symlinks-from-image-symlinks-fixture") - - paths := strset.New() - for loc := range resolver.AllLocations() { - if strings.HasPrefix(loc.RealPath, "/") { - // ignore outside of the fixture root for now - continue - } - paths.Add(loc.RealPath) - } - expected := []string{ - "file-1.txt", - "file-2.txt", - "file-3.txt", - "link-1", - "link-2", - "link-dead", - "link-indirect", - "link-within", - "parent", - "parent-link", - "parent/file-4.txt", - } - - pathsList := paths.List() - sort.Strings(pathsList) - - assert.ElementsMatchf(t, expected, pathsList, "expected all paths to be indexed, but found different paths: \n%s", cmp.Diff(expected, paths.List())) -} - -func Test_WritableUnindexedDirectoryResolver(t *testing.T) { - tmpdir := t.TempDir() - - p := "some/path/file" - c := "some contents" - - dr := NewUnindexedDirectoryResolver(tmpdir) - - locations, err := dr.FilesByPath(p) - require.NoError(t, err) - require.Len(t, locations, 0) - - err = dr.Write(NewLocation(p), strings.NewReader(c)) - require.NoError(t, err) - - locations, err = dr.FilesByPath(p) - require.NoError(t, err) - require.Len(t, locations, 1) - - reader, err := dr.FileContentsByLocation(locations[0]) - 
require.NoError(t, err) - bytes, err := io.ReadAll(reader) - require.Equal(t, c, string(bytes)) -} diff --git a/test/cli/all_formats_convertible_test.go b/test/cli/all_formats_convertible_test.go index 4b3dd478d44..d855a69aa44 100644 --- a/test/cli/all_formats_convertible_test.go +++ b/test/cli/all_formats_convertible_test.go @@ -52,6 +52,13 @@ func TestAllFormatsConvertable(t *testing.T) { convertArgs = append(convertArgs, "--template", test.template) } cmd, stdout, stderr = runSyft(t, test.env, convertArgs...) + if cmd.ProcessState.ExitCode() != 0 { + t.Log("STDOUT:\n", stdout) + t.Log("STDERR:\n", stderr) + t.Log("COMMAND:", strings.Join(cmd.Args, " ")) + t.Fatalf("failure executing syft creating an sbom") + return + } for _, traitFn := range assertions { traitFn(t, stdout, stderr, cmd.ProcessState.ExitCode()) } diff --git a/test/cli/spdx_tooling_validation_test.go b/test/cli/spdx_tooling_validation_test.go index b2aa46c1a3c..2e37d5a5b17 100644 --- a/test/cli/spdx_tooling_validation_test.go +++ b/test/cli/spdx_tooling_validation_test.go @@ -15,9 +15,9 @@ import ( ) func TestSpdxValidationTooling(t *testing.T) { - img := imagetest.GetFixtureImage(t, "docker-archive", "image-java-spdx-tools") - require.NotEmpty(t, img.Metadata.Tags) - imgTag := img.Metadata.Tags[0] + // note: the external tooling requires that the daemon explicitly has the image loaded, not just that + // we can get the image from a cache tar. + imgTag := imagetest.LoadFixtureImageIntoDocker(t, "image-java-spdx-tools") images := []string{ "alpine:3.17.3@sha256:b6ca290b6b4cdcca5b3db3ffa338ee0285c11744b4a6abaa9627746ee3291d8d",