Skip to content

Commit

Permalink
feat: add --cache-all-databases flag (#68)
Browse files Browse the repository at this point in the history
  • Loading branch information
G-Rath committed Mar 12, 2022
1 parent 6b3747a commit 7fdb42c
Show file tree
Hide file tree
Showing 4 changed files with 89 additions and 0 deletions.
10 changes: 10 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,16 @@ This requires the detector to have successfully downloaded a copy of ecosystem
databases required to check the packages discovered during parsing at least
once.

You can have the detector cache the databases for all known ecosystems supported
by the detector for later offline use with the `--cache-all-databases` flag:

```shell
osv-detector --cache-all-databases
```

This can be useful if you're planning to run the detector over a number of
lockfiles in bulk.

### Auxiliary output commands

The detector supports a few auxiliary commands that have it output information
Expand Down
12 changes: 12 additions & 0 deletions internal/lockfile/ecosystems.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
package lockfile

// KnownEcosystems returns every ecosystem that the detector has a lockfile
// parser for. Note that npm, yarn, and pnpm all share the npm ecosystem, so
// there are fewer ecosystems than parsers.
func KnownEcosystems() []Ecosystem {
	ecosystems := make([]Ecosystem, 0, 6)

	ecosystems = append(ecosystems,
		NpmEcosystem,
		CargoEcosystem,
		BundlerEcosystem,
		ComposerEcosystem,
		GoEcosystem,
		PipEcosystem,
	)

	return ecosystems
}
55 changes: 55 additions & 0 deletions internal/lockfile/ecosystems_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
package lockfile_test

import (
"io/ioutil"
"osv-detector/internal/lockfile"
"strings"
"testing"
)

// numberOfLockfileParsers counts the parser implementations in the current
// directory by convention: files named "parse-*.go", excluding their tests.
func numberOfLockfileParsers(t *testing.T) int {
	t.Helper()

	directories, err := ioutil.ReadDir(".")

	if err != nil {
		// include the actual error; the original message ended at ": " and
		// silently dropped it
		t.Fatalf("unable to read current directory: %v", err)
	}

	count := 0

	for _, directory := range directories {
		// each parser lives in a "parse-<name>.go" file; skip its "_test.go"
		// companion so it isn't double-counted
		if strings.HasPrefix(directory.Name(), "parse-") &&
			!strings.HasSuffix(directory.Name(), "_test.go") {
			count++
		}
	}

	return count
}

// TestKnownEcosystems checks that KnownEcosystems stays in sync with the
// parser files on disk, and that no ecosystem is listed twice.
func TestKnownEcosystems(t *testing.T) {
	t.Parallel()

	// npm, yarn, and pnpm all use the same ecosystem,
	// so "ignore" those parsers in the count
	expectedCount := numberOfLockfileParsers(t) - 2

	ecosystems := lockfile.KnownEcosystems()

	if len(ecosystems) != expectedCount {
		t.Errorf("Expected to know about %d ecosystems, but knew about %d", expectedCount, len(ecosystems))
	}

	seen := make(map[lockfile.Ecosystem]struct{}, len(ecosystems))

	for _, ecosystem := range ecosystems {
		if _, dupe := seen[ecosystem]; dupe {
			t.Errorf(`Ecosystem "%s" was listed more than once`, ecosystem)
		}

		seen[ecosystem] = struct{}{}
	}
}
12 changes: 12 additions & 0 deletions main.go
Original file line number Diff line number Diff line change
Expand Up @@ -109,12 +109,19 @@ func loadEcosystemDatabases(ecosystems []internal.Ecosystem, offline bool) OSVDa
return dbs
}

func cacheAllEcosystemDatabases() {
ecosystems := lockfile.KnownEcosystems()

loadEcosystemDatabases(ecosystems, false)
}

func main() {
offline := flag.Bool("offline", false, "Update the OSV database")
parseAs := flag.String("parse-as", "", "Name of a supported lockfile to use to determine how to parse the given file")
printVersion := flag.Bool("version", false, "Print version information")
listEcosystems := flag.Bool("list-ecosystems", false, "List all the ecosystems present in the loaded OSV database")
listPackages := flag.Bool("list-packages", false, "List all the packages that were parsed from the given file")
cacheAllDatabases := flag.Bool("cache-all-databases", false, "Cache all the known ecosystem databases for offline use")

flag.Parse()

Expand All @@ -123,6 +130,11 @@ func main() {
os.Exit(0)
}

if *cacheAllDatabases {
cacheAllEcosystemDatabases()
os.Exit(0)
}

pathToLockOrDirectory := flag.Arg(0)

packages, err := lockfile.Parse(pathToLockOrDirectory, *parseAs)
Expand Down

0 comments on commit 7fdb42c

Please sign in to comment.