diff --git a/DEVELOPING.md b/DEVELOPING.md index 2a787bef11f..7d915b7a313 100644 --- a/DEVELOPING.md +++ b/DEVELOPING.md @@ -167,12 +167,12 @@ always feel free to file an issue or reach out to us [on slack](https://anchore. #### Searching for files -All catalogers are provided an instance of the [`source.FileResolver`](https://github.com/anchore/syft/blob/v0.70.0/syft/source/file_resolver.go#L8) to interface with the image and search for files. The implementations for these +All catalogers are provided an instance of the [`file.Resolver`](https://github.com/anchore/syft/blob/v0.70.0/syft/source/file_resolver.go#L8) to interface with the image and search for files. The implementations for these abstractions leverage [`stereoscope`](https://github.com/anchore/stereoscope) in order to perform searching. Here is a rough outline how that works: -1. a stereoscope `file.Index` is searched based on the input given (a path, glob, or MIME type). The index is relatively fast to search, but requires results to be filtered down to the files that exist in the specific layer(s) of interest. This is done automatically by the `filetree.Searcher` abstraction. This abstraction will fallback to searching directly against the raw `filetree.FileTree` if the index does not contain the file(s) of interest. Note: the `filetree.Searcher` is used by the `source.FileResolver` abstraction. -2. Once the set of files are returned from the `filetree.Searcher` the results are filtered down further to return the most unique file results. For example, you may have requested for files by a glob that returns multiple results. These results are filtered down to deduplicate by real files, so if a result contains two references to the same file, say one accessed via symlink and one accessed via the real path, then the real path reference is returned and the symlink reference is filtered out. If both were accessed by symlink then the first (by lexical order) is returned. 
This is done automatically by the `source.FileResolver` abstraction. +1. a stereoscope `file.Index` is searched based on the input given (a path, glob, or MIME type). The index is relatively fast to search, but requires results to be filtered down to the files that exist in the specific layer(s) of interest. This is done automatically by the `filetree.Searcher` abstraction. This abstraction will fall back to searching directly against the raw `filetree.FileTree` if the index does not contain the file(s) of interest. Note: the `filetree.Searcher` is used by the `file.Resolver` abstraction. +2. Once the set of files are returned from the `filetree.Searcher` the results are filtered down further to return the most unique file results. For example, you may have requested for files by a glob that returns multiple results. These results are filtered down to deduplicate by real files, so if a result contains two references to the same file, say one accessed via symlink and one accessed via the real path, then the real path reference is returned and the symlink reference is filtered out. If both were accessed by symlink then the first (by lexical order) is returned. This is done automatically by the `file.Resolver` abstraction. 3. By the time results reach the `pkg.Cataloger` you are guaranteed to have a set of unique files that exist in the layer(s) of interest (relative to what the resolver supports). 
## Testing diff --git a/cmd/syft/cli/eventloop/tasks.go b/cmd/syft/cli/eventloop/tasks.go index 56bbcc93535..536a39ee6f1 100644 --- a/cmd/syft/cli/eventloop/tasks.go +++ b/cmd/syft/cli/eventloop/tasks.go @@ -8,6 +8,10 @@ import ( "github.com/anchore/syft/syft" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/file/cataloger/filecontent" + "github.com/anchore/syft/syft/file/cataloger/filedigest" + "github.com/anchore/syft/syft/file/cataloger/filemetadata" + "github.com/anchore/syft/syft/file/cataloger/secrets" "github.com/anchore/syft/syft/sbom" "github.com/anchore/syft/syft/source" ) @@ -61,7 +65,7 @@ func generateCatalogFileMetadataTask(app *config.Application) (Task, error) { return nil, nil } - metadataCataloger := file.NewMetadataCataloger() + metadataCataloger := filemetadata.NewCataloger() task := func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) { resolver, err := src.FileResolver(app.FileMetadata.Cataloger.ScopeOpt) @@ -104,10 +108,7 @@ func generateCatalogFileDigestsTask(app *config.Application) (Task, error) { hashes = append(hashes, hashObj) } - digestsCataloger, err := file.NewDigestsCataloger(hashes) - if err != nil { - return nil, err - } + digestsCataloger := filedigest.NewCataloger(hashes) task := func(results *sbom.Artifacts, src *source.Source) ([]artifact.Relationship, error) { resolver, err := src.FileResolver(app.FileMetadata.Cataloger.ScopeOpt) @@ -131,12 +132,12 @@ func generateCatalogSecretsTask(app *config.Application) (Task, error) { return nil, nil } - patterns, err := file.GenerateSearchPatterns(file.DefaultSecretsPatterns, app.Secrets.AdditionalPatterns, app.Secrets.ExcludePatternNames) + patterns, err := secrets.GenerateSearchPatterns(secrets.DefaultSecretsPatterns, app.Secrets.AdditionalPatterns, app.Secrets.ExcludePatternNames) if err != nil { return nil, err } - secretsCataloger, err := 
file.NewSecretsCataloger(patterns, app.Secrets.RevealValues, app.Secrets.SkipFilesAboveSize) + secretsCataloger, err := secrets.NewCataloger(patterns, app.Secrets.RevealValues, app.Secrets.SkipFilesAboveSize) //nolint:staticcheck if err != nil { return nil, err } @@ -163,7 +164,7 @@ func generateCatalogContentsTask(app *config.Application) (Task, error) { return nil, nil } - contentsCataloger, err := file.NewContentsCataloger(app.FileContents.Globs, app.FileContents.SkipFilesAboveSize) + contentsCataloger, err := filecontent.NewCataloger(app.FileContents.Globs, app.FileContents.SkipFilesAboveSize) //nolint:staticcheck if err != nil { return nil, err } diff --git a/internal/licenses/parser.go b/internal/licenses/parser.go index b5cde28f7d0..58e4deb2d22 100644 --- a/internal/licenses/parser.go +++ b/internal/licenses/parser.go @@ -5,9 +5,9 @@ import ( "github.com/google/licensecheck" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/license" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) const ( @@ -16,7 +16,7 @@ const ( ) // Parse scans the contents of a license file to attempt to determine the type of license it is -func Parse(reader io.Reader, l source.Location) (licenses []pkg.License, err error) { +func Parse(reader io.Reader, l file.Location) (licenses []pkg.License, err error) { licenses = make([]pkg.License, 0) contents, err := io.ReadAll(reader) if err != nil { diff --git a/syft/event/parsers/parsers.go b/syft/event/parsers/parsers.go index e7a3d703d4f..3d0c8bfb85b 100644 --- a/syft/event/parsers/parsers.go +++ b/syft/event/parsers/parsers.go @@ -12,7 +12,7 @@ import ( "github.com/anchore/syft/syft/event" "github.com/anchore/syft/syft/event/monitor" - "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/file/cataloger/secrets" "github.com/anchore/syft/syft/pkg/cataloger" ) @@ -54,12 +54,12 @@ func ParsePackageCatalogerStarted(e partybus.Event) (*cataloger.Monitor, 
error) return &monitor, nil } -func ParseSecretsCatalogingStarted(e partybus.Event) (*file.SecretsMonitor, error) { +func ParseSecretsCatalogingStarted(e partybus.Event) (*secrets.Monitor, error) { if err := checkEventType(e.Type, event.SecretsCatalogerStarted); err != nil { return nil, err } - monitor, ok := e.Value.(file.SecretsMonitor) + monitor, ok := e.Value.(secrets.Monitor) if !ok { return nil, newPayloadErr(e.Type, "Value", e.Value) } diff --git a/syft/file/contents_cataloger.go b/syft/file/cataloger/filecontent/cataloger.go similarity index 75% rename from syft/file/contents_cataloger.go rename to syft/file/cataloger/filecontent/cataloger.go index 1e0cfe33b22..d108af3931f 100644 --- a/syft/file/contents_cataloger.go +++ b/syft/file/cataloger/filecontent/cataloger.go @@ -1,4 +1,4 @@ -package file +package filecontent import ( "bytes" @@ -8,24 +8,26 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) -type ContentsCataloger struct { +// Deprecated: will be removed in syft v1.0.0 +type Cataloger struct { globs []string skipFilesAboveSizeInBytes int64 } -func NewContentsCataloger(globs []string, skipFilesAboveSize int64) (*ContentsCataloger, error) { - return &ContentsCataloger{ +// Deprecated: will be removed in syft v1.0.0 +func NewCataloger(globs []string, skipFilesAboveSize int64) (*Cataloger, error) { + return &Cataloger{ globs: globs, skipFilesAboveSizeInBytes: skipFilesAboveSize, }, nil } -func (i *ContentsCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates]string, error) { - results := make(map[source.Coordinates]string) - var locations []source.Location +func (i *Cataloger) Catalog(resolver file.Resolver) (map[file.Coordinates]string, error) { + results := make(map[file.Coordinates]string) + var locations []file.Location locations, err := resolver.FilesByGlob(i.globs...) 
if err != nil { @@ -56,7 +58,7 @@ func (i *ContentsCataloger) Catalog(resolver source.FileResolver) (map[source.Co return results, nil } -func (i *ContentsCataloger) catalogLocation(resolver source.FileResolver, location source.Location) (string, error) { +func (i *Cataloger) catalogLocation(resolver file.Resolver, location file.Location) (string, error) { contentReader, err := resolver.FileContentsByLocation(location) if err != nil { return "", err diff --git a/syft/file/cataloger/filecontent/cataloger_test.go b/syft/file/cataloger/filecontent/cataloger_test.go new file mode 100644 index 00000000000..719bdd48183 --- /dev/null +++ b/syft/file/cataloger/filecontent/cataloger_test.go @@ -0,0 +1,80 @@ +package filecontent + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/anchore/syft/syft/file" +) + +func TestContentsCataloger(t *testing.T) { + allFiles := []string{"test-fixtures/last/path.txt", "test-fixtures/another-path.txt", "test-fixtures/a-path.txt"} + + tests := []struct { + name string + globs []string + maxSize int64 + files []string + expected map[file.Coordinates]string + }{ + { + name: "multi-pattern", + globs: []string{"test-fixtures/last/*.txt", "test-fixtures/*.txt"}, + files: allFiles, + expected: map[file.Coordinates]string{ + file.NewLocation("test-fixtures/last/path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9sYXN0L3BhdGgudHh0IGZpbGUgY29udGVudHMh", + file.NewLocation("test-fixtures/another-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hbm90aGVyLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + file.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + }, + }, + { + name: "no-patterns", + globs: []string{}, + files: []string{"test-fixtures/last/path.txt", "test-fixtures/another-path.txt", "test-fixtures/a-path.txt"}, + expected: map[file.Coordinates]string{}, + }, + { + name: "all-txt", + globs: []string{"**/*.txt"}, + files: allFiles, + expected: map[file.Coordinates]string{ + 
file.NewLocation("test-fixtures/last/path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9sYXN0L3BhdGgudHh0IGZpbGUgY29udGVudHMh", + file.NewLocation("test-fixtures/another-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hbm90aGVyLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + file.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + }, + }, + { + name: "subpath", + globs: []string{"test-fixtures/*.txt"}, + files: allFiles, + expected: map[file.Coordinates]string{ + file.NewLocation("test-fixtures/another-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hbm90aGVyLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + file.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + }, + }, + { + name: "size-filter", + maxSize: 42, + globs: []string{"**/*.txt"}, + files: allFiles, + expected: map[file.Coordinates]string{ + file.NewLocation("test-fixtures/last/path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9sYXN0L3BhdGgudHh0IGZpbGUgY29udGVudHMh", + file.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + c, err := NewCataloger(test.globs, test.maxSize) + assert.NoError(t, err) + + resolver := file.NewMockResolverForPaths(test.files...) 
+ actual, err := c.Catalog(resolver) + assert.NoError(t, err) + assert.Equal(t, test.expected, actual, "mismatched contents") + + }) + } +} diff --git a/syft/file/test-fixtures/a-path.txt b/syft/file/cataloger/filecontent/test-fixtures/a-path.txt similarity index 100% rename from syft/file/test-fixtures/a-path.txt rename to syft/file/cataloger/filecontent/test-fixtures/a-path.txt diff --git a/syft/file/test-fixtures/another-path.txt b/syft/file/cataloger/filecontent/test-fixtures/another-path.txt similarity index 100% rename from syft/file/test-fixtures/another-path.txt rename to syft/file/cataloger/filecontent/test-fixtures/another-path.txt diff --git a/syft/file/test-fixtures/last/empty/empty b/syft/file/cataloger/filecontent/test-fixtures/last/empty/empty similarity index 100% rename from syft/file/test-fixtures/last/empty/empty rename to syft/file/cataloger/filecontent/test-fixtures/last/empty/empty diff --git a/syft/file/test-fixtures/last/path.txt b/syft/file/cataloger/filecontent/test-fixtures/last/path.txt similarity index 100% rename from syft/file/test-fixtures/last/path.txt rename to syft/file/cataloger/filecontent/test-fixtures/last/path.txt diff --git a/syft/file/cataloger/filedigest/cataloger.go b/syft/file/cataloger/filedigest/cataloger.go new file mode 100644 index 00000000000..e06c05a3514 --- /dev/null +++ b/syft/file/cataloger/filedigest/cataloger.go @@ -0,0 +1,109 @@ +package filedigest + +import ( + "crypto" + "errors" + + "github.com/wagoodman/go-partybus" + "github.com/wagoodman/go-progress" + + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" + "github.com/anchore/syft/internal" + "github.com/anchore/syft/internal/bus" + "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/event" + "github.com/anchore/syft/syft/file" + internal2 "github.com/anchore/syft/syft/file/cataloger/internal" +) + +var ErrUndigestableFile = errors.New("undigestable file") + +type Cataloger struct { + hashes 
[]crypto.Hash +} + +func NewCataloger(hashes []crypto.Hash) *Cataloger { + return &Cataloger{ + hashes: hashes, + } +} + +func (i *Cataloger) Catalog(resolver file.Resolver, coordinates ...file.Coordinates) (map[file.Coordinates][]file.Digest, error) { + results := make(map[file.Coordinates][]file.Digest) + var locations []file.Location + + if len(coordinates) == 0 { + locations = internal2.AllRegularFiles(resolver) + } else { + for _, c := range coordinates { + locations = append(locations, file.NewLocationFromCoordinates(c)) + } + } + + stage, prog := digestsCatalogingProgress(int64(len(locations))) + for _, location := range locations { + stage.Current = location.RealPath + result, err := i.catalogLocation(resolver, location) + + if errors.Is(err, ErrUndigestableFile) { + continue + } + + if internal.IsErrPathPermission(err) { + log.Debugf("file digests cataloger skipping %q: %+v", location.RealPath, err) + continue + } + + if err != nil { + return nil, err + } + prog.Increment() + results[location.Coordinates] = result + } + log.Debugf("file digests cataloger processed %d files", prog.Current()) + prog.SetCompleted() + return results, nil +} + +func (i *Cataloger) catalogLocation(resolver file.Resolver, location file.Location) ([]file.Digest, error) { + meta, err := resolver.FileMetadataByLocation(location) + if err != nil { + return nil, err + } + + // we should only attempt to report digests for files that are regular files (don't attempt to resolve links) + if meta.Type != stereoscopeFile.TypeRegular { + return nil, ErrUndigestableFile + } + + contentReader, err := resolver.FileContentsByLocation(location) + if err != nil { + return nil, err + } + defer internal.CloseAndLogError(contentReader, location.VirtualPath) + + digests, err := file.NewDigestsFromFile(contentReader, i.hashes) + if err != nil { + return nil, internal.ErrPath{Context: "digests-cataloger", Path: location.RealPath, Err: err} + } + + return digests, nil +} + +func 
digestsCatalogingProgress(locations int64) (*progress.Stage, *progress.Manual) { + stage := &progress.Stage{} + prog := progress.NewManual(locations) + + bus.Publish(partybus.Event{ + Type: event.FileDigestsCatalogerStarted, + Value: struct { + progress.Stager + progress.Progressable + }{ + Stager: progress.Stager(stage), + Progressable: prog, + }, + }) + + return stage, prog +} diff --git a/syft/file/digest_cataloger_test.go b/syft/file/cataloger/filedigest/cataloger_test.go similarity index 74% rename from syft/file/digest_cataloger_test.go rename to syft/file/cataloger/filedigest/cataloger_test.go index 57a1a8071ee..ed8562cbd38 100644 --- a/syft/file/digest_cataloger_test.go +++ b/syft/file/cataloger/filedigest/cataloger_test.go @@ -1,9 +1,9 @@ -package file +package filedigest import ( "crypto" "fmt" - "io/ioutil" + "io" "os" "path/filepath" "testing" @@ -11,29 +11,36 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" "github.com/anchore/stereoscope/pkg/imagetest" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/source" ) -func testDigests(t testing.TB, root string, files []string, hashes ...crypto.Hash) map[source.Coordinates][]Digest { - digests := make(map[source.Coordinates][]Digest) +func testDigests(t testing.TB, root string, files []string, hashes ...crypto.Hash) map[file.Coordinates][]file.Digest { + digests := make(map[file.Coordinates][]file.Digest) for _, f := range files { fh, err := os.Open(filepath.Join(root, f)) if err != nil { t.Fatalf("could not open %q : %+v", f, err) } - b, err := ioutil.ReadAll(fh) + b, err := io.ReadAll(fh) if err != nil { t.Fatalf("could not read %q : %+v", f, err) } + if len(b) == 0 { + // we don't keep digests for empty files + digests[file.NewLocation(f).Coordinates] = []file.Digest{} + continue + } + for _, hash := range hashes { h := 
hash.New() h.Write(b) - digests[source.NewLocation(f).Coordinates] = append(digests[source.NewLocation(f).Coordinates], Digest{ - Algorithm: CleanDigestAlgorithmName(hash.String()), + digests[file.NewLocation(f).Coordinates] = append(digests[file.NewLocation(f).Coordinates], file.Digest{ + Algorithm: file.CleanDigestAlgorithmName(hash.String()), Value: fmt.Sprintf("%x", h.Sum(nil)), }) } @@ -48,7 +55,7 @@ func TestDigestsCataloger(t *testing.T) { name string digests []crypto.Hash files []string - expected map[source.Coordinates][]Digest + expected map[file.Coordinates][]file.Digest }{ { name: "md5", @@ -66,8 +73,7 @@ func TestDigestsCataloger(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - c, err := NewDigestsCataloger(test.digests) - require.NoError(t, err) + c := NewCataloger(test.digests) src, err := source.NewFromDirectory("test-fixtures/last/") require.NoError(t, err) @@ -86,11 +92,7 @@ func TestDigestsCataloger(t *testing.T) { func TestDigestsCataloger_MixFileTypes(t *testing.T) { testImage := "image-file-type-mix" - if *updateImageGoldenFiles { - imagetest.UpdateGoldenFixtureImage(t, testImage) - } - - img := imagetest.GetGoldenFixtureImage(t, testImage) + img := imagetest.GetFixtureImage(t, "docker-archive", testImage) src, err := source.NewFromImage(img, "---") if err != nil { @@ -110,9 +112,10 @@ func TestDigestsCataloger_MixFileTypes(t *testing.T) { path: "/file-1.txt", expected: "888c139e550867814eb7c33b84d76e4d", }, - { - path: "/hardlink-1", - }, + // this is difficult to reproduce in a cross-platform way + //{ + // path: "/hardlink-1", + //}, { path: "/symlink-1", }, @@ -132,21 +135,18 @@ func TestDigestsCataloger_MixFileTypes(t *testing.T) { for _, test := range tests { t.Run(test.path, func(t *testing.T) { - c, err := NewDigestsCataloger([]crypto.Hash{crypto.MD5}) - if err != nil { - t.Fatalf("unable to get cataloger: %+v", err) - } + c := NewCataloger([]crypto.Hash{crypto.MD5}) actual, err := 
c.Catalog(resolver) if err != nil { t.Fatalf("could not catalog: %+v", err) } - _, ref, err := img.SquashedTree().File(file.Path(test.path)) + _, ref, err := img.SquashedTree().File(stereoscopeFile.Path(test.path)) if err != nil { t.Fatalf("unable to get file=%q : %+v", test.path, err) } - l := source.NewLocationFromImage(test.path, *ref.Reference, img) + l := file.NewLocationFromImage(test.path, *ref.Reference, img) if len(actual[l.Coordinates]) == 0 { if test.expected != "" { diff --git a/syft/file/test-fixtures/image-file-type-mix/Dockerfile b/syft/file/cataloger/filedigest/test-fixtures/image-file-type-mix/Dockerfile similarity index 81% rename from syft/file/test-fixtures/image-file-type-mix/Dockerfile rename to syft/file/cataloger/filedigest/test-fixtures/image-file-type-mix/Dockerfile index c2d61ef4da9..6ede1b68da8 100644 --- a/syft/file/test-fixtures/image-file-type-mix/Dockerfile +++ b/syft/file/cataloger/filedigest/test-fixtures/image-file-type-mix/Dockerfile @@ -1,4 +1,4 @@ -FROM busybox:latest +FROM busybox:1.28.1@sha256:c7b0a24019b0e6eda714ec0fa137ad42bc44a754d9cea17d14fba3a80ccc1ee4 ADD file-1.txt . 
RUN chmod 644 file-1.txt diff --git a/syft/file/test-fixtures/image-file-type-mix/file-1.txt b/syft/file/cataloger/filedigest/test-fixtures/image-file-type-mix/file-1.txt similarity index 100% rename from syft/file/test-fixtures/image-file-type-mix/file-1.txt rename to syft/file/cataloger/filedigest/test-fixtures/image-file-type-mix/file-1.txt diff --git a/syft/source/test-fixtures/symlinks-base/base b/syft/file/cataloger/filedigest/test-fixtures/last/empty/empty similarity index 100% rename from syft/source/test-fixtures/symlinks-base/base rename to syft/file/cataloger/filedigest/test-fixtures/last/empty/empty diff --git a/syft/file/cataloger/filedigest/test-fixtures/last/path.txt b/syft/file/cataloger/filedigest/test-fixtures/last/path.txt new file mode 100644 index 00000000000..3d4a165ab88 --- /dev/null +++ b/syft/file/cataloger/filedigest/test-fixtures/last/path.txt @@ -0,0 +1 @@ +test-fixtures/last/path.txt file contents! \ No newline at end of file diff --git a/syft/file/metadata_cataloger.go b/syft/file/cataloger/filemetadata/cataloger.go similarity index 59% rename from syft/file/metadata_cataloger.go rename to syft/file/cataloger/filemetadata/cataloger.go index 44d46f030ce..bae2344f479 100644 --- a/syft/file/metadata_cataloger.go +++ b/syft/file/cataloger/filemetadata/cataloger.go @@ -1,4 +1,4 @@ -package file +package filemetadata import ( "github.com/wagoodman/go-partybus" @@ -7,24 +7,37 @@ import ( "github.com/anchore/syft/internal/bus" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/event" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) -type MetadataCataloger struct { +type Cataloger struct { } -func NewMetadataCataloger() *MetadataCataloger { - return &MetadataCataloger{} +func NewCataloger() *Cataloger { + return &Cataloger{} } -func (i *MetadataCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates]source.FileMetadata, error) { - results := 
make(map[source.Coordinates]source.FileMetadata) - var locations []source.Location - for location := range resolver.AllLocations() { - locations = append(locations, location) +func (i *Cataloger) Catalog(resolver file.Resolver, coordinates ...file.Coordinates) (map[file.Coordinates]file.Metadata, error) { + results := make(map[file.Coordinates]file.Metadata) + var locations <-chan file.Location + + if len(coordinates) == 0 { + locations = resolver.AllLocations() + } else { + locations = func() <-chan file.Location { + ch := make(chan file.Location) + go func() { + defer close(ch) + for _, c := range coordinates { + ch <- file.NewLocationFromCoordinates(c) + } + }() + return ch + }() } + stage, prog := metadataCatalogingProgress(int64(len(locations))) - for _, location := range locations { + for location := range locations { stage.Current = location.RealPath metadata, err := resolver.FileMetadataByLocation(location) if err != nil { diff --git a/syft/file/metadata_cataloger_test.go b/syft/file/cataloger/filemetadata/cataloger_test.go similarity index 62% rename from syft/file/metadata_cataloger_test.go rename to syft/file/cataloger/filemetadata/cataloger_test.go index 3b625ba4f10..99dfa908a9f 100644 --- a/syft/file/metadata_cataloger_test.go +++ b/syft/file/cataloger/filemetadata/cataloger_test.go @@ -1,30 +1,24 @@ -package file +package filemetadata import ( - "flag" "os" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" "github.com/anchore/stereoscope/pkg/imagetest" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/source" ) -var updateImageGoldenFiles = flag.Bool("update-image", false, "update the golden fixture images used for testing") - func TestFileMetadataCataloger(t *testing.T) { testImage := "image-file-type-mix" - if *updateImageGoldenFiles { - imagetest.UpdateGoldenFixtureImage(t, 
testImage) - } - - img := imagetest.GetGoldenFixtureImage(t, testImage) + img := imagetest.GetFixtureImage(t, "docker-archive", testImage) - c := NewMetadataCataloger() + c := NewCataloger() src, err := source.NewFromImage(img, "---") if err != nil { @@ -44,51 +38,36 @@ func TestFileMetadataCataloger(t *testing.T) { tests := []struct { path string exists bool - expected source.FileMetadata + expected file.Metadata err bool }{ + // note: it is difficult to add a hardlink-based test in a cross-platform way and is already covered well in stereoscope { path: "/file-1.txt", exists: true, - expected: source.FileMetadata{ - FileInfo: file.ManualInfo{ + expected: file.Metadata{ + FileInfo: stereoscopeFile.ManualInfo{ NameValue: "file-1.txt", ModeValue: 0644, SizeValue: 7, }, Path: "/file-1.txt", - Type: file.TypeRegular, + Type: stereoscopeFile.TypeRegular, UserID: 1, GroupID: 2, MIMEType: "text/plain", }, }, - { - path: "/hardlink-1", - exists: true, - expected: source.FileMetadata{ - FileInfo: file.ManualInfo{ - NameValue: "hardlink-1", - ModeValue: 0644, - }, - Path: "/hardlink-1", - Type: file.TypeHardLink, - LinkDestination: "file-1.txt", - UserID: 1, - GroupID: 2, - MIMEType: "", - }, - }, { path: "/symlink-1", exists: true, - expected: source.FileMetadata{ + expected: file.Metadata{ Path: "/symlink-1", - FileInfo: file.ManualInfo{ + FileInfo: stereoscopeFile.ManualInfo{ NameValue: "symlink-1", ModeValue: 0777 | os.ModeSymlink, }, - Type: file.TypeSymLink, + Type: stereoscopeFile.TypeSymLink, LinkDestination: "file-1.txt", UserID: 0, GroupID: 0, @@ -98,13 +77,13 @@ func TestFileMetadataCataloger(t *testing.T) { { path: "/char-device-1", exists: true, - expected: source.FileMetadata{ + expected: file.Metadata{ Path: "/char-device-1", - FileInfo: file.ManualInfo{ + FileInfo: stereoscopeFile.ManualInfo{ NameValue: "char-device-1", ModeValue: 0644 | os.ModeDevice | os.ModeCharDevice, }, - Type: file.TypeCharacterDevice, + Type: stereoscopeFile.TypeCharacterDevice, 
UserID: 0, GroupID: 0, MIMEType: "", @@ -113,13 +92,13 @@ func TestFileMetadataCataloger(t *testing.T) { { path: "/block-device-1", exists: true, - expected: source.FileMetadata{ + expected: file.Metadata{ Path: "/block-device-1", - FileInfo: file.ManualInfo{ + FileInfo: stereoscopeFile.ManualInfo{ NameValue: "block-device-1", ModeValue: 0644 | os.ModeDevice, }, - Type: file.TypeBlockDevice, + Type: stereoscopeFile.TypeBlockDevice, UserID: 0, GroupID: 0, MIMEType: "", @@ -128,13 +107,13 @@ func TestFileMetadataCataloger(t *testing.T) { { path: "/fifo-1", exists: true, - expected: source.FileMetadata{ + expected: file.Metadata{ Path: "/fifo-1", - FileInfo: file.ManualInfo{ + FileInfo: stereoscopeFile.ManualInfo{ NameValue: "fifo-1", ModeValue: 0644 | os.ModeNamedPipe, }, - Type: file.TypeFIFO, + Type: stereoscopeFile.TypeFIFO, UserID: 0, GroupID: 0, MIMEType: "", @@ -143,13 +122,13 @@ func TestFileMetadataCataloger(t *testing.T) { { path: "/bin", exists: true, - expected: source.FileMetadata{ + expected: file.Metadata{ Path: "/bin", - FileInfo: file.ManualInfo{ + FileInfo: stereoscopeFile.ManualInfo{ NameValue: "bin", ModeValue: 0755 | os.ModeDir, }, - Type: file.TypeDirectory, + Type: stereoscopeFile.TypeDirectory, UserID: 0, GroupID: 0, MIMEType: "", @@ -159,15 +138,15 @@ func TestFileMetadataCataloger(t *testing.T) { for _, test := range tests { t.Run(test.path, func(t *testing.T) { - _, ref, err := img.SquashedTree().File(file.Path(test.path)) + _, ref, err := img.SquashedTree().File(stereoscopeFile.Path(test.path)) require.NoError(t, err) - l := source.NewLocationFromImage(test.path, *ref.Reference, img) + l := file.NewLocationFromImage(test.path, *ref.Reference, img) if _, ok := actual[l.Coordinates]; ok { // we're not interested in keeping the test fixtures up to date with the latest file modification times // thus ModTime is not under test - fi := test.expected.FileInfo.(file.ManualInfo) + fi := test.expected.FileInfo.(stereoscopeFile.ManualInfo) 
fi.ModTimeValue = actual[l.Coordinates].ModTime() test.expected.FileInfo = fi } diff --git a/syft/file/cataloger/filemetadata/test-fixtures/image-file-type-mix/Dockerfile b/syft/file/cataloger/filemetadata/test-fixtures/image-file-type-mix/Dockerfile new file mode 100644 index 00000000000..6ede1b68da8 --- /dev/null +++ b/syft/file/cataloger/filemetadata/test-fixtures/image-file-type-mix/Dockerfile @@ -0,0 +1,13 @@ +FROM busybox:1.28.1@sha256:c7b0a24019b0e6eda714ec0fa137ad42bc44a754d9cea17d14fba3a80ccc1ee4 + +ADD file-1.txt . +RUN chmod 644 file-1.txt +RUN chown 1:2 file-1.txt +RUN ln -s file-1.txt symlink-1 +# note: hard links may behave inconsistently, this should be a golden image +RUN ln file-1.txt hardlink-1 +RUN mknod char-device-1 c 89 1 +RUN mknod block-device-1 b 0 1 +RUN mknod fifo-1 p +RUN mkdir /dir +RUN rm -rf home etc/group etc/localtime etc/mtab etc/network etc/passwd etc/shadow var usr bin/* \ No newline at end of file diff --git a/syft/source/test-fixtures/image-symlinks/file-1.txt b/syft/file/cataloger/filemetadata/test-fixtures/image-file-type-mix/file-1.txt similarity index 100% rename from syft/source/test-fixtures/image-symlinks/file-1.txt rename to syft/file/cataloger/filemetadata/test-fixtures/image-file-type-mix/file-1.txt diff --git a/syft/file/all_regular_files.go b/syft/file/cataloger/internal/all_regular_files.go similarity index 70% rename from syft/file/all_regular_files.go rename to syft/file/cataloger/internal/all_regular_files.go index 5dcf8974430..ccc1b3813ba 100644 --- a/syft/file/all_regular_files.go +++ b/syft/file/cataloger/internal/all_regular_files.go @@ -1,12 +1,12 @@ -package file +package internal import ( - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" "github.com/anchore/syft/internal/log" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) -func allRegularFiles(resolver source.FileResolver) (locations []source.Location) { 
+func AllRegularFiles(resolver file.Resolver) (locations []file.Location) { for location := range resolver.AllLocations() { resolvedLocations, err := resolver.FilesByPath(location.RealPath) if err != nil { @@ -21,7 +21,7 @@ func allRegularFiles(resolver source.FileResolver) (locations []source.Location) continue } - if metadata.Type != file.TypeRegular { + if metadata.Type != stereoscopeFile.TypeRegular { continue } locations = append(locations, resolvedLocation) diff --git a/syft/file/all_regular_files_test.go b/syft/file/cataloger/internal/all_regular_files_test.go similarity index 78% rename from syft/file/all_regular_files_test.go rename to syft/file/cataloger/internal/all_regular_files_test.go index 096480721a5..714e733e689 100644 --- a/syft/file/all_regular_files_test.go +++ b/syft/file/cataloger/internal/all_regular_files_test.go @@ -1,4 +1,4 @@ -package file +package internal import ( "testing" @@ -9,30 +9,23 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/stereoscope/pkg/imagetest" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/source" ) func Test_allRegularFiles(t *testing.T) { - type access struct { - realPath string - virtualPath string - } tests := []struct { name string - setup func() source.FileResolver + setup func() file.Resolver wantRealPaths *strset.Set wantVirtualPaths *strset.Set }{ { name: "image", - setup: func() source.FileResolver { + setup: func() file.Resolver { testImage := "image-file-type-mix" - if *updateImageGoldenFiles { - imagetest.UpdateGoldenFixtureImage(t, testImage) - } - - img := imagetest.GetGoldenFixtureImage(t, testImage) + img := imagetest.GetFixtureImage(t, "docker-archive", testImage) s, err := source.NewFromImage(img, "---") require.NoError(t, err) @@ -47,7 +40,7 @@ func Test_allRegularFiles(t *testing.T) { }, { name: "directory", - setup: func() source.FileResolver { + setup: func() file.Resolver { s, err := 
source.NewFromDirectory("test-fixtures/symlinked-root/nested/link-root") require.NoError(t, err) r, err := s.FileResolver(source.SquashedScope) @@ -61,7 +54,7 @@ func Test_allRegularFiles(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { resolver := tt.setup() - locations := allRegularFiles(resolver) + locations := AllRegularFiles(resolver) realLocations := strset.New() virtualLocations := strset.New() for _, l := range locations { @@ -70,6 +63,13 @@ func Test_allRegularFiles(t *testing.T) { virtualLocations.Add(l.VirtualPath) } } + + // this is difficult to reproduce in a cross-platform way + realLocations.Remove("/hardlink-1") + virtualLocations.Remove("/hardlink-1") + tt.wantRealPaths.Remove("/hardlink-1") + tt.wantVirtualPaths.Remove("/hardlink-1") + assert.ElementsMatch(t, tt.wantRealPaths.List(), realLocations.List(), "real paths differ: "+cmp.Diff(tt.wantRealPaths.List(), realLocations.List())) assert.ElementsMatch(t, tt.wantVirtualPaths.List(), virtualLocations.List(), "virtual paths differ: "+cmp.Diff(tt.wantVirtualPaths.List(), virtualLocations.List())) }) diff --git a/syft/file/cataloger/internal/test-fixtures/image-file-type-mix/Dockerfile b/syft/file/cataloger/internal/test-fixtures/image-file-type-mix/Dockerfile new file mode 100644 index 00000000000..6ede1b68da8 --- /dev/null +++ b/syft/file/cataloger/internal/test-fixtures/image-file-type-mix/Dockerfile @@ -0,0 +1,13 @@ +FROM busybox:1.28.1@sha256:c7b0a24019b0e6eda714ec0fa137ad42bc44a754d9cea17d14fba3a80ccc1ee4 + +ADD file-1.txt . 
+RUN chmod 644 file-1.txt +RUN chown 1:2 file-1.txt +RUN ln -s file-1.txt symlink-1 +# note: hard links may behave inconsistently, this should be a golden image +RUN ln file-1.txt hardlink-1 +RUN mknod char-device-1 c 89 1 +RUN mknod block-device-1 b 0 1 +RUN mknod fifo-1 p +RUN mkdir /dir +RUN rm -rf home etc/group etc/localtime etc/mtab etc/network etc/passwd etc/shadow var usr bin/* \ No newline at end of file diff --git a/syft/file/cataloger/internal/test-fixtures/image-file-type-mix/file-1.txt b/syft/file/cataloger/internal/test-fixtures/image-file-type-mix/file-1.txt new file mode 100644 index 00000000000..d86db8155c3 --- /dev/null +++ b/syft/file/cataloger/internal/test-fixtures/image-file-type-mix/file-1.txt @@ -0,0 +1 @@ +file 1! \ No newline at end of file diff --git a/syft/file/test-fixtures/symlinked-root/nested/link-root b/syft/file/cataloger/internal/test-fixtures/symlinked-root/nested/link-root similarity index 100% rename from syft/file/test-fixtures/symlinked-root/nested/link-root rename to syft/file/cataloger/internal/test-fixtures/symlinked-root/nested/link-root diff --git a/syft/file/test-fixtures/symlinked-root/real-root/file1.txt b/syft/file/cataloger/internal/test-fixtures/symlinked-root/real-root/file1.txt similarity index 100% rename from syft/file/test-fixtures/symlinked-root/real-root/file1.txt rename to syft/file/cataloger/internal/test-fixtures/symlinked-root/real-root/file1.txt diff --git a/syft/file/test-fixtures/symlinked-root/real-root/nested/file2.txt b/syft/file/cataloger/internal/test-fixtures/symlinked-root/real-root/nested/file2.txt similarity index 100% rename from syft/file/test-fixtures/symlinked-root/real-root/nested/file2.txt rename to syft/file/cataloger/internal/test-fixtures/symlinked-root/real-root/nested/file2.txt diff --git a/syft/file/test-fixtures/symlinked-root/real-root/nested/linked-file1.txt b/syft/file/cataloger/internal/test-fixtures/symlinked-root/real-root/nested/linked-file1.txt similarity index 100% 
rename from syft/file/test-fixtures/symlinked-root/real-root/nested/linked-file1.txt rename to syft/file/cataloger/internal/test-fixtures/symlinked-root/real-root/nested/linked-file1.txt diff --git a/syft/file/secrets_cataloger.go b/syft/file/cataloger/secrets/cataloger.go similarity index 82% rename from syft/file/secrets_cataloger.go rename to syft/file/cataloger/secrets/cataloger.go index 56537d04968..488f849b53b 100644 --- a/syft/file/secrets_cataloger.go +++ b/syft/file/cataloger/secrets/cataloger.go @@ -1,4 +1,4 @@ -package file +package secrets import ( "bytes" @@ -14,7 +14,8 @@ import ( "github.com/anchore/syft/internal/bus" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/event" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" + internal2 "github.com/anchore/syft/syft/file/cataloger/internal" ) var DefaultSecretsPatterns = map[string]string{ @@ -25,23 +26,25 @@ var DefaultSecretsPatterns = map[string]string{ "generic-api-key": `(?i)api(-|_)?key["'=:\s]*?(?P[A-Z0-9]{20,60})["']?(\s|$)`, } -type SecretsCataloger struct { +// Deprecated: will be removed in syft v1.0.0 +type Cataloger struct { patterns map[string]*regexp.Regexp revealValues bool skipFilesAboveSize int64 } -func NewSecretsCataloger(patterns map[string]*regexp.Regexp, revealValues bool, maxFileSize int64) (*SecretsCataloger, error) { - return &SecretsCataloger{ +// Deprecated: will be removed in syft v1.0.0 +func NewCataloger(patterns map[string]*regexp.Regexp, revealValues bool, maxFileSize int64) (*Cataloger, error) { + return &Cataloger{ patterns: patterns, revealValues: revealValues, skipFilesAboveSize: maxFileSize, }, nil } -func (i *SecretsCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]SearchResult, error) { - results := make(map[source.Coordinates][]SearchResult) - locations := allRegularFiles(resolver) +func (i *Cataloger) Catalog(resolver file.Resolver) 
(map[file.Coordinates][]file.SearchResult, error) { + results := make(map[file.Coordinates][]file.SearchResult) + locations := internal2.AllRegularFiles(resolver) stage, prog, secretsDiscovered := secretsCatalogingProgress(int64(len(locations))) for _, location := range locations { stage.Current = location.RealPath @@ -65,7 +68,7 @@ func (i *SecretsCataloger) Catalog(resolver source.FileResolver) (map[source.Coo return results, nil } -func (i *SecretsCataloger) catalogLocation(resolver source.FileResolver, location source.Location) ([]SearchResult, error) { +func (i *Cataloger) catalogLocation(resolver file.Resolver, location file.Location) ([]file.SearchResult, error) { metadata, err := resolver.FileMetadataByLocation(location) if err != nil { return nil, err @@ -103,7 +106,7 @@ func (i *SecretsCataloger) catalogLocation(resolver source.FileResolver, locatio return secrets, nil } -func extractValue(resolver source.FileResolver, location source.Location, start, length int64) (string, error) { +func extractValue(resolver file.Resolver, location file.Location, start, length int64) (string, error) { readCloser, err := resolver.FileContentsByLocation(location) if err != nil { return "", fmt.Errorf("unable to fetch reader for location=%q : %w", location, err) @@ -130,7 +133,7 @@ func extractValue(resolver source.FileResolver, location source.Location, start, return buf.String(), nil } -type SecretsMonitor struct { +type Monitor struct { progress.Stager SecretsDiscovered progress.Monitorable progress.Progressable @@ -144,7 +147,7 @@ func secretsCatalogingProgress(locations int64) (*progress.Stage, *progress.Manu bus.Publish(partybus.Event{ Type: event.SecretsCatalogerStarted, Source: secretsDiscovered, - Value: SecretsMonitor{ + Value: Monitor{ Stager: progress.Stager(stage), SecretsDiscovered: secretsDiscovered, Progressable: prog, diff --git a/syft/file/secrets_cataloger_test.go b/syft/file/cataloger/secrets/cataloger_test.go similarity index 92% rename from 
syft/file/secrets_cataloger_test.go rename to syft/file/cataloger/secrets/cataloger_test.go index b2c55a9f3f0..2a44417ba5d 100644 --- a/syft/file/secrets_cataloger_test.go +++ b/syft/file/cataloger/secrets/cataloger_test.go @@ -1,4 +1,4 @@ -package file +package secrets import ( "regexp" @@ -6,8 +6,8 @@ import ( "github.com/stretchr/testify/assert" - "github.com/anchore/syft/internal/file" - "github.com/anchore/syft/syft/source" + intFile "github.com/anchore/syft/internal/file" + "github.com/anchore/syft/syft/file" ) func TestSecretsCataloger(t *testing.T) { @@ -17,7 +17,7 @@ func TestSecretsCataloger(t *testing.T) { reveal bool maxSize int64 patterns map[string]string - expected []SearchResult + expected []file.SearchResult constructorErr bool catalogErr bool }{ @@ -28,7 +28,7 @@ func TestSecretsCataloger(t *testing.T) { patterns: map[string]string{ "simple-secret-key": `^secret_key=.*`, }, - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "simple-secret-key", LineNumber: 2, @@ -46,7 +46,7 @@ func TestSecretsCataloger(t *testing.T) { patterns: map[string]string{ "simple-secret-key": `^secret_key=.*`, }, - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "simple-secret-key", LineNumber: 2, @@ -64,7 +64,7 @@ func TestSecretsCataloger(t *testing.T) { patterns: map[string]string{ "simple-secret-key": `^secret_key=(?P.*)`, }, - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "simple-secret-key", LineNumber: 2, @@ -82,7 +82,7 @@ func TestSecretsCataloger(t *testing.T) { patterns: map[string]string{ "simple-secret-key": `secret_key=.*`, }, - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "simple-secret-key", LineNumber: 1, @@ -125,7 +125,7 @@ func TestSecretsCataloger(t *testing.T) { patterns: map[string]string{ "simple-secret-key": `secret_key=(?P.*)`, }, - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: 
"simple-secret-key", LineNumber: 1, @@ -176,7 +176,7 @@ func TestSecretsCataloger(t *testing.T) { regexObjs[name] = obj } - c, err := NewSecretsCataloger(regexObjs, test.reveal, test.maxSize) + c, err := NewCataloger(regexObjs, test.reveal, test.maxSize) if err != nil && !test.constructorErr { t.Fatalf("could not create cataloger (but should have been able to): %+v", err) } else if err == nil && test.constructorErr { @@ -185,7 +185,7 @@ func TestSecretsCataloger(t *testing.T) { return } - resolver := source.NewMockResolverForPaths(test.fixture) + resolver := file.NewMockResolverForPaths(test.fixture) actualResults, err := c.Catalog(resolver) if err != nil && !test.catalogErr { @@ -196,7 +196,7 @@ func TestSecretsCataloger(t *testing.T) { return } - loc := source.NewLocation(test.fixture) + loc := file.NewLocation(test.fixture) if _, exists := actualResults[loc.Coordinates]; !exists { t.Fatalf("could not find location=%q in results", loc) } @@ -214,11 +214,11 @@ func TestSecretsCataloger_DefaultSecrets(t *testing.T) { tests := []struct { fixture string - expected []SearchResult + expected []file.SearchResult }{ { fixture: "test-fixtures/secrets/default/aws.env", - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "aws-access-key", LineNumber: 2, @@ -239,7 +239,7 @@ func TestSecretsCataloger_DefaultSecrets(t *testing.T) { }, { fixture: "test-fixtures/secrets/default/aws.ini", - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "aws-access-key", LineNumber: 3, @@ -260,7 +260,7 @@ func TestSecretsCataloger_DefaultSecrets(t *testing.T) { }, { fixture: "test-fixtures/secrets/default/private-key.pem", - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "pem-private-key", LineNumber: 2, @@ -280,7 +280,7 @@ z3P668YfhUbKdRF6S42Cg6zn }, { fixture: "test-fixtures/secrets/default/private-key-openssl.pem", - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: 
"pem-private-key", LineNumber: 2, @@ -302,7 +302,7 @@ z3P668YfhUbKdRF6S42Cg6zn // note: this test proves that the PEM regex matches the smallest possible match // since the test catches two adjacent secrets fixture: "test-fixtures/secrets/default/private-keys.pem", - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "pem-private-key", LineNumber: 1, @@ -345,7 +345,7 @@ j4f668YfhUbKdRF6S6734856 // 2. a named capture group with the correct line number and line offset case // 3. the named capture group is in a different line than the match start, and both the match start and the capture group have different line offsets fixture: "test-fixtures/secrets/default/docker-config.json", - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "docker-config-auth", LineNumber: 5, @@ -362,7 +362,7 @@ j4f668YfhUbKdRF6S6734856 }, { fixture: "test-fixtures/secrets/default/api-key.txt", - expected: []SearchResult{ + expected: []file.SearchResult{ { Classification: "generic-api-key", LineNumber: 2, @@ -418,19 +418,19 @@ j4f668YfhUbKdRF6S6734856 for _, test := range tests { t.Run(test.fixture, func(t *testing.T) { - c, err := NewSecretsCataloger(regexObjs, true, 10*file.MB) + c, err := NewCataloger(regexObjs, true, 10*intFile.MB) if err != nil { t.Fatalf("could not create cataloger: %+v", err) } - resolver := source.NewMockResolverForPaths(test.fixture) + resolver := file.NewMockResolverForPaths(test.fixture) actualResults, err := c.Catalog(resolver) if err != nil { t.Fatalf("could not catalog: %+v", err) } - loc := source.NewLocation(test.fixture) + loc := file.NewLocation(test.fixture) if _, exists := actualResults[loc.Coordinates]; !exists && test.expected != nil { t.Fatalf("could not find location=%q in results", loc) } else if !exists && test.expected == nil { diff --git a/syft/file/generate_search_patterns.go b/syft/file/cataloger/secrets/generate_search_patterns.go similarity index 98% rename from 
syft/file/generate_search_patterns.go rename to syft/file/cataloger/secrets/generate_search_patterns.go index 5e2c074dc35..a46ff483cc4 100644 --- a/syft/file/generate_search_patterns.go +++ b/syft/file/cataloger/secrets/generate_search_patterns.go @@ -1,4 +1,4 @@ -package file +package secrets import ( "fmt" diff --git a/syft/file/generate_search_patterns_test.go b/syft/file/cataloger/secrets/generate_search_patterns_test.go similarity index 99% rename from syft/file/generate_search_patterns_test.go rename to syft/file/cataloger/secrets/generate_search_patterns_test.go index bdd6d422bdf..37dc3441d1a 100644 --- a/syft/file/generate_search_patterns_test.go +++ b/syft/file/cataloger/secrets/generate_search_patterns_test.go @@ -1,4 +1,4 @@ -package file +package secrets import ( "testing" diff --git a/syft/file/newline_counter.go b/syft/file/cataloger/secrets/newline_counter.go similarity index 97% rename from syft/file/newline_counter.go rename to syft/file/cataloger/secrets/newline_counter.go index fec905523cd..d3c8ef894fc 100644 --- a/syft/file/newline_counter.go +++ b/syft/file/cataloger/secrets/newline_counter.go @@ -1,4 +1,4 @@ -package file +package secrets import "io" diff --git a/syft/file/newline_counter_test.go b/syft/file/cataloger/secrets/newline_counter_test.go similarity index 98% rename from syft/file/newline_counter_test.go rename to syft/file/cataloger/secrets/newline_counter_test.go index 24282bceb84..0760e892cd3 100644 --- a/syft/file/newline_counter_test.go +++ b/syft/file/cataloger/secrets/newline_counter_test.go @@ -1,4 +1,4 @@ -package file +package secrets import ( "bufio" diff --git a/syft/file/secrets_search_by_line_strategy.go b/syft/file/cataloger/secrets/secrets_search_by_line_strategy.go similarity index 86% rename from syft/file/secrets_search_by_line_strategy.go rename to syft/file/cataloger/secrets/secrets_search_by_line_strategy.go index d241846fab1..f1beeb525e7 100644 --- a/syft/file/secrets_search_by_line_strategy.go +++ 
b/syft/file/cataloger/secrets/secrets_search_by_line_strategy.go @@ -1,4 +1,4 @@ -package file +package secrets import ( "bufio" @@ -8,10 +8,10 @@ import ( "regexp" "github.com/anchore/syft/internal" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) -func catalogLocationByLine(resolver source.FileResolver, location source.Location, patterns map[string]*regexp.Regexp) ([]SearchResult, error) { +func catalogLocationByLine(resolver file.Resolver, location file.Location, patterns map[string]*regexp.Regexp) ([]file.SearchResult, error) { readCloser, err := resolver.FileContentsByLocation(location) if err != nil { return nil, fmt.Errorf("unable to fetch reader for location=%q : %w", location, err) @@ -20,7 +20,7 @@ func catalogLocationByLine(resolver source.FileResolver, location source.Locatio var scanner = bufio.NewReader(readCloser) var position int64 - var allSecrets []SearchResult + var allSecrets []file.SearchResult var lineNo int64 var readErr error for !errors.Is(readErr, io.EOF) { @@ -43,8 +43,8 @@ func catalogLocationByLine(resolver source.FileResolver, location source.Locatio return allSecrets, nil } -func searchForSecretsWithinLine(resolver source.FileResolver, location source.Location, patterns map[string]*regexp.Regexp, line []byte, lineNo int64, position int64) ([]SearchResult, error) { - var secrets []SearchResult +func searchForSecretsWithinLine(resolver file.Resolver, location file.Location, patterns map[string]*regexp.Regexp, line []byte, lineNo int64, position int64) ([]file.SearchResult, error) { + var secrets []file.SearchResult for name, pattern := range patterns { matches := pattern.FindAllIndex(line, -1) for i, match := range matches { @@ -72,7 +72,7 @@ func searchForSecretsWithinLine(resolver source.FileResolver, location source.Lo return secrets, nil } -func readerAtPosition(resolver source.FileResolver, location source.Location, seekPosition int64) (io.ReadCloser, error) { +func readerAtPosition(resolver 
file.Resolver, location file.Location, seekPosition int64) (io.ReadCloser, error) { readCloser, err := resolver.FileContentsByLocation(location) if err != nil { return nil, fmt.Errorf("unable to fetch reader for location=%q : %w", location, err) @@ -89,7 +89,7 @@ func readerAtPosition(resolver source.FileResolver, location source.Location, se return readCloser, nil } -func extractSecretFromPosition(readCloser io.ReadCloser, name string, pattern *regexp.Regexp, lineNo, lineOffset, seekPosition int64) *SearchResult { +func extractSecretFromPosition(readCloser io.ReadCloser, name string, pattern *regexp.Regexp, lineNo, lineOffset, seekPosition int64) *file.SearchResult { reader := &newlineCounter{RuneReader: bufio.NewReader(readCloser)} positions := pattern.FindReaderSubmatchIndex(reader) if len(positions) == 0 { @@ -125,7 +125,7 @@ func extractSecretFromPosition(readCloser io.ReadCloser, name string, pattern *r lineOffsetOfSecret += lineOffset } - return &SearchResult{ + return &file.SearchResult{ Classification: name, SeekPosition: start + seekPosition, Length: stop - start, diff --git a/syft/file/test-fixtures/secrets/default/api-key.txt b/syft/file/cataloger/secrets/test-fixtures/secrets/default/api-key.txt similarity index 100% rename from syft/file/test-fixtures/secrets/default/api-key.txt rename to syft/file/cataloger/secrets/test-fixtures/secrets/default/api-key.txt diff --git a/syft/file/test-fixtures/secrets/default/aws.env b/syft/file/cataloger/secrets/test-fixtures/secrets/default/aws.env similarity index 100% rename from syft/file/test-fixtures/secrets/default/aws.env rename to syft/file/cataloger/secrets/test-fixtures/secrets/default/aws.env diff --git a/syft/file/test-fixtures/secrets/default/aws.ini b/syft/file/cataloger/secrets/test-fixtures/secrets/default/aws.ini similarity index 100% rename from syft/file/test-fixtures/secrets/default/aws.ini rename to syft/file/cataloger/secrets/test-fixtures/secrets/default/aws.ini diff --git 
a/syft/file/test-fixtures/secrets/default/docker-config.json b/syft/file/cataloger/secrets/test-fixtures/secrets/default/docker-config.json similarity index 100% rename from syft/file/test-fixtures/secrets/default/docker-config.json rename to syft/file/cataloger/secrets/test-fixtures/secrets/default/docker-config.json diff --git a/syft/file/test-fixtures/secrets/default/not-docker-config.json b/syft/file/cataloger/secrets/test-fixtures/secrets/default/not-docker-config.json similarity index 100% rename from syft/file/test-fixtures/secrets/default/not-docker-config.json rename to syft/file/cataloger/secrets/test-fixtures/secrets/default/not-docker-config.json diff --git a/syft/file/test-fixtures/secrets/default/private-key-false-positive.pem b/syft/file/cataloger/secrets/test-fixtures/secrets/default/private-key-false-positive.pem similarity index 100% rename from syft/file/test-fixtures/secrets/default/private-key-false-positive.pem rename to syft/file/cataloger/secrets/test-fixtures/secrets/default/private-key-false-positive.pem diff --git a/syft/file/test-fixtures/secrets/default/private-key-openssl.pem b/syft/file/cataloger/secrets/test-fixtures/secrets/default/private-key-openssl.pem similarity index 100% rename from syft/file/test-fixtures/secrets/default/private-key-openssl.pem rename to syft/file/cataloger/secrets/test-fixtures/secrets/default/private-key-openssl.pem diff --git a/syft/file/test-fixtures/secrets/default/private-key.pem b/syft/file/cataloger/secrets/test-fixtures/secrets/default/private-key.pem similarity index 100% rename from syft/file/test-fixtures/secrets/default/private-key.pem rename to syft/file/cataloger/secrets/test-fixtures/secrets/default/private-key.pem diff --git a/syft/file/test-fixtures/secrets/default/private-keys.pem b/syft/file/cataloger/secrets/test-fixtures/secrets/default/private-keys.pem similarity index 100% rename from syft/file/test-fixtures/secrets/default/private-keys.pem rename to 
syft/file/cataloger/secrets/test-fixtures/secrets/default/private-keys.pem diff --git a/syft/file/test-fixtures/secrets/multiple.txt b/syft/file/cataloger/secrets/test-fixtures/secrets/multiple.txt similarity index 100% rename from syft/file/test-fixtures/secrets/multiple.txt rename to syft/file/cataloger/secrets/test-fixtures/secrets/multiple.txt diff --git a/syft/file/test-fixtures/secrets/simple.txt b/syft/file/cataloger/secrets/test-fixtures/secrets/simple.txt similarity index 100% rename from syft/file/test-fixtures/secrets/simple.txt rename to syft/file/cataloger/secrets/test-fixtures/secrets/simple.txt diff --git a/syft/file/contents_cataloger_test.go b/syft/file/contents_cataloger_test.go deleted file mode 100644 index 526baae5c33..00000000000 --- a/syft/file/contents_cataloger_test.go +++ /dev/null @@ -1,80 +0,0 @@ -package file - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/anchore/syft/syft/source" -) - -func TestContentsCataloger(t *testing.T) { - allFiles := []string{"test-fixtures/last/path.txt", "test-fixtures/another-path.txt", "test-fixtures/a-path.txt"} - - tests := []struct { - name string - globs []string - maxSize int64 - files []string - expected map[source.Coordinates]string - }{ - { - name: "multi-pattern", - globs: []string{"test-fixtures/last/*.txt", "test-fixtures/*.txt"}, - files: allFiles, - expected: map[source.Coordinates]string{ - source.NewLocation("test-fixtures/last/path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9sYXN0L3BhdGgudHh0IGZpbGUgY29udGVudHMh", - source.NewLocation("test-fixtures/another-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hbm90aGVyLXBhdGgudHh0IGZpbGUgY29udGVudHMh", - source.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", - }, - }, - { - name: "no-patterns", - globs: []string{}, - files: []string{"test-fixtures/last/path.txt", "test-fixtures/another-path.txt", "test-fixtures/a-path.txt"}, - expected: 
map[source.Coordinates]string{}, - }, - { - name: "all-txt", - globs: []string{"**/*.txt"}, - files: allFiles, - expected: map[source.Coordinates]string{ - source.NewLocation("test-fixtures/last/path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9sYXN0L3BhdGgudHh0IGZpbGUgY29udGVudHMh", - source.NewLocation("test-fixtures/another-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hbm90aGVyLXBhdGgudHh0IGZpbGUgY29udGVudHMh", - source.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", - }, - }, - { - name: "subpath", - globs: []string{"test-fixtures/*.txt"}, - files: allFiles, - expected: map[source.Coordinates]string{ - source.NewLocation("test-fixtures/another-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hbm90aGVyLXBhdGgudHh0IGZpbGUgY29udGVudHMh", - source.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", - }, - }, - { - name: "size-filter", - maxSize: 42, - globs: []string{"**/*.txt"}, - files: allFiles, - expected: map[source.Coordinates]string{ - source.NewLocation("test-fixtures/last/path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9sYXN0L3BhdGgudHh0IGZpbGUgY29udGVudHMh", - source.NewLocation("test-fixtures/a-path.txt").Coordinates: "dGVzdC1maXh0dXJlcy9hLXBhdGgudHh0IGZpbGUgY29udGVudHMh", - }, - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - c, err := NewContentsCataloger(test.globs, test.maxSize) - assert.NoError(t, err) - - resolver := source.NewMockResolverForPaths(test.files...) 
- actual, err := c.Catalog(resolver) - assert.NoError(t, err) - assert.Equal(t, test.expected, actual, "mismatched contents") - - }) - } -} diff --git a/syft/source/coordinate_set.go b/syft/file/coordinate_set.go similarity index 99% rename from syft/source/coordinate_set.go rename to syft/file/coordinate_set.go index 0ae080c270f..ba56005d302 100644 --- a/syft/source/coordinate_set.go +++ b/syft/file/coordinate_set.go @@ -1,4 +1,4 @@ -package source +package file import ( "sort" diff --git a/syft/source/coordinate_set_test.go b/syft/file/coordinate_set_test.go similarity index 99% rename from syft/source/coordinate_set_test.go rename to syft/file/coordinate_set_test.go index 7f50a61ffd2..6fae658e76b 100644 --- a/syft/source/coordinate_set_test.go +++ b/syft/file/coordinate_set_test.go @@ -1,4 +1,4 @@ -package source +package file import ( "testing" diff --git a/syft/source/coordinates.go b/syft/file/coordinates.go similarity index 98% rename from syft/source/coordinates.go rename to syft/file/coordinates.go index c35d3dcc297..24ba486ae91 100644 --- a/syft/source/coordinates.go +++ b/syft/file/coordinates.go @@ -1,4 +1,4 @@ -package source +package file import ( "fmt" diff --git a/syft/file/digest.go b/syft/file/digest.go index 87b53dbb833..23219e68875 100644 --- a/syft/file/digest.go +++ b/syft/file/digest.go @@ -1,6 +1,76 @@ package file +import ( + "crypto" + "fmt" + "hash" + "io" + "strings" +) + type Digest struct { Algorithm string `json:"algorithm"` Value string `json:"value"` } + +func NewDigestsFromFile(closer io.ReadCloser, hashes []crypto.Hash) ([]Digest, error) { + // create a set of hasher objects tied together with a single writer to feed content into + hashers := make([]hash.Hash, len(hashes)) + writers := make([]io.Writer, len(hashes)) + for idx, hashObj := range hashes { + hashers[idx] = hashObj.New() + writers[idx] = hashers[idx] + } + + size, err := io.Copy(io.MultiWriter(writers...), closer) + if err != nil { + return nil, err + } + + if size == 
0 { + return make([]Digest, 0), nil + } + + result := make([]Digest, len(hashes)) + // only capture digests when there is content. It is important to do this based on SIZE and not + // FILE TYPE. The reasoning is that it is possible for a tar to be crafted with a header-only + // file type but a body is still allowed. + for idx, hasher := range hashers { + result[idx] = Digest{ + Algorithm: DigestAlgorithmName(hashes[idx]), + Value: fmt.Sprintf("%+x", hasher.Sum(nil)), + } + } + + return result, nil +} + +func Hashers(names ...string) ([]crypto.Hash, error) { + supportedHashAlgorithms := make(map[string]crypto.Hash) + for _, h := range []crypto.Hash{ + crypto.MD5, + crypto.SHA1, + crypto.SHA256, + } { + supportedHashAlgorithms[DigestAlgorithmName(h)] = h + } + + var hashers []crypto.Hash + for _, hashStr := range names { + hashObj, ok := supportedHashAlgorithms[CleanDigestAlgorithmName(hashStr)] + if !ok { + return nil, fmt.Errorf("unsupported hash algorithm: %s", hashStr) + } + hashers = append(hashers, hashObj) + } + return hashers, nil +} + +func DigestAlgorithmName(hash crypto.Hash) string { + return CleanDigestAlgorithmName(hash.String()) +} + +func CleanDigestAlgorithmName(name string) string { + lower := strings.ToLower(name) + return strings.ReplaceAll(lower, "-", "") +} diff --git a/syft/file/digest_cataloger.go b/syft/file/digest_cataloger.go deleted file mode 100644 index db2d468a31f..00000000000 --- a/syft/file/digest_cataloger.go +++ /dev/null @@ -1,140 +0,0 @@ -package file - -import ( - "crypto" - "errors" - "fmt" - "hash" - "io" - "strings" - - "github.com/wagoodman/go-partybus" - "github.com/wagoodman/go-progress" - - "github.com/anchore/stereoscope/pkg/file" - "github.com/anchore/syft/internal" - "github.com/anchore/syft/internal/bus" - "github.com/anchore/syft/internal/log" - "github.com/anchore/syft/syft/event" - "github.com/anchore/syft/syft/source" -) - -var errUndigestableFile = errors.New("undigestable file") - -type 
DigestsCataloger struct { - hashes []crypto.Hash -} - -func NewDigestsCataloger(hashes []crypto.Hash) (*DigestsCataloger, error) { - return &DigestsCataloger{ - hashes: hashes, - }, nil -} - -func (i *DigestsCataloger) Catalog(resolver source.FileResolver) (map[source.Coordinates][]Digest, error) { - results := make(map[source.Coordinates][]Digest) - locations := allRegularFiles(resolver) - stage, prog := digestsCatalogingProgress(int64(len(locations))) - for _, location := range locations { - stage.Current = location.RealPath - result, err := i.catalogLocation(resolver, location) - - if errors.Is(err, errUndigestableFile) { - continue - } - - if internal.IsErrPathPermission(err) { - log.Debugf("file digests cataloger skipping %q: %+v", location.RealPath, err) - continue - } - - if err != nil { - return nil, err - } - prog.Increment() - results[location.Coordinates] = result - } - log.Debugf("file digests cataloger processed %d files", prog.Current()) - prog.SetCompleted() - return results, nil -} - -func (i *DigestsCataloger) catalogLocation(resolver source.FileResolver, location source.Location) ([]Digest, error) { - meta, err := resolver.FileMetadataByLocation(location) - if err != nil { - return nil, err - } - - // we should only attempt to report digests for files that are regular files (don't attempt to resolve links) - if meta.Type != file.TypeRegular { - return nil, errUndigestableFile - } - - contentReader, err := resolver.FileContentsByLocation(location) - if err != nil { - return nil, err - } - defer internal.CloseAndLogError(contentReader, location.VirtualPath) - - digests, err := DigestsFromFile(contentReader, i.hashes) - if err != nil { - return nil, internal.ErrPath{Context: "digests-cataloger", Path: location.RealPath, Err: err} - } - - return digests, nil -} - -func DigestsFromFile(closer io.ReadCloser, hashes []crypto.Hash) ([]Digest, error) { - // create a set of hasher objects tied together with a single writer to feed content into - hashers := 
make([]hash.Hash, len(hashes)) - writers := make([]io.Writer, len(hashes)) - for idx, hashObj := range hashes { - hashers[idx] = hashObj.New() - writers[idx] = hashers[idx] - } - - _, err := io.Copy(io.MultiWriter(writers...), closer) - if err != nil { - return nil, err - } - - result := make([]Digest, len(hashes)) - // only capture digests when there is content. It is important to do this based on SIZE and not - // FILE TYPE. The reasoning is that it is possible for a tar to be crafted with a header-only - // file type but a body is still allowed. - for idx, hasher := range hashers { - result[idx] = Digest{ - Algorithm: DigestAlgorithmName(hashes[idx]), - Value: fmt.Sprintf("%+x", hasher.Sum(nil)), - } - } - - return result, nil -} - -func DigestAlgorithmName(hash crypto.Hash) string { - return CleanDigestAlgorithmName(hash.String()) -} - -func CleanDigestAlgorithmName(name string) string { - lower := strings.ToLower(name) - return strings.ReplaceAll(lower, "-", "") -} - -func digestsCatalogingProgress(locations int64) (*progress.Stage, *progress.Manual) { - stage := &progress.Stage{} - prog := progress.NewManual(locations) - - bus.Publish(partybus.Event{ - Type: event.FileDigestsCatalogerStarted, - Value: struct { - progress.Stager - progress.Progressable - }{ - Stager: progress.Stager(stage), - Progressable: prog, - }, - }) - - return stage, prog -} diff --git a/syft/source/location.go b/syft/file/location.go similarity index 94% rename from syft/source/location.go rename to syft/file/location.go index 3abadd3fb1f..65af91c5164 100644 --- a/syft/source/location.go +++ b/syft/file/location.go @@ -1,4 +1,4 @@ -package source +package file import ( "fmt" @@ -24,6 +24,10 @@ type LocationData struct { ref file.Reference `hash:"ignore"` // The file reference relative to the stereoscope.FileCatalog that has more information about this location. 
} +func (l LocationData) Reference() file.Reference { + return l.ref +} + type LocationMetadata struct { Annotations map[string]string `json:"annotations,omitempty"` // Arbitrary key-value pairs that can be used to annotate a location } @@ -108,7 +112,7 @@ func NewVirtualLocationFromCoordinates(coordinates Coordinates, virtualPath stri }} } -// NewLocationFromImage creates a new Location representing the given path (extracted from the ref) relative to the given image. +// NewLocationFromImage creates a new Location representing the given path (extracted from the Reference) relative to the given image. func NewLocationFromImage(virtualPath string, ref file.Reference, img *image.Image) Location { layer := img.FileCatalog.Layer(ref) return Location{ @@ -126,7 +130,7 @@ func NewLocationFromImage(virtualPath string, ref file.Reference, img *image.Ima } } -// NewLocationFromDirectory creates a new Location representing the given path (extracted from the ref) relative to the given directory. +// NewLocationFromDirectory creates a new Location representing the given path (extracted from the Reference) relative to the given directory. func NewLocationFromDirectory(responsePath string, ref file.Reference) Location { return Location{ LocationData: LocationData{ @@ -141,7 +145,7 @@ func NewLocationFromDirectory(responsePath string, ref file.Reference) Location } } -// NewVirtualLocationFromDirectory creates a new Location representing the given path (extracted from the ref) relative to the given directory with a separate virtual access path. +// NewVirtualLocationFromDirectory creates a new Location representing the given path (extracted from the Reference) relative to the given directory with a separate virtual access path. 
func NewVirtualLocationFromDirectory(responsePath, virtualResponsePath string, ref file.Reference) Location { if responsePath == virtualResponsePath { return NewLocationFromDirectory(responsePath, ref) diff --git a/syft/source/location_read_closer.go b/syft/file/location_read_closer.go similarity index 94% rename from syft/source/location_read_closer.go rename to syft/file/location_read_closer.go index b5aa2b6efb5..480a0b50fe1 100644 --- a/syft/source/location_read_closer.go +++ b/syft/file/location_read_closer.go @@ -1,4 +1,4 @@ -package source +package file import "io" diff --git a/syft/source/location_set.go b/syft/file/location_set.go similarity index 99% rename from syft/source/location_set.go rename to syft/file/location_set.go index 100bf95e5d4..8e9ed2fc1f4 100644 --- a/syft/source/location_set.go +++ b/syft/file/location_set.go @@ -1,4 +1,4 @@ -package source +package file import ( "sort" diff --git a/syft/source/location_set_test.go b/syft/file/location_set_test.go similarity index 99% rename from syft/source/location_set_test.go rename to syft/file/location_set_test.go index b3d53ae5856..1613c71a539 100644 --- a/syft/source/location_set_test.go +++ b/syft/file/location_set_test.go @@ -1,4 +1,4 @@ -package source +package file import ( "testing" diff --git a/syft/source/location_test.go b/syft/file/location_test.go similarity index 98% rename from syft/source/location_test.go rename to syft/file/location_test.go index 96f0e3fcd3d..261ae9caa0f 100644 --- a/syft/source/location_test.go +++ b/syft/file/location_test.go @@ -1,4 +1,4 @@ -package source +package file import ( "testing" diff --git a/syft/source/locations.go b/syft/file/locations.go similarity index 96% rename from syft/source/locations.go rename to syft/file/locations.go index 045d1ed9d3c..da298643ec9 100644 --- a/syft/source/locations.go +++ b/syft/file/locations.go @@ -1,4 +1,4 @@ -package source +package file type Locations []Location diff --git a/syft/file/metadata.go b/syft/file/metadata.go 
new file mode 100644 index 00000000000..b5e0669d59c --- /dev/null +++ b/syft/file/metadata.go @@ -0,0 +1,5 @@ +package file + +import "github.com/anchore/stereoscope/pkg/file" + +type Metadata = file.Metadata diff --git a/syft/source/mock_resolver.go b/syft/file/mock_resolver.go similarity index 94% rename from syft/source/mock_resolver.go rename to syft/file/mock_resolver.go index 12cab882b86..7a0f89ffd00 100644 --- a/syft/source/mock_resolver.go +++ b/syft/file/mock_resolver.go @@ -1,4 +1,4 @@ -package source +package file import ( "fmt" @@ -11,14 +11,14 @@ import ( "github.com/anchore/stereoscope/pkg/file" ) -var _ FileResolver = (*MockResolver)(nil) +var _ Resolver = (*MockResolver)(nil) // MockResolver implements the FileResolver interface and is intended for use *only in test code*. // It provides an implementation that can resolve local filesystem paths using only a provided discrete list of file // paths, which are typically paths to test fixtures. type MockResolver struct { locations []Location - metadata map[Coordinates]FileMetadata + metadata map[Coordinates]Metadata mimeTypeIndex map[string][]Location extension map[string][]Location basename map[string][]Location @@ -41,13 +41,13 @@ func NewMockResolverForPaths(paths ...string) *MockResolver { return &MockResolver{ locations: locations, - metadata: make(map[Coordinates]FileMetadata), + metadata: make(map[Coordinates]Metadata), extension: extension, basename: basename, } } -func NewMockResolverForPathsWithMetadata(metadata map[Coordinates]FileMetadata) *MockResolver { +func NewMockResolverForPathsWithMetadata(metadata map[Coordinates]Metadata) *MockResolver { var locations []Location var mimeTypeIndex = make(map[string][]Location) extension := make(map[string][]Location) @@ -155,10 +155,10 @@ func (r MockResolver) AllLocations() <-chan Location { return results } -func (r MockResolver) FileMetadataByLocation(l Location) (FileMetadata, error) { +func (r MockResolver) FileMetadataByLocation(l 
Location) (Metadata, error) { info, err := os.Stat(l.RealPath) if err != nil { - return FileMetadata{}, err + return Metadata{}, err } // other types not supported @@ -167,7 +167,7 @@ func (r MockResolver) FileMetadataByLocation(l Location) (FileMetadata, error) { ty = file.TypeDirectory } - return FileMetadata{ + return Metadata{ FileInfo: info, Type: ty, UserID: 0, // not supported diff --git a/syft/source/file_resolver.go b/syft/file/resolver.go similarity index 75% rename from syft/source/file_resolver.go rename to syft/file/resolver.go index 414be6373f9..57726b9b22c 100644 --- a/syft/source/file_resolver.go +++ b/syft/file/resolver.go @@ -1,28 +1,26 @@ -package source +package file -import ( - "io" -) +import "io" -// FileResolver is an interface that encompasses how to get specific file references and file contents for a generic data source. -type FileResolver interface { - FileContentResolver - FilePathResolver - FileLocationResolver - FileMetadataResolver +// Resolver is an interface that encompasses how to get specific file references and file contents for a generic data source. +type Resolver interface { + ContentResolver + PathResolver + LocationResolver + MetadataResolver } -// FileContentResolver knows how to get file content for a given Location -type FileContentResolver interface { +// ContentResolver knows how to get file content for a given Location +type ContentResolver interface { FileContentsByLocation(Location) (io.ReadCloser, error) } -type FileMetadataResolver interface { - FileMetadataByLocation(Location) (FileMetadata, error) +type MetadataResolver interface { + FileMetadataByLocation(Location) (Metadata, error) } -// FilePathResolver knows how to get a Location for given string paths and globs -type FilePathResolver interface { +// PathResolver knows how to get a Location for given string paths and globs +type PathResolver interface { // HasPath indicates if the given path exists in the underlying source. 
// The implementation for this may vary, however, generally the following considerations should be made: // - full symlink resolution should be performed on all requests @@ -50,7 +48,7 @@ type FilePathResolver interface { RelativeFileByPath(_ Location, path string) *Location } -type FileLocationResolver interface { +type LocationResolver interface { // AllLocations returns a channel of all file references from the underlying source. // The implementation for this may vary, however, generally the following considerations should be made: // - NO symlink resolution should be performed on results @@ -58,8 +56,8 @@ type FileLocationResolver interface { AllLocations() <-chan Location } -type WritableFileResolver interface { - FileResolver +type WritableResolver interface { + Resolver Write(location Location, reader io.Reader) error } diff --git a/syft/file/test-fixtures/snapshot/stereoscope-fixture-image-file-type-mix.golden b/syft/file/test-fixtures/snapshot/stereoscope-fixture-image-file-type-mix.golden deleted file mode 100644 index e85036214d9..00000000000 Binary files a/syft/file/test-fixtures/snapshot/stereoscope-fixture-image-file-type-mix.golden and /dev/null differ diff --git a/syft/formats/common/cyclonedxhelpers/component.go b/syft/formats/common/cyclonedxhelpers/component.go index e51c9d11250..d7c79875025 100644 --- a/syft/formats/common/cyclonedxhelpers/component.go +++ b/syft/formats/common/cyclonedxhelpers/component.go @@ -6,9 +6,9 @@ import ( "github.com/CycloneDX/cyclonedx-go" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/formats/common" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) func encodeComponent(p pkg.Package) cyclonedx.Component { @@ -100,13 +100,13 @@ func decodeComponent(c *cyclonedx.Component) *pkg.Package { return p } -func decodeLocations(vals map[string]string) source.LocationSet { - v := 
common.Decode(reflect.TypeOf([]source.Location{}), vals, "syft:location", CycloneDXFields) - out, ok := v.([]source.Location) +func decodeLocations(vals map[string]string) file.LocationSet { + v := common.Decode(reflect.TypeOf([]file.Location{}), vals, "syft:location", CycloneDXFields) + out, ok := v.([]file.Location) if !ok { out = nil } - return source.NewLocationSet(out...) + return file.NewLocationSet(out...) } func decodePackageMetadata(vals map[string]string, c *cyclonedx.Component, typ pkg.MetadataType) interface{} { diff --git a/syft/formats/common/cyclonedxhelpers/component_test.go b/syft/formats/common/cyclonedxhelpers/component_test.go index 4ee69aa0c21..ed217fa3422 100644 --- a/syft/formats/common/cyclonedxhelpers/component_test.go +++ b/syft/formats/common/cyclonedxhelpers/component_test.go @@ -8,8 +8,8 @@ import ( "github.com/CycloneDX/cyclonedx-go" "github.com/stretchr/testify/assert" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) func Test_encodeComponentProperties(t *testing.T) { @@ -28,8 +28,8 @@ func Test_encodeComponentProperties(t *testing.T) { name: "from apk", input: pkg.Package{ FoundBy: "cataloger", - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates(source.Coordinates{RealPath: "test"}), + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates(file.Coordinates{RealPath: "test"}), ), Metadata: pkg.ApkMetadata{ Package: "libc-utils", diff --git a/syft/formats/common/spdxhelpers/source_info_test.go b/syft/formats/common/spdxhelpers/source_info_test.go index a56efff9338..a7b2dc14299 100644 --- a/syft/formats/common/spdxhelpers/source_info_test.go +++ b/syft/formats/common/spdxhelpers/source_info_test.go @@ -5,8 +5,8 @@ import ( "github.com/stretchr/testify/assert" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) func Test_SourceInfo(t *testing.T) { @@ 
-19,9 +19,9 @@ func Test_SourceInfo(t *testing.T) { name: "locations are captured", input: pkg.Package{ // note: no type given - Locations: source.NewLocationSet( - source.NewVirtualLocation("/a-place", "/b-place"), - source.NewVirtualLocation("/c-place", "/d-place"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/a-place", "/b-place"), + file.NewVirtualLocation("/c-place", "/d-place"), ), }, expected: []string{ diff --git a/syft/formats/common/spdxhelpers/to_format_model.go b/syft/formats/common/spdxhelpers/to_format_model.go index 4c39dbe3bee..6b412a25585 100644 --- a/syft/formats/common/spdxhelpers/to_format_model.go +++ b/syft/formats/common/spdxhelpers/to_format_model.go @@ -21,7 +21,6 @@ import ( "github.com/anchore/syft/syft/formats/common/util" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/sbom" - "github.com/anchore/syft/syft/source" ) const ( @@ -137,7 +136,7 @@ func toSPDXID(identifiable artifact.Identifiable) spdx.ElementID { switch it := identifiable.(type) { case pkg.Package: id = SanitizeElementID(fmt.Sprintf("Package-%s-%s-%s", it.Type, it.Name, it.ID())) - case source.Coordinates: + case file.Coordinates: p := "" parts := strings.Split(it.RealPath, "/") for i := len(parts); i > 0; i-- { @@ -437,7 +436,7 @@ func toFiles(s sbom.SBOM) (results []*spdx.File) { artifacts := s.Artifacts for _, coordinates := range s.AllCoordinates() { - var metadata *source.FileMetadata + var metadata *file.Metadata if metadataForLocation, exists := artifacts.FileMetadata[coordinates]; exists { metadata = &metadataForLocation } @@ -500,7 +499,7 @@ func toChecksumAlgorithm(algorithm string) spdx.ChecksumAlgorithm { return spdx.ChecksumAlgorithm(strings.ToUpper(algorithm)) } -func toFileTypes(metadata *source.FileMetadata) (ty []string) { +func toFileTypes(metadata *file.Metadata) (ty []string) { if metadata == nil { return nil } diff --git a/syft/formats/common/spdxhelpers/to_format_model_test.go 
b/syft/formats/common/spdxhelpers/to_format_model_test.go index 170de95ea8c..e36e29435d1 100644 --- a/syft/formats/common/spdxhelpers/to_format_model_test.go +++ b/syft/formats/common/spdxhelpers/to_format_model_test.go @@ -115,12 +115,12 @@ func Test_toFileTypes(t *testing.T) { tests := []struct { name string - metadata source.FileMetadata + metadata file.Metadata expected []string }{ { name: "application", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "application/vnd.unknown", }, expected: []string{ @@ -129,7 +129,7 @@ func Test_toFileTypes(t *testing.T) { }, { name: "archive", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "application/zip", }, expected: []string{ @@ -139,7 +139,7 @@ func Test_toFileTypes(t *testing.T) { }, { name: "audio", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "audio/ogg", }, expected: []string{ @@ -148,7 +148,7 @@ func Test_toFileTypes(t *testing.T) { }, { name: "video", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "video/3gpp", }, expected: []string{ @@ -157,7 +157,7 @@ func Test_toFileTypes(t *testing.T) { }, { name: "text", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "text/html", }, expected: []string{ @@ -166,7 +166,7 @@ func Test_toFileTypes(t *testing.T) { }, { name: "image", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "image/png", }, expected: []string{ @@ -175,7 +175,7 @@ func Test_toFileTypes(t *testing.T) { }, { name: "binary", - metadata: source.FileMetadata{ + metadata: file.Metadata{ MIMEType: "application/x-sharedlib", }, expected: []string{ @@ -276,7 +276,7 @@ func Test_fileIDsForPackage(t *testing.T) { Name: "bogus", } - c := source.Coordinates{ + c := file.Coordinates{ RealPath: "/path", FileSystemID: "nowhere", } diff --git a/syft/formats/common/spdxhelpers/to_syft_model.go b/syft/formats/common/spdxhelpers/to_syft_model.go index a31cee81218..fd34541df99 100644 --- 
a/syft/formats/common/spdxhelpers/to_syft_model.go +++ b/syft/formats/common/spdxhelpers/to_syft_model.go @@ -35,8 +35,8 @@ func ToSyftModel(doc *spdx.Document) (*sbom.SBOM, error) { Source: src, Artifacts: sbom.Artifacts{ Packages: pkg.NewCollection(), - FileMetadata: map[source.Coordinates]source.FileMetadata{}, - FileDigests: map[source.Coordinates][]file.Digest{}, + FileMetadata: map[file.Coordinates]file.Metadata{}, + FileDigests: map[file.Coordinates][]file.Digest{}, LinuxDistribution: findLinuxReleaseByPURL(doc), }, } @@ -135,7 +135,7 @@ func toFileDigests(f *spdx.File) (digests []file.Digest) { return digests } -func toFileMetadata(f *spdx.File) (meta source.FileMetadata) { +func toFileMetadata(f *spdx.File) (meta file.Metadata) { // FIXME Syft is currently lossy due to the SPDX 2.2.1 spec not supporting arbitrary mimetypes for _, typ := range f.FileTypes { switch FileType(typ) { @@ -169,7 +169,7 @@ func toSyftRelationships(spdxIDMap map[string]interface{}, doc *spdx.Document) [ b := spdxIDMap[string(r.RefB.ElementRefID)] from, fromOk := a.(*pkg.Package) toPackage, toPackageOk := b.(*pkg.Package) - toLocation, toLocationOk := b.(*source.Location) + toLocation, toLocationOk := b.(*file.Location) if !fromOk || !(toPackageOk || toLocationOk) { log.Debugf("unable to find valid relationship mapping from SPDX 2.2 JSON, ignoring: (from: %+v) (to: %+v)", a, b) continue @@ -212,7 +212,7 @@ func toSyftRelationships(spdxIDMap map[string]interface{}, doc *spdx.Document) [ return out } -func toSyftCoordinates(f *spdx.File) source.Coordinates { +func toSyftCoordinates(f *spdx.File) file.Coordinates { const layerIDPrefix = "layerID: " var fileSystemID string if strings.Index(f.FileComment, layerIDPrefix) == 0 { @@ -221,14 +221,14 @@ func toSyftCoordinates(f *spdx.File) source.Coordinates { if strings.Index(string(f.FileSPDXIdentifier), layerIDPrefix) == 0 { fileSystemID = strings.TrimPrefix(string(f.FileSPDXIdentifier), layerIDPrefix) } - return source.Coordinates{ + 
return file.Coordinates{ RealPath: f.FileName, FileSystemID: fileSystemID, } } -func toSyftLocation(f *spdx.File) *source.Location { - l := source.NewVirtualLocationFromCoordinates(toSyftCoordinates(f), f.FileName) +func toSyftLocation(f *spdx.File) *file.Location { + l := file.NewVirtualLocationFromCoordinates(toSyftCoordinates(f), f.FileName) return &l } diff --git a/syft/formats/common/spdxhelpers/to_syft_model_test.go b/syft/formats/common/spdxhelpers/to_syft_model_test.go index a4b5c1e81d9..e4a98f5abd2 100644 --- a/syft/formats/common/spdxhelpers/to_syft_model_test.go +++ b/syft/formats/common/spdxhelpers/to_syft_model_test.go @@ -9,6 +9,7 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/source" ) @@ -336,7 +337,7 @@ func Test_toSyftRelationships(t *testing.T) { } pkg3.SetID() - loc1 := source.NewLocationFromCoordinates(source.Coordinates{ + loc1 := file.NewLocationFromCoordinates(file.Coordinates{ RealPath: "/somewhere/real", FileSystemID: "abc", }) diff --git a/syft/formats/github/encoder_test.go b/syft/formats/github/encoder_test.go index ba405dad63c..a0770f2520e 100644 --- a/syft/formats/github/encoder_test.go +++ b/syft/formats/github/encoder_test.go @@ -7,6 +7,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/sbom" @@ -35,8 +36,8 @@ func Test_toGithubModel(t *testing.T) { { Name: "pkg-1", Version: "1.0.1", - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates(source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates(file.Coordinates{ RealPath: "/usr/lib", FileSystemID: "fsid-1", }), @@ -45,8 +46,8 @@ func Test_toGithubModel(t *testing.T) { { Name: "pkg-2", 
Version: "2.0.2", - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates(source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates(file.Coordinates{ RealPath: "/usr/lib", FileSystemID: "fsid-1", }), @@ -55,8 +56,8 @@ func Test_toGithubModel(t *testing.T) { { Name: "pkg-3", Version: "3.0.3", - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates(source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates(file.Coordinates{ RealPath: "/etc", FileSystemID: "fsid-1", }), diff --git a/syft/formats/internal/testutils/utils.go b/syft/formats/internal/testutils/utils.go index 7ddf942173b..f9f4941d4e0 100644 --- a/syft/formats/internal/testutils/utils.go +++ b/syft/formats/internal/testutils/utils.go @@ -17,6 +17,7 @@ import ( "github.com/anchore/stereoscope/pkg/imagetest" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/cpe" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/sbom" @@ -155,8 +156,8 @@ func populateImageCatalog(catalog *pkg.Collection, img *image.Image) { catalog.Add(pkg.Package{ Name: "package-1", Version: "1.0.1", - Locations: source.NewLocationSet( - source.NewLocationFromImage(string(ref1.RealPath), *ref1.Reference, img), + Locations: file.NewLocationSet( + file.NewLocationFromImage(string(ref1.RealPath), *ref1.Reference, img), ), Type: pkg.PythonPkg, FoundBy: "the-cataloger-1", @@ -177,8 +178,8 @@ func populateImageCatalog(catalog *pkg.Collection, img *image.Image) { catalog.Add(pkg.Package{ Name: "package-2", Version: "2.0.1", - Locations: source.NewLocationSet( - source.NewLocationFromImage(string(ref2.RealPath), *ref2.Reference, img), + Locations: file.NewLocationSet( + file.NewLocationFromImage(string(ref2.RealPath), *ref2.Reference, img), ), Type: pkg.DebPkg, FoundBy: "the-cataloger-2", @@ -265,8 +266,8 @@ func 
newDirectoryCatalog() *pkg.Collection { Version: "1.0.1", Type: pkg.PythonPkg, FoundBy: "the-cataloger-1", - Locations: source.NewLocationSet( - source.NewLocation("/some/path/pkg1"), + Locations: file.NewLocationSet( + file.NewLocation("/some/path/pkg1"), ), Language: pkg.Python, MetadataType: pkg.PythonPackageMetadataType, @@ -292,8 +293,8 @@ func newDirectoryCatalog() *pkg.Collection { Version: "2.0.1", Type: pkg.DebPkg, FoundBy: "the-cataloger-2", - Locations: source.NewLocationSet( - source.NewLocation("/some/path/pkg1"), + Locations: file.NewLocationSet( + file.NewLocation("/some/path/pkg1"), ), MetadataType: pkg.DpkgMetadataType, Metadata: pkg.DpkgMetadata{ @@ -318,8 +319,8 @@ func newDirectoryCatalogWithAuthorField() *pkg.Collection { Version: "1.0.1", Type: pkg.PythonPkg, FoundBy: "the-cataloger-1", - Locations: source.NewLocationSet( - source.NewLocation("/some/path/pkg1"), + Locations: file.NewLocationSet( + file.NewLocation("/some/path/pkg1"), ), Language: pkg.Python, MetadataType: pkg.PythonPackageMetadataType, @@ -346,8 +347,8 @@ func newDirectoryCatalogWithAuthorField() *pkg.Collection { Version: "2.0.1", Type: pkg.DebPkg, FoundBy: "the-cataloger-2", - Locations: source.NewLocationSet( - source.NewLocation("/some/path/pkg1"), + Locations: file.NewLocationSet( + file.NewLocation("/some/path/pkg1"), ), MetadataType: pkg.DpkgMetadataType, Metadata: pkg.DpkgMetadata{ @@ -366,15 +367,15 @@ func newDirectoryCatalogWithAuthorField() *pkg.Collection { //nolint:gosec func AddSampleFileRelationships(s *sbom.SBOM) { catalog := s.Artifacts.Packages.Sorted() - s.Artifacts.FileMetadata = map[source.Coordinates]source.FileMetadata{} + s.Artifacts.FileMetadata = map[file.Coordinates]file.Metadata{} files := []string{"/f1", "/f2", "/d1/f3", "/d2/f4", "/z1/f5", "/a1/f6"} rnd := rand.New(rand.NewSource(time.Now().UnixNano())) rnd.Shuffle(len(files), func(i, j int) { files[i], files[j] = files[j], files[i] }) for _, f := range files { - meta := source.FileMetadata{} - 
coords := source.Coordinates{RealPath: f} + meta := file.Metadata{} + coords := file.Coordinates{RealPath: f} s.Artifacts.FileMetadata[coords] = meta s.Relationships = append(s.Relationships, artifact.Relationship{ diff --git a/syft/formats/syftjson/encoder_test.go b/syft/formats/syftjson/encoder_test.go index 5b9a0f25c92..231333bb869 100644 --- a/syft/formats/syftjson/encoder_test.go +++ b/syft/formats/syftjson/encoder_test.go @@ -52,8 +52,8 @@ func TestEncodeFullJSONDocument(t *testing.T) { p1 := pkg.Package{ Name: "package-1", Version: "1.0.1", - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates(source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates(file.Coordinates{ RealPath: "/a/place/a", }), ), @@ -76,8 +76,8 @@ func TestEncodeFullJSONDocument(t *testing.T) { p2 := pkg.Package{ Name: "package-2", Version: "2.0.1", - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates(source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates(file.Coordinates{ RealPath: "/b/place/b", }), ), @@ -101,8 +101,8 @@ func TestEncodeFullJSONDocument(t *testing.T) { s := sbom.SBOM{ Artifacts: sbom.Artifacts{ Packages: catalog, - FileMetadata: map[source.Coordinates]source.FileMetadata{ - source.NewLocation("/a/place").Coordinates: { + FileMetadata: map[file.Coordinates]file.Metadata{ + file.NewLocation("/a/place").Coordinates: { FileInfo: stereoFile.ManualInfo{ NameValue: "/a/place", ModeValue: 0775, @@ -111,7 +111,7 @@ func TestEncodeFullJSONDocument(t *testing.T) { UserID: 0, GroupID: 0, }, - source.NewLocation("/a/place/a").Coordinates: { + file.NewLocation("/a/place/a").Coordinates: { FileInfo: stereoFile.ManualInfo{ NameValue: "/a/place/a", ModeValue: 0775, @@ -120,7 +120,7 @@ func TestEncodeFullJSONDocument(t *testing.T) { UserID: 0, GroupID: 0, }, - source.NewLocation("/b").Coordinates: { + file.NewLocation("/b").Coordinates: { FileInfo: stereoFile.ManualInfo{ NameValue: "/b", 
ModeValue: 0775, @@ -130,7 +130,7 @@ func TestEncodeFullJSONDocument(t *testing.T) { UserID: 0, GroupID: 0, }, - source.NewLocation("/b/place/b").Coordinates: { + file.NewLocation("/b/place/b").Coordinates: { FileInfo: stereoFile.ManualInfo{ NameValue: "/b/place/b", ModeValue: 0644, @@ -140,22 +140,22 @@ func TestEncodeFullJSONDocument(t *testing.T) { GroupID: 2, }, }, - FileDigests: map[source.Coordinates][]file.Digest{ - source.NewLocation("/a/place/a").Coordinates: { + FileDigests: map[file.Coordinates][]file.Digest{ + file.NewLocation("/a/place/a").Coordinates: { { Algorithm: "sha256", Value: "366a3f5653e34673b875891b021647440d0127c2ef041e3b1a22da2a7d4f3703", }, }, - source.NewLocation("/b/place/b").Coordinates: { + file.NewLocation("/b/place/b").Coordinates: { { Algorithm: "sha256", Value: "1b3722da2a7d90d033b87581a2a3f12021647445653e34666ef041e3b4f3707c", }, }, }, - FileContents: map[source.Coordinates]string{ - source.NewLocation("/a/place/a").Coordinates: "the-contents", + FileContents: map[file.Coordinates]string{ + file.NewLocation("/a/place/a").Coordinates: "the-contents", }, LinuxDistribution: &linux.Release{ ID: "redhat", diff --git a/syft/formats/syftjson/model/file.go b/syft/formats/syftjson/model/file.go index 796cecebf1c..757a293154a 100644 --- a/syft/formats/syftjson/model/file.go +++ b/syft/formats/syftjson/model/file.go @@ -2,12 +2,11 @@ package model import ( "github.com/anchore/syft/syft/file" - "github.com/anchore/syft/syft/source" ) type File struct { ID string `json:"id"` - Location source.Coordinates `json:"location"` + Location file.Coordinates `json:"location"` Metadata *FileMetadataEntry `json:"metadata,omitempty"` Contents string `json:"contents,omitempty"` Digests []file.Digest `json:"digests,omitempty"` diff --git a/syft/formats/syftjson/model/package.go b/syft/formats/syftjson/model/package.go index c4fc9580467..fccf04c0bda 100644 --- a/syft/formats/syftjson/model/package.go +++ b/syft/formats/syftjson/model/package.go @@ -7,9 
+7,9 @@ import ( "reflect" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/license" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) var errUnknownMetadataType = errors.New("unknown metadata type") @@ -22,26 +22,26 @@ type Package struct { // PackageBasicData contains non-ambiguous values (type-wise) from pkg.Package. type PackageBasicData struct { - ID string `json:"id"` - Name string `json:"name"` - Version string `json:"version"` - Type pkg.Type `json:"type"` - FoundBy string `json:"foundBy"` - Locations []source.Location `json:"locations"` - Licenses licenses `json:"licenses"` - Language pkg.Language `json:"language"` - CPEs []string `json:"cpes"` - PURL string `json:"purl"` + ID string `json:"id"` + Name string `json:"name"` + Version string `json:"version"` + Type pkg.Type `json:"type"` + FoundBy string `json:"foundBy"` + Locations []file.Location `json:"locations"` + Licenses licenses `json:"licenses"` + Language pkg.Language `json:"language"` + CPEs []string `json:"cpes"` + PURL string `json:"purl"` } type licenses []License type License struct { - Value string `json:"value"` - SPDXExpression string `json:"spdxExpression"` - Type license.Type `json:"type"` - URLs []string `json:"urls"` - Locations []source.Location `json:"locations"` + Value string `json:"value"` + SPDXExpression string `json:"spdxExpression"` + Type license.Type `json:"type"` + URLs []string `json:"urls"` + Locations []file.Location `json:"locations"` } func newModelLicensesFromValues(licenses []string) (ml []License) { diff --git a/syft/formats/syftjson/model/secrets.go b/syft/formats/syftjson/model/secrets.go index c5f4685765d..5562b76bb2c 100644 --- a/syft/formats/syftjson/model/secrets.go +++ b/syft/formats/syftjson/model/secrets.go @@ -2,10 +2,9 @@ package model import ( "github.com/anchore/syft/syft/file" - "github.com/anchore/syft/syft/source" ) type Secrets struct { - 
Location source.Coordinates `json:"location"` + Location file.Coordinates `json:"location"` Secrets []file.SearchResult `json:"secrets"` } diff --git a/syft/formats/syftjson/to_format_model.go b/syft/formats/syftjson/to_format_model.go index efddddde268..7b3688ced69 100644 --- a/syft/formats/syftjson/to_format_model.go +++ b/syft/formats/syftjson/to_format_model.go @@ -74,7 +74,7 @@ func toDescriptor(d sbom.Descriptor) model.Descriptor { } } -func toSecrets(data map[source.Coordinates][]file.SearchResult) []model.Secrets { +func toSecrets(data map[file.Coordinates][]file.SearchResult) []model.Secrets { results := make([]model.Secrets, 0) for coordinates, secrets := range data { results = append(results, model.Secrets{ @@ -95,7 +95,7 @@ func toFile(s sbom.SBOM) []model.File { artifacts := s.Artifacts for _, coordinates := range s.AllCoordinates() { - var metadata *source.FileMetadata + var metadata *file.Metadata if metadataForLocation, exists := artifacts.FileMetadata[coordinates]; exists { metadata = &metadataForLocation } @@ -126,7 +126,7 @@ func toFile(s sbom.SBOM) []model.File { return results } -func toFileMetadataEntry(coordinates source.Coordinates, metadata *source.FileMetadata) *model.FileMetadataEntry { +func toFileMetadataEntry(coordinates file.Coordinates, metadata *file.Metadata) *model.FileMetadataEntry { if metadata == nil { return nil } @@ -195,7 +195,7 @@ func toPackageModels(catalog *pkg.Collection) []model.Package { func toLicenseModel(pkgLicenses []pkg.License) (modelLicenses []model.License) { for _, l := range pkgLicenses { // guarantee collection - locations := make([]source.Location, 0) + locations := make([]file.Location, 0) if v := l.Locations.ToSlice(); v != nil { locations = v } diff --git a/syft/formats/syftjson/to_format_model_test.go b/syft/formats/syftjson/to_format_model_test.go index 9794a1b7633..98f03c7b08f 100644 --- a/syft/formats/syftjson/to_format_model_test.go +++ b/syft/formats/syftjson/to_format_model_test.go @@ -7,7 +7,8 
@@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/formats/syftjson/model" "github.com/anchore/syft/syft/source" ) @@ -94,46 +95,46 @@ func Test_toSourceModel(t *testing.T) { func Test_toFileType(t *testing.T) { - badType := file.Type(0x1337) - var allTypesTested []file.Type + badType := stereoscopeFile.Type(0x1337) + var allTypesTested []stereoscopeFile.Type tests := []struct { - ty file.Type + ty stereoscopeFile.Type name string }{ { - ty: file.TypeRegular, + ty: stereoscopeFile.TypeRegular, name: "RegularFile", }, { - ty: file.TypeDirectory, + ty: stereoscopeFile.TypeDirectory, name: "Directory", }, { - ty: file.TypeSymLink, + ty: stereoscopeFile.TypeSymLink, name: "SymbolicLink", }, { - ty: file.TypeHardLink, + ty: stereoscopeFile.TypeHardLink, name: "HardLink", }, { - ty: file.TypeSocket, + ty: stereoscopeFile.TypeSocket, name: "Socket", }, { - ty: file.TypeCharacterDevice, + ty: stereoscopeFile.TypeCharacterDevice, name: "CharacterDevice", }, { - ty: file.TypeBlockDevice, + ty: stereoscopeFile.TypeBlockDevice, name: "BlockDevice", }, { - ty: file.TypeFIFO, + ty: stereoscopeFile.TypeFIFO, name: "FIFONode", }, { - ty: file.TypeIrregular, + ty: stereoscopeFile.TypeIrregular, name: "IrregularFile", }, { @@ -150,17 +151,17 @@ func Test_toFileType(t *testing.T) { }) } - assert.ElementsMatch(t, allTypesTested, file.AllTypes(), "not all file.Types are under test") + assert.ElementsMatch(t, allTypesTested, stereoscopeFile.AllTypes(), "not all file.Types are under test") } func Test_toFileMetadataEntry(t *testing.T) { - coords := source.Coordinates{ + coords := file.Coordinates{ RealPath: "/path", FileSystemID: "x", } tests := []struct { name string - metadata *source.FileMetadata + metadata *file.Metadata want *model.FileMetadataEntry }{ { @@ 
-168,23 +169,23 @@ func Test_toFileMetadataEntry(t *testing.T) { }, { name: "no file info", - metadata: &source.FileMetadata{ + metadata: &file.Metadata{ FileInfo: nil, }, want: &model.FileMetadataEntry{ - Type: file.TypeRegular.String(), + Type: stereoscopeFile.TypeRegular.String(), }, }, { name: "with file info", - metadata: &source.FileMetadata{ - FileInfo: &file.ManualInfo{ + metadata: &file.Metadata{ + FileInfo: &stereoscopeFile.ManualInfo{ ModeValue: 1, }, }, want: &model.FileMetadataEntry{ Mode: 1, - Type: file.TypeRegular.String(), + Type: stereoscopeFile.TypeRegular.String(), }, }, } diff --git a/syft/formats/syftjson/to_syft_model.go b/syft/formats/syftjson/to_syft_model.go index 7b420183839..aeb0c24f165 100644 --- a/syft/formats/syftjson/to_syft_model.go +++ b/syft/formats/syftjson/to_syft_model.go @@ -64,8 +64,8 @@ func deduplicateErrors(errors []error) []string { func toSyftFiles(files []model.File) sbom.Artifacts { ret := sbom.Artifacts{ - FileMetadata: make(map[source.Coordinates]source.FileMetadata), - FileDigests: make(map[source.Coordinates][]file.Digest), + FileMetadata: make(map[file.Coordinates]file.Metadata), + FileDigests: make(map[file.Coordinates][]file.Digest), } for _, f := range files { @@ -79,7 +79,7 @@ func toSyftFiles(files []model.File) sbom.Artifacts { fm := os.FileMode(mode) - ret.FileMetadata[coord] = source.FileMetadata{ + ret.FileMetadata[coord] = file.Metadata{ FileInfo: stereoscopeFile.ManualInfo{ NameValue: path.Base(coord.RealPath), SizeValue: f.Metadata.Size, @@ -112,7 +112,7 @@ func toSyftLicenses(m []model.License) (p []pkg.License) { SPDXExpression: l.SPDXExpression, Type: l.Type, URLs: internal.NewStringSet(l.URLs...), - Locations: source.NewLocationSet(l.Locations...), + Locations: file.NewLocationSet(l.Locations...), }) } return @@ -320,7 +320,7 @@ func toSyftPackage(p model.Package, idAliases map[string]string) pkg.Package { Name: p.Name, Version: p.Version, FoundBy: p.FoundBy, - Locations: 
source.NewLocationSet(p.Locations...), + Locations: file.NewLocationSet(p.Locations...), Licenses: pkg.NewLicenseSet(toSyftLicenses(p.Licenses)...), Language: p.Language, Type: p.Type, diff --git a/syft/formats/syftjson/to_syft_model_test.go b/syft/formats/syftjson/to_syft_model_test.go index 8c4ab3cee52..dabc33f3841 100644 --- a/syft/formats/syftjson/to_syft_model_test.go +++ b/syft/formats/syftjson/to_syft_model_test.go @@ -131,7 +131,7 @@ func Test_idsHaveChanged(t *testing.T) { } func Test_toSyftFiles(t *testing.T) { - coord := source.Coordinates{ + coord := file.Coordinates{ RealPath: "/somerwhere/place", FileSystemID: "abc", } @@ -145,8 +145,8 @@ func Test_toSyftFiles(t *testing.T) { name: "empty", files: []model.File{}, want: sbom.Artifacts{ - FileMetadata: map[source.Coordinates]source.FileMetadata{}, - FileDigests: map[source.Coordinates][]file.Digest{}, + FileMetadata: map[file.Coordinates]file.Metadata{}, + FileDigests: map[file.Coordinates][]file.Digest{}, }, }, { @@ -165,8 +165,8 @@ func Test_toSyftFiles(t *testing.T) { }, }, want: sbom.Artifacts{ - FileMetadata: map[source.Coordinates]source.FileMetadata{}, - FileDigests: map[source.Coordinates][]file.Digest{ + FileMetadata: map[file.Coordinates]file.Metadata{}, + FileDigests: map[file.Coordinates][]file.Digest{ coord: { { Algorithm: "sha256", @@ -200,7 +200,7 @@ func Test_toSyftFiles(t *testing.T) { }, }, want: sbom.Artifacts{ - FileMetadata: map[source.Coordinates]source.FileMetadata{ + FileMetadata: map[file.Coordinates]file.Metadata{ coord: { FileInfo: stereoFile.ManualInfo{ NameValue: "place", @@ -215,7 +215,7 @@ func Test_toSyftFiles(t *testing.T) { MIMEType: "text/plain", }, }, - FileDigests: map[source.Coordinates][]file.Digest{ + FileDigests: map[file.Coordinates][]file.Digest{ coord: { { Algorithm: "sha256", diff --git a/syft/source/image_all_layers_resolver.go b/syft/internal/fileresolver/container_image_all_layers.go similarity index 65% rename from syft/source/image_all_layers_resolver.go 
rename to syft/internal/fileresolver/container_image_all_layers.go index dd9a0bd2e0d..e66c92aaf1b 100644 --- a/syft/source/image_all_layers_resolver.go +++ b/syft/internal/fileresolver/container_image_all_layers.go @@ -1,25 +1,26 @@ -package source +package fileresolver import ( "fmt" "io" - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" "github.com/anchore/stereoscope/pkg/filetree" "github.com/anchore/stereoscope/pkg/image" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" ) -var _ FileResolver = (*imageAllLayersResolver)(nil) +var _ file.Resolver = (*ContainerImageAllLayers)(nil) -// imageAllLayersResolver implements path and content access for the AllLayers source option for container image data sources. -type imageAllLayersResolver struct { +// ContainerImageAllLayers implements path and content access for the AllLayers source option for container image data sources. +type ContainerImageAllLayers struct { img *image.Image layers []int } -// newAllLayersResolver returns a new resolver from the perspective of all image layers for the given image. -func newAllLayersResolver(img *image.Image) (*imageAllLayersResolver, error) { +// NewFromContainerImageAllLayers returns a new resolver from the perspective of all image layers for the given image. +func NewFromContainerImageAllLayers(img *image.Image) (*ContainerImageAllLayers, error) { if len(img.Layers) == 0 { return nil, fmt.Errorf("the image does not contain any layers") } @@ -28,15 +29,15 @@ func newAllLayersResolver(img *image.Image) (*imageAllLayersResolver, error) { for idx := range img.Layers { layers = append(layers, idx) } - return &imageAllLayersResolver{ + return &ContainerImageAllLayers{ img: img, layers: layers, }, nil } // HasPath indicates if the given path exists in the underlying source. 
-func (r *imageAllLayersResolver) HasPath(path string) bool { - p := file.Path(path) +func (r *ContainerImageAllLayers) HasPath(path string) bool { + p := stereoscopeFile.Path(path) for _, layerIdx := range r.layers { tree := r.img.Layers[layerIdx].Tree if tree.HasPath(p) { @@ -46,8 +47,8 @@ func (r *imageAllLayersResolver) HasPath(path string) bool { return false } -func (r *imageAllLayersResolver) fileByRef(ref file.Reference, uniqueFileIDs file.ReferenceSet, layerIdx int) ([]file.Reference, error) { - uniqueFiles := make([]file.Reference, 0) +func (r *ContainerImageAllLayers) fileByRef(ref stereoscopeFile.Reference, uniqueFileIDs stereoscopeFile.ReferenceSet, layerIdx int) ([]stereoscopeFile.Reference, error) { + uniqueFiles := make([]stereoscopeFile.Reference, 0) // since there is potentially considerable work for each symlink/hardlink that needs to be resolved, let's check to see if this is a symlink/hardlink first entry, err := r.img.FileCatalog.Get(ref) @@ -55,7 +56,7 @@ func (r *imageAllLayersResolver) fileByRef(ref file.Reference, uniqueFileIDs fil return nil, fmt.Errorf("unable to fetch metadata (ref=%+v): %w", ref, err) } - if entry.Metadata.Type == file.TypeHardLink || entry.Metadata.Type == file.TypeSymLink { + if entry.Metadata.Type == stereoscopeFile.TypeHardLink || entry.Metadata.Type == stereoscopeFile.TypeSymLink { // a link may resolve in this layer or higher, assuming a squashed tree is used to search // we should search all possible resolutions within the valid source for _, subLayerIdx := range r.layers[layerIdx:] { @@ -77,9 +78,9 @@ func (r *imageAllLayersResolver) fileByRef(ref file.Reference, uniqueFileIDs fil } // FilesByPath returns all file.References that match the given paths from any layer in the image. 
-func (r *imageAllLayersResolver) FilesByPath(paths ...string) ([]Location, error) { - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) +func (r *ContainerImageAllLayers) FilesByPath(paths ...string) ([]file.Location, error) { + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) for _, path := range paths { for idx, layerIdx := range r.layers { @@ -110,7 +111,7 @@ func (r *imageAllLayersResolver) FilesByPath(paths ...string) ([]Location, error return nil, err } for _, result := range results { - uniqueLocations = append(uniqueLocations, NewLocationFromImage(path, result, r.img)) + uniqueLocations = append(uniqueLocations, file.NewLocationFromImage(path, result, r.img)) } } } @@ -119,9 +120,9 @@ func (r *imageAllLayersResolver) FilesByPath(paths ...string) ([]Location, error // FilesByGlob returns all file.References that match the given path glob pattern from any layer in the image. // nolint:gocognit -func (r *imageAllLayersResolver) FilesByGlob(patterns ...string) ([]Location, error) { - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) +func (r *ContainerImageAllLayers) FilesByGlob(patterns ...string) ([]file.Location, error) { + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) for _, pattern := range patterns { for idx, layerIdx := range r.layers { @@ -153,7 +154,7 @@ func (r *imageAllLayersResolver) FilesByGlob(patterns ...string) ([]Location, er return nil, err } for _, refResult := range refResults { - uniqueLocations = append(uniqueLocations, NewLocationFromImage(string(result.RequestPath), refResult, r.img)) + uniqueLocations = append(uniqueLocations, file.NewLocationFromImage(string(result.RequestPath), refResult, r.img)) } } } @@ -164,10 +165,10 @@ func (r *imageAllLayersResolver) FilesByGlob(patterns ...string) ([]Location, er // RelativeFileByPath fetches a single file at the given 
path relative to the layer squash of the given reference. // This is helpful when attempting to find a file that is in the same layer or lower as another file. -func (r *imageAllLayersResolver) RelativeFileByPath(location Location, path string) *Location { - layer := r.img.FileCatalog.Layer(location.ref) +func (r *ContainerImageAllLayers) RelativeFileByPath(location file.Location, path string) *file.Location { + layer := r.img.FileCatalog.Layer(location.Reference()) - exists, relativeRef, err := layer.SquashedTree.File(file.Path(path), filetree.FollowBasenameLinks) + exists, relativeRef, err := layer.SquashedTree.File(stereoscopeFile.Path(path), filetree.FollowBasenameLinks) if err != nil { log.Errorf("failed to find path=%q in squash: %+w", path, err) return nil @@ -176,21 +177,21 @@ func (r *imageAllLayersResolver) RelativeFileByPath(location Location, path stri return nil } - relativeLocation := NewLocationFromImage(path, *relativeRef.Reference, r.img) + relativeLocation := file.NewLocationFromImage(path, *relativeRef.Reference, r.img) return &relativeLocation } // FileContentsByLocation fetches file contents for a single file reference, irregardless of the source layer. // If the path does not exist an error is returned. 
-func (r *imageAllLayersResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) { - entry, err := r.img.FileCatalog.Get(location.ref) +func (r *ContainerImageAllLayers) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { + entry, err := r.img.FileCatalog.Get(location.Reference()) if err != nil { return nil, fmt.Errorf("unable to get metadata for path=%q from file catalog: %w", location.RealPath, err) } switch entry.Metadata.Type { - case file.TypeSymLink, file.TypeHardLink: + case stereoscopeFile.TypeSymLink, stereoscopeFile.TypeHardLink: // the location we are searching may be a symlink, we should always work with the resolved file newLocation := r.RelativeFileByPath(location, location.VirtualPath) if newLocation == nil { @@ -198,16 +199,16 @@ func (r *imageAllLayersResolver) FileContentsByLocation(location Location) (io.R return nil, fmt.Errorf("no contents for location=%q", location.VirtualPath) } location = *newLocation - case file.TypeDirectory: - return nil, fmt.Errorf("cannot read contents of non-file %q", location.ref.RealPath) + case stereoscopeFile.TypeDirectory: + return nil, fmt.Errorf("cannot read contents of non-file %q", location.Reference().RealPath) } - return r.img.FileContentsByRef(location.ref) + return r.img.OpenReference(location.Reference()) } -func (r *imageAllLayersResolver) FilesByMIMEType(types ...string) ([]Location, error) { - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) +func (r *ContainerImageAllLayers) FilesByMIMEType(types ...string) ([]file.Location, error) { + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) for idx, layerIdx := range r.layers { refs, err := r.img.Layers[layerIdx].SearchContext.SearchByMIMEType(types...) 
@@ -225,7 +226,7 @@ func (r *imageAllLayersResolver) FilesByMIMEType(types ...string) ([]Location, e return nil, err } for _, refResult := range refResults { - uniqueLocations = append(uniqueLocations, NewLocationFromImage(string(ref.RequestPath), refResult, r.img)) + uniqueLocations = append(uniqueLocations, file.NewLocationFromImage(string(ref.RequestPath), refResult, r.img)) } } } @@ -233,20 +234,20 @@ func (r *imageAllLayersResolver) FilesByMIMEType(types ...string) ([]Location, e return uniqueLocations, nil } -func (r *imageAllLayersResolver) AllLocations() <-chan Location { - results := make(chan Location) +func (r *ContainerImageAllLayers) AllLocations() <-chan file.Location { + results := make(chan file.Location) go func() { defer close(results) for _, layerIdx := range r.layers { tree := r.img.Layers[layerIdx].Tree - for _, ref := range tree.AllFiles(file.AllTypes()...) { - results <- NewLocationFromImage(string(ref.RealPath), ref, r.img) + for _, ref := range tree.AllFiles(stereoscopeFile.AllTypes()...) 
{ + results <- file.NewLocationFromImage(string(ref.RealPath), ref, r.img) } } }() return results } -func (r *imageAllLayersResolver) FileMetadataByLocation(location Location) (FileMetadata, error) { +func (r *ContainerImageAllLayers) FileMetadataByLocation(location file.Location) (file.Metadata, error) { return fileMetadataByLocation(r.img, location) } diff --git a/syft/source/image_all_layers_resolver_test.go b/syft/internal/fileresolver/container_image_all_layers_test.go similarity index 76% rename from syft/source/image_all_layers_resolver_test.go rename to syft/internal/fileresolver/container_image_all_layers_test.go index 0a804290727..7fb04d56b78 100644 --- a/syft/source/image_all_layers_resolver_test.go +++ b/syft/internal/fileresolver/container_image_all_layers_test.go @@ -1,4 +1,4 @@ -package source +package fileresolver import ( "fmt" @@ -13,6 +13,7 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/stereoscope/pkg/imagetest" + "github.com/anchore/syft/syft/file" ) type resolution struct { @@ -93,7 +94,7 @@ func TestAllLayersResolver_FilesByPath(t *testing.T) { t.Run(c.name, func(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newAllLayersResolver(img) + resolver, err := NewFromContainerImageAllLayers(img) if err != nil { t.Fatalf("could not create resolver: %+v", err) } @@ -121,15 +122,15 @@ func TestAllLayersResolver_FilesByPath(t *testing.T) { for idx, actual := range refs { expected := c.resolutions[idx] - if string(actual.ref.RealPath) != expected.path { - t.Errorf("bad resolve path: '%s'!='%s'", string(actual.ref.RealPath), expected.path) + if string(actual.Reference().RealPath) != expected.path { + t.Errorf("bad resolve path: '%s'!='%s'", string(actual.Reference().RealPath), expected.path) } - if expected.path != "" && string(actual.ref.RealPath) != actual.RealPath { + if expected.path != "" && string(actual.Reference().RealPath) != actual.RealPath { t.Errorf("we 
should always prefer real paths over ones with links") } - layer := img.FileCatalog.Layer(actual.ref) + layer := img.FileCatalog.Layer(actual.Reference()) if layer.Metadata.Index != expected.layer { t.Errorf("bad resolve layer: '%d'!='%d'", layer.Metadata.Index, expected.layer) } @@ -207,7 +208,7 @@ func TestAllLayersResolver_FilesByGlob(t *testing.T) { t.Run(c.name, func(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newAllLayersResolver(img) + resolver, err := NewFromContainerImageAllLayers(img) if err != nil { t.Fatalf("could not create resolver: %+v", err) } @@ -224,15 +225,15 @@ func TestAllLayersResolver_FilesByGlob(t *testing.T) { for idx, actual := range refs { expected := c.resolutions[idx] - if string(actual.ref.RealPath) != expected.path { - t.Errorf("bad resolve path: '%s'!='%s'", string(actual.ref.RealPath), expected.path) + if string(actual.Reference().RealPath) != expected.path { + t.Errorf("bad resolve path: '%s'!='%s'", string(actual.Reference().RealPath), expected.path) } - if expected.path != "" && string(actual.ref.RealPath) != actual.RealPath { + if expected.path != "" && string(actual.Reference().RealPath) != actual.RealPath { t.Errorf("we should always prefer real paths over ones with links") } - layer := img.FileCatalog.Layer(actual.ref) + layer := img.FileCatalog.Layer(actual.Reference()) if layer.Metadata.Index != expected.layer { t.Errorf("bad resolve layer: '%d'!='%d'", layer.Metadata.Index, expected.layer) @@ -259,7 +260,7 @@ func Test_imageAllLayersResolver_FilesByMIMEType(t *testing.T) { t.Run(test.fixtureName, func(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", test.fixtureName) - resolver, err := newAllLayersResolver(img) + resolver, err := NewFromContainerImageAllLayers(img) assert.NoError(t, err) locations, err := resolver.FilesByMIMEType(test.mimeType) @@ -276,7 +277,7 @@ func Test_imageAllLayersResolver_FilesByMIMEType(t *testing.T) { func 
Test_imageAllLayersResolver_hasFilesystemIDInLocation(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-duplicate-path") - resolver, err := newAllLayersResolver(img) + resolver, err := NewFromContainerImageAllLayers(img) assert.NoError(t, err) locations, err := resolver.FilesByMIMEType("text/plain") @@ -336,7 +337,7 @@ func TestAllLayersImageResolver_FilesContents(t *testing.T) { t.Run(test.name, func(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newAllLayersResolver(img) + resolver, err := NewFromContainerImageAllLayers(img) assert.NoError(t, err) refs, err := resolver.FilesByPath(test.fixture) @@ -363,12 +364,12 @@ func TestAllLayersImageResolver_FilesContents_errorOnDirRequest(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newAllLayersResolver(img) + resolver, err := NewFromContainerImageAllLayers(img) assert.NoError(t, err) - var dirLoc *Location + var dirLoc *file.Location for loc := range resolver.AllLocations() { - entry, err := resolver.img.FileCatalog.Get(loc.ref) + entry, err := resolver.img.FileCatalog.Get(loc.Reference()) require.NoError(t, err) if entry.Metadata.IsDir() { dirLoc = &loc @@ -386,119 +387,119 @@ func TestAllLayersImageResolver_FilesContents_errorOnDirRequest(t *testing.T) { func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) { tests := []struct { name string - runner func(FileResolver) []Location - expected []Location + runner func(file.Resolver) []file.Location + expected []file.Location }{ { name: "by mimetype", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links should not show up when searching mimetype actualLocations, err := resolver.FilesByMIMEType("text/plain") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("/etc/group", "/etc/group"), - 
NewVirtualLocation("/etc/passwd", "/etc/passwd"), - NewVirtualLocation("/etc/shadow", "/etc/shadow"), - NewVirtualLocation("/file-1.txt", "/file-1.txt"), - NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1 + expected: []file.Location{ + file.NewVirtualLocation("/etc/group", "/etc/group"), + file.NewVirtualLocation("/etc/passwd", "/etc/passwd"), + file.NewVirtualLocation("/etc/shadow", "/etc/shadow"), + file.NewVirtualLocation("/file-1.txt", "/file-1.txt"), + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1 // note: we're de-duping the redundant access to file-3.txt // ... (there would usually be two copies) - NewVirtualLocation("/file-3.txt", "/file-3.txt"), - NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2 - NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // copy 1 - NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // copy 2 + file.NewVirtualLocation("/file-3.txt", "/file-3.txt"), + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2 + file.NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // copy 1 + file.NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // copy 2 }, }, { name: "by glob to links", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("*ink-*") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("/file-1.txt", "/link-1"), - NewVirtualLocation("/file-2.txt", "/link-2"), // copy 1 - NewVirtualLocation("/file-2.txt", "/link-2"), // copy 2 - NewVirtualLocation("/file-3.txt", "/link-within"), + expected: []file.Location{ + file.NewVirtualLocation("/file-1.txt", "/link-1"), + file.NewVirtualLocation("/file-2.txt", "/link-2"), // copy 1 + file.NewVirtualLocation("/file-2.txt", "/link-2"), // copy 2 + file.NewVirtualLocation("/file-3.txt", "/link-within"), 
}, }, { name: "by basename", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("**/file-2.txt") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1 - NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2 + expected: []file.Location{ + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1 + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2 }, }, { name: "by basename glob", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("**/file-?.txt") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("/file-1.txt", "/file-1.txt"), - NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1 - NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2 - NewVirtualLocation("/file-3.txt", "/file-3.txt"), - NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), - NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // when we copy into the link path, the same file-4.txt is copied + expected: []file.Location{ + file.NewVirtualLocation("/file-1.txt", "/file-1.txt"), + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1 + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2 + file.NewVirtualLocation("/file-3.txt", "/file-3.txt"), + file.NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), + file.NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // when we copy into the link path, the same file-4.txt is copied }, }, { name: "by extension", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links 
are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("**/*.txt") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("/file-1.txt", "/file-1.txt"), - NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1 - NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2 - NewVirtualLocation("/file-3.txt", "/file-3.txt"), - NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), - NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // when we copy into the link path, the same file-4.txt is copied + expected: []file.Location{ + file.NewVirtualLocation("/file-1.txt", "/file-1.txt"), + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 1 + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), // copy 2 + file.NewVirtualLocation("/file-3.txt", "/file-3.txt"), + file.NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), + file.NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), // when we copy into the link path, the same file-4.txt is copied }, }, { name: "by path to degree 1 link", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links resolve to the final file actualLocations, err := resolver.FilesByPath("/link-2") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ // we have multiple copies across layers - NewVirtualLocation("/file-2.txt", "/link-2"), - NewVirtualLocation("/file-2.txt", "/link-2"), + file.NewVirtualLocation("/file-2.txt", "/link-2"), + file.NewVirtualLocation("/file-2.txt", "/link-2"), }, }, { name: "by path to degree 2 link", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // multiple links resolves to the final file actualLocations, err := resolver.FilesByPath("/link-indirect") assert.NoError(t, err) return actualLocations }, - expected: 
[]Location{ + expected: []file.Location{ // we have multiple copies across layers - NewVirtualLocation("/file-2.txt", "/link-indirect"), - NewVirtualLocation("/file-2.txt", "/link-indirect"), + file.NewVirtualLocation("/file-2.txt", "/link-indirect"), + file.NewVirtualLocation("/file-2.txt", "/link-indirect"), }, }, } @@ -508,7 +509,7 @@ func Test_imageAllLayersResolver_resolvesLinks(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newAllLayersResolver(img) + resolver, err := NewFromContainerImageAllLayers(img) assert.NoError(t, err) actual := test.runner(resolver) @@ -527,7 +528,7 @@ func TestAllLayersResolver_AllLocations(t *testing.T) { arch = "aarch64" } - resolver, err := newAllLayersResolver(img) + resolver, err := NewFromContainerImageAllLayers(img) assert.NoError(t, err) paths := strset.New() diff --git a/syft/source/image_squash_resolver.go b/syft/internal/fileresolver/container_image_squash.go similarity index 66% rename from syft/source/image_squash_resolver.go rename to syft/internal/fileresolver/container_image_squash.go index 233f008436d..92b4a8a13ee 100644 --- a/syft/source/image_squash_resolver.go +++ b/syft/internal/fileresolver/container_image_squash.go @@ -1,41 +1,42 @@ -package source +package fileresolver import ( "fmt" "io" - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" "github.com/anchore/stereoscope/pkg/filetree" "github.com/anchore/stereoscope/pkg/image" + "github.com/anchore/syft/syft/file" ) -var _ FileResolver = (*imageSquashResolver)(nil) +var _ file.Resolver = (*ContainerImageSquash)(nil) -// imageSquashResolver implements path and content access for the Squashed source option for container image data sources. -type imageSquashResolver struct { +// ContainerImageSquash implements path and content access for the Squashed source option for container image data sources. 
+type ContainerImageSquash struct { img *image.Image } -// newImageSquashResolver returns a new resolver from the perspective of the squashed representation for the given image. -func newImageSquashResolver(img *image.Image) (*imageSquashResolver, error) { +// NewFromContainerImageSquash returns a new resolver from the perspective of the squashed representation for the given image. +func NewFromContainerImageSquash(img *image.Image) (*ContainerImageSquash, error) { if img.SquashedTree() == nil { return nil, fmt.Errorf("the image does not have have a squashed tree") } - return &imageSquashResolver{ + return &ContainerImageSquash{ img: img, }, nil } // HasPath indicates if the given path exists in the underlying source. -func (r *imageSquashResolver) HasPath(path string) bool { - return r.img.SquashedTree().HasPath(file.Path(path)) +func (r *ContainerImageSquash) HasPath(path string) bool { + return r.img.SquashedTree().HasPath(stereoscopeFile.Path(path)) } // FilesByPath returns all file.References that match the given paths within the squashed representation of the image. 
-func (r *imageSquashResolver) FilesByPath(paths ...string) ([]Location, error) { - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) +func (r *ContainerImageSquash) FilesByPath(paths ...string) ([]file.Location, error) { + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) for _, path := range paths { ref, err := r.img.SquashedSearchContext.SearchByPath(path, filetree.FollowBasenameLinks) @@ -69,7 +70,7 @@ func (r *imageSquashResolver) FilesByPath(paths ...string) ([]Location, error) { if resolvedRef.HasReference() && !uniqueFileIDs.Contains(*resolvedRef.Reference) { uniqueFileIDs.Add(*resolvedRef.Reference) - uniqueLocations = append(uniqueLocations, NewLocationFromImage(path, *resolvedRef.Reference, r.img)) + uniqueLocations = append(uniqueLocations, file.NewLocationFromImage(path, *resolvedRef.Reference, r.img)) } } @@ -78,9 +79,9 @@ func (r *imageSquashResolver) FilesByPath(paths ...string) ([]Location, error) { // FilesByGlob returns all file.References that match the given path glob pattern within the squashed representation of the image. 
// nolint:gocognit -func (r *imageSquashResolver) FilesByGlob(patterns ...string) ([]Location, error) { - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) +func (r *ContainerImageSquash) FilesByGlob(patterns ...string) ([]file.Location, error) { + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) for _, pattern := range patterns { results, err := r.img.SquashedSearchContext.SearchByGlob(pattern, filetree.FollowBasenameLinks) @@ -113,10 +114,10 @@ func (r *imageSquashResolver) FilesByGlob(patterns ...string) ([]Location, error return nil, fmt.Errorf("failed to find files by path (result=%+v): %w", result, err) } for _, resolvedLocation := range resolvedLocations { - if uniqueFileIDs.Contains(resolvedLocation.ref) { + if uniqueFileIDs.Contains(resolvedLocation.Reference()) { continue } - uniqueFileIDs.Add(resolvedLocation.ref) + uniqueFileIDs.Add(resolvedLocation.Reference()) uniqueLocations = append(uniqueLocations, resolvedLocation) } } @@ -127,8 +128,8 @@ func (r *imageSquashResolver) FilesByGlob(patterns ...string) ([]Location, error // RelativeFileByPath fetches a single file at the given path relative to the layer squash of the given reference. // This is helpful when attempting to find a file that is in the same layer or lower as another file. For the -// imageSquashResolver, this is a simple path lookup. -func (r *imageSquashResolver) RelativeFileByPath(_ Location, path string) *Location { +// ContainerImageSquash, this is a simple path lookup. +func (r *ContainerImageSquash) RelativeFileByPath(_ file.Location, path string) *file.Location { paths, err := r.FilesByPath(path) if err != nil { return nil @@ -142,14 +143,14 @@ func (r *imageSquashResolver) RelativeFileByPath(_ Location, path string) *Locat // FileContentsByLocation fetches file contents for a single file reference, regardless of the source layer. // If the path does not exist an error is returned. 
-func (r *imageSquashResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) { - entry, err := r.img.FileCatalog.Get(location.ref) +func (r *ContainerImageSquash) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { + entry, err := r.img.FileCatalog.Get(location.Reference()) if err != nil { return nil, fmt.Errorf("unable to get metadata for path=%q from file catalog: %w", location.RealPath, err) } switch entry.Metadata.Type { - case file.TypeSymLink, file.TypeHardLink: + case stereoscopeFile.TypeSymLink, stereoscopeFile.TypeHardLink: // the location we are searching may be a symlink, we should always work with the resolved file locations, err := r.FilesByPath(location.RealPath) if err != nil { @@ -164,39 +165,39 @@ func (r *imageSquashResolver) FileContentsByLocation(location Location) (io.Read default: return nil, fmt.Errorf("link resolution resulted in multiple results while resolving content location: %+v", location) } - case file.TypeDirectory: + case stereoscopeFile.TypeDirectory: return nil, fmt.Errorf("unable to get file contents for directory: %+v", location) } - return r.img.FileContentsByRef(location.ref) + return r.img.OpenReference(location.Reference()) } -func (r *imageSquashResolver) AllLocations() <-chan Location { - results := make(chan Location) +func (r *ContainerImageSquash) AllLocations() <-chan file.Location { + results := make(chan file.Location) go func() { defer close(results) - for _, ref := range r.img.SquashedTree().AllFiles(file.AllTypes()...) { - results <- NewLocationFromImage(string(ref.RealPath), ref, r.img) + for _, ref := range r.img.SquashedTree().AllFiles(stereoscopeFile.AllTypes()...) 
{ + results <- file.NewLocationFromImage(string(ref.RealPath), ref, r.img) } }() return results } -func (r *imageSquashResolver) FilesByMIMEType(types ...string) ([]Location, error) { +func (r *ContainerImageSquash) FilesByMIMEType(types ...string) ([]file.Location, error) { refs, err := r.img.SquashedSearchContext.SearchByMIMEType(types...) if err != nil { return nil, err } - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) for _, ref := range refs { if ref.HasReference() { if uniqueFileIDs.Contains(*ref.Reference) { continue } - location := NewLocationFromImage(string(ref.RequestPath), *ref.Reference, r.img) + location := file.NewLocationFromImage(string(ref.RequestPath), *ref.Reference, r.img) uniqueFileIDs.Add(*ref.Reference) uniqueLocations = append(uniqueLocations, location) @@ -206,6 +207,6 @@ func (r *imageSquashResolver) FilesByMIMEType(types ...string) ([]Location, erro return uniqueLocations, nil } -func (r *imageSquashResolver) FileMetadataByLocation(location Location) (FileMetadata, error) { +func (r *ContainerImageSquash) FileMetadataByLocation(location file.Location) (file.Metadata, error) { return fileMetadataByLocation(r.img, location) } diff --git a/syft/source/image_squash_resolver_test.go b/syft/internal/fileresolver/container_image_squash_test.go similarity index 72% rename from syft/source/image_squash_resolver_test.go rename to syft/internal/fileresolver/container_image_squash_test.go index 0fd9c4f99be..d65d0bccc88 100644 --- a/syft/source/image_squash_resolver_test.go +++ b/syft/internal/fileresolver/container_image_squash_test.go @@ -1,4 +1,4 @@ -package source +package fileresolver import ( "io" @@ -6,13 +6,12 @@ import ( "testing" "github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" "github.com/scylladb/go-set/strset" "github.com/stretchr/testify/assert" 
"github.com/stretchr/testify/require" - "github.com/anchore/stereoscope/pkg/file" "github.com/anchore/stereoscope/pkg/imagetest" + "github.com/anchore/syft/syft/file" ) func TestImageSquashResolver_FilesByPath(t *testing.T) { @@ -73,7 +72,7 @@ func TestImageSquashResolver_FilesByPath(t *testing.T) { t.Run(c.name, func(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newImageSquashResolver(img) + resolver, err := NewFromContainerImageSquash(img) if err != nil { t.Fatalf("could not create resolver: %+v", err) } @@ -110,15 +109,15 @@ func TestImageSquashResolver_FilesByPath(t *testing.T) { actual := refs[0] - if string(actual.ref.RealPath) != c.resolvePath { - t.Errorf("bad resolve path: '%s'!='%s'", string(actual.ref.RealPath), c.resolvePath) + if string(actual.Reference().RealPath) != c.resolvePath { + t.Errorf("bad resolve path: '%s'!='%s'", string(actual.Reference().RealPath), c.resolvePath) } - if c.resolvePath != "" && string(actual.ref.RealPath) != actual.RealPath { + if c.resolvePath != "" && string(actual.Reference().RealPath) != actual.RealPath { t.Errorf("we should always prefer real paths over ones with links") } - layer := img.FileCatalog.Layer(actual.ref) + layer := img.FileCatalog.Layer(actual.Reference()) if layer.Metadata.Index != c.resolveLayer { t.Errorf("bad resolve layer: '%d'!='%d'", layer.Metadata.Index, c.resolveLayer) @@ -186,7 +185,7 @@ func TestImageSquashResolver_FilesByGlob(t *testing.T) { t.Run(c.name, func(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newImageSquashResolver(img) + resolver, err := NewFromContainerImageSquash(img) if err != nil { t.Fatalf("could not create resolver: %+v", err) } @@ -212,15 +211,15 @@ func TestImageSquashResolver_FilesByGlob(t *testing.T) { actual := refs[0] - if string(actual.ref.RealPath) != c.resolvePath { - t.Errorf("bad resolve path: '%s'!='%s'", 
string(actual.ref.RealPath), c.resolvePath) + if string(actual.Reference().RealPath) != c.resolvePath { + t.Errorf("bad resolve path: '%s'!='%s'", string(actual.Reference().RealPath), c.resolvePath) } - if c.resolvePath != "" && string(actual.ref.RealPath) != actual.RealPath { + if c.resolvePath != "" && string(actual.Reference().RealPath) != actual.RealPath { t.Errorf("we should always prefer real paths over ones with links") } - layer := img.FileCatalog.Layer(actual.ref) + layer := img.FileCatalog.Layer(actual.Reference()) if layer.Metadata.Index != c.resolveLayer { t.Errorf("bad resolve layer: '%d'!='%d'", layer.Metadata.Index, c.resolveLayer) @@ -247,7 +246,7 @@ func Test_imageSquashResolver_FilesByMIMEType(t *testing.T) { t.Run(test.fixtureName, func(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", test.fixtureName) - resolver, err := newImageSquashResolver(img) + resolver, err := NewFromContainerImageSquash(img) assert.NoError(t, err) locations, err := resolver.FilesByMIMEType(test.mimeType) @@ -264,7 +263,7 @@ func Test_imageSquashResolver_FilesByMIMEType(t *testing.T) { func Test_imageSquashResolver_hasFilesystemIDInLocation(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-duplicate-path") - resolver, err := newImageSquashResolver(img) + resolver, err := NewFromContainerImageSquash(img) assert.NoError(t, err) locations, err := resolver.FilesByMIMEType("text/plain") @@ -322,7 +321,7 @@ func TestSquashImageResolver_FilesContents(t *testing.T) { t.Run(test.name, func(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newImageSquashResolver(img) + resolver, err := NewFromContainerImageSquash(img) assert.NoError(t, err) refs, err := resolver.FilesByPath(test.path) @@ -347,12 +346,12 @@ func TestSquashImageResolver_FilesContents_errorOnDirRequest(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := 
newImageSquashResolver(img) + resolver, err := NewFromContainerImageSquash(img) assert.NoError(t, err) - var dirLoc *Location + var dirLoc *file.Location for loc := range resolver.AllLocations() { - entry, err := resolver.img.FileCatalog.Get(loc.ref) + entry, err := resolver.img.FileCatalog.Get(loc.Reference()) require.NoError(t, err) if entry.Metadata.IsDir() { dirLoc = &loc @@ -370,162 +369,130 @@ func TestSquashImageResolver_FilesContents_errorOnDirRequest(t *testing.T) { func Test_imageSquashResolver_resolvesLinks(t *testing.T) { tests := []struct { name string - runner func(FileResolver) []Location - expected []Location + runner func(file.Resolver) []file.Location + expected []file.Location }{ { name: "by mimetype", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links should not show up when searching mimetype actualLocations, err := resolver.FilesByMIMEType("text/plain") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("/etc/group", "/etc/group"), - NewVirtualLocation("/etc/passwd", "/etc/passwd"), - NewVirtualLocation("/etc/shadow", "/etc/shadow"), - NewVirtualLocation("/file-1.txt", "/file-1.txt"), - NewVirtualLocation("/file-3.txt", "/file-3.txt"), - NewVirtualLocation("/file-2.txt", "/file-2.txt"), - NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), + expected: []file.Location{ + file.NewVirtualLocation("/etc/group", "/etc/group"), + file.NewVirtualLocation("/etc/passwd", "/etc/passwd"), + file.NewVirtualLocation("/etc/shadow", "/etc/shadow"), + file.NewVirtualLocation("/file-1.txt", "/file-1.txt"), + file.NewVirtualLocation("/file-3.txt", "/file-3.txt"), + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), + file.NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), }, }, { name: "by glob to links", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are 
searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("*ink-*") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("/file-1.txt", "/link-1"), - NewVirtualLocation("/file-2.txt", "/link-2"), + expected: []file.Location{ + file.NewVirtualLocation("/file-1.txt", "/link-1"), + file.NewVirtualLocation("/file-2.txt", "/link-2"), // though this is a link, and it matches to the file, the resolver de-duplicates files // by the real path, so it is not included in the results - //NewVirtualLocation("/file-2.txt", "/link-indirect"), + //file.NewVirtualLocation("/file-2.txt", "/link-indirect"), - NewVirtualLocation("/file-3.txt", "/link-within"), + file.NewVirtualLocation("/file-3.txt", "/link-within"), }, }, { name: "by basename", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("**/file-2.txt") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ // this has two copies in the base image, which overwrites the same location - NewVirtualLocation("/file-2.txt", "/file-2.txt"), + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), }, }, { name: "by basename glob", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("**/file-?.txt") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("/file-1.txt", "/file-1.txt"), - NewVirtualLocation("/file-2.txt", "/file-2.txt"), - NewVirtualLocation("/file-3.txt", "/file-3.txt"), - NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), + expected: []file.Location{ + file.NewVirtualLocation("/file-1.txt", "/file-1.txt"), + file.NewVirtualLocation("/file-2.txt", 
"/file-2.txt"), + file.NewVirtualLocation("/file-3.txt", "/file-3.txt"), + file.NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), }, }, { name: "by basename glob to links", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { actualLocations, err := resolver.FilesByGlob("**/link-*") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - - { - LocationData: LocationData{ - Coordinates: Coordinates{ - RealPath: "/file-1.txt", - }, - VirtualPath: "/link-1", - ref: file.Reference{RealPath: "/file-1.txt"}, - }, - }, - { - LocationData: LocationData{ - - Coordinates: Coordinates{ - RealPath: "/file-2.txt", - }, - VirtualPath: "/link-2", - ref: file.Reference{RealPath: "/file-2.txt"}, - }, - }, + expected: []file.Location{ + file.NewVirtualLocation("/file-1.txt", "/link-1"), + file.NewVirtualLocation("/file-2.txt", "/link-2"), + // we already have this real file path via another link, so only one is returned - //{ - // LocationData: LocationData{ - // Coordinates: Coordinates{ - // RealPath: "/file-2.txt", - // }, - // VirtualPath: "/link-indirect", - // ref: file.Reference{RealPath: "/file-2.txt"}, - // }, - //}, - { - LocationData: LocationData{ - Coordinates: Coordinates{ - RealPath: "/file-3.txt", - }, - VirtualPath: "/link-within", - ref: file.Reference{RealPath: "/file-3.txt"}, - }, - }, + // file.NewVirtualLocation("/file-2.txt", "/link-indirect"), + + file.NewVirtualLocation("/file-3.txt", "/link-within"), }, }, { name: "by extension", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("**/*.txt") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("/file-1.txt", "/file-1.txt"), - NewVirtualLocation("/file-2.txt", "/file-2.txt"), - NewVirtualLocation("/file-3.txt", "/file-3.txt"), - 
NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), + expected: []file.Location{ + file.NewVirtualLocation("/file-1.txt", "/file-1.txt"), + file.NewVirtualLocation("/file-2.txt", "/file-2.txt"), + file.NewVirtualLocation("/file-3.txt", "/file-3.txt"), + file.NewVirtualLocation("/parent/file-4.txt", "/parent/file-4.txt"), }, }, { name: "by path to degree 1 link", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links resolve to the final file actualLocations, err := resolver.FilesByPath("/link-2") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ // we have multiple copies across layers - NewVirtualLocation("/file-2.txt", "/link-2"), + file.NewVirtualLocation("/file-2.txt", "/link-2"), }, }, { name: "by path to degree 2 link", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // multiple links resolves to the final file actualLocations, err := resolver.FilesByPath("/link-indirect") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ // we have multiple copies across layers - NewVirtualLocation("/file-2.txt", "/link-indirect"), + file.NewVirtualLocation("/file-2.txt", "/link-indirect"), }, }, } @@ -535,7 +502,7 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-symlinks") - resolver, err := newImageSquashResolver(img) + resolver, err := NewFromContainerImageSquash(img) assert.NoError(t, err) actual := test.runner(resolver) @@ -546,30 +513,10 @@ func Test_imageSquashResolver_resolvesLinks(t *testing.T) { } -func compareLocations(t *testing.T, expected, actual []Location) { - t.Helper() - ignoreUnexported := cmpopts.IgnoreFields(LocationData{}, "ref") - ignoreMetadata := cmpopts.IgnoreFields(LocationMetadata{}, "Annotations") - ignoreFS := 
cmpopts.IgnoreFields(Coordinates{}, "FileSystemID") - - sort.Sort(Locations(expected)) - sort.Sort(Locations(actual)) - - if d := cmp.Diff(expected, actual, - ignoreUnexported, - ignoreFS, - ignoreMetadata, - ); d != "" { - - t.Errorf("unexpected locations (-want +got):\n%s", d) - } - -} - func TestSquashResolver_AllLocations(t *testing.T) { img := imagetest.GetFixtureImage(t, "docker-archive", "image-files-deleted") - resolver, err := newImageSquashResolver(img) + resolver, err := NewFromContainerImageSquash(img) assert.NoError(t, err) paths := strset.New() diff --git a/syft/internal/fileresolver/deferred.go b/syft/internal/fileresolver/deferred.go new file mode 100644 index 00000000000..55dbbb1628b --- /dev/null +++ b/syft/internal/fileresolver/deferred.go @@ -0,0 +1,98 @@ +package fileresolver + +import ( + "io" + + "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" +) + +var _ file.Resolver = (*Deferred)(nil) + +func NewDeferred(creator func() (file.Resolver, error)) *Deferred { + return &Deferred{ + creator: creator, + } +} + +type Deferred struct { + creator func() (file.Resolver, error) + resolver file.Resolver +} + +func (d *Deferred) getResolver() (file.Resolver, error) { + if d.resolver == nil { + resolver, err := d.creator() + if err != nil { + return nil, err + } + d.resolver = resolver + } + return d.resolver, nil +} + +func (d *Deferred) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { + r, err := d.getResolver() + if err != nil { + return nil, err + } + return r.FileContentsByLocation(location) +} + +func (d *Deferred) HasPath(s string) bool { + r, err := d.getResolver() + if err != nil { + log.Debug("unable to get resolver: %v", err) + return false + } + return r.HasPath(s) +} + +func (d *Deferred) FilesByPath(paths ...string) ([]file.Location, error) { + r, err := d.getResolver() + if err != nil { + return nil, err + } + return r.FilesByPath(paths...) 
+} + +func (d *Deferred) FilesByGlob(patterns ...string) ([]file.Location, error) { + r, err := d.getResolver() + if err != nil { + return nil, err + } + return r.FilesByGlob(patterns...) +} + +func (d *Deferred) FilesByMIMEType(types ...string) ([]file.Location, error) { + r, err := d.getResolver() + if err != nil { + return nil, err + } + return r.FilesByMIMEType(types...) +} + +func (d *Deferred) RelativeFileByPath(location file.Location, path string) *file.Location { + r, err := d.getResolver() + if err != nil { + return nil + } + return r.RelativeFileByPath(location, path) +} + +func (d *Deferred) AllLocations() <-chan file.Location { + r, err := d.getResolver() + if err != nil { + log.Debug("unable to get resolver: %v", err) + return nil + } + return r.AllLocations() +} + +func (d *Deferred) FileMetadataByLocation(location file.Location) (file.Metadata, error) { + r, err := d.getResolver() + if err != nil { + return file.Metadata{}, err + } + return r.FileMetadataByLocation(location) +} diff --git a/syft/source/deferred_resolver_test.go b/syft/internal/fileresolver/deferred_test.go similarity index 68% rename from syft/source/deferred_resolver_test.go rename to syft/internal/fileresolver/deferred_test.go index c7cd166c305..61f592387be 100644 --- a/syft/source/deferred_resolver_test.go +++ b/syft/internal/fileresolver/deferred_test.go @@ -1,17 +1,19 @@ -package source +package fileresolver import ( "testing" "github.com/stretchr/testify/require" + + "github.com/anchore/syft/syft/file" ) func Test_NewDeferredResolver(t *testing.T) { creatorCalled := false - deferredResolver := NewDeferredResolver(func() (FileResolver, error) { + deferredResolver := NewDeferred(func() (file.Resolver, error) { creatorCalled = true - return NewMockResolverForPaths(), nil + return file.NewMockResolverForPaths(), nil }) require.False(t, creatorCalled) diff --git a/syft/source/directory_resolver.go b/syft/internal/fileresolver/directory.go similarity index 77% rename from 
syft/source/directory_resolver.go rename to syft/internal/fileresolver/directory.go index a5a0e209de5..a892360d480 100644 --- a/syft/source/directory_resolver.go +++ b/syft/internal/fileresolver/directory.go @@ -1,4 +1,4 @@ -package source +package fileresolver import ( "errors" @@ -10,9 +10,10 @@ import ( "runtime" "strings" - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" "github.com/anchore/stereoscope/pkg/filetree" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" ) const WindowsOS = "windows" @@ -23,12 +24,12 @@ var unixSystemRuntimePrefixes = []string{ "/sys", } -var errSkipPath = errors.New("skip path") +var ErrSkipPath = errors.New("skip path") -var _ FileResolver = (*directoryResolver)(nil) +var _ file.Resolver = (*Directory)(nil) -// directoryResolver implements path and content access for the directory data source. -type directoryResolver struct { +// Directory implements path and content access for the directory data source. +type Directory struct { path string base string currentWdRelativeToRoot string @@ -39,8 +40,8 @@ type directoryResolver struct { indexer *directoryIndexer } -func newDirectoryResolver(root string, base string, pathFilters ...pathIndexVisitor) (*directoryResolver, error) { - r, err := newDirectoryResolverWithoutIndex(root, base, pathFilters...) +func NewFromDirectory(root string, base string, pathFilters ...PathIndexVisitor) (*Directory, error) { + r, err := newFromDirectoryWithoutIndex(root, base, pathFilters...) 
if err != nil { return nil, err } @@ -48,7 +49,7 @@ func newDirectoryResolver(root string, base string, pathFilters ...pathIndexVisi return r, r.buildIndex() } -func newDirectoryResolverWithoutIndex(root string, base string, pathFilters ...pathIndexVisitor) (*directoryResolver, error) { +func newFromDirectoryWithoutIndex(root string, base string, pathFilters ...PathIndexVisitor) (*Directory, error) { currentWD, err := os.Getwd() if err != nil { return nil, fmt.Errorf("could not get CWD: %w", err) @@ -87,7 +88,7 @@ func newDirectoryResolverWithoutIndex(root string, base string, pathFilters ...p currentWdRelRoot = filepath.Clean(cleanRoot) } - return &directoryResolver{ + return &Directory{ path: cleanRoot, base: cleanBase, currentWd: cleanCWD, @@ -98,7 +99,7 @@ func newDirectoryResolverWithoutIndex(root string, base string, pathFilters ...p }, nil } -func (r *directoryResolver) buildIndex() error { +func (r *Directory) buildIndex() error { if r.indexer == nil { return fmt.Errorf("no directory indexer configured") } @@ -114,7 +115,7 @@ func (r *directoryResolver) buildIndex() error { return nil } -func (r directoryResolver) requestPath(userPath string) (string, error) { +func (r Directory) requestPath(userPath string) (string, error) { if filepath.IsAbs(userPath) { // don't allow input to potentially hop above root path userPath = path.Join(r.path, userPath) @@ -131,7 +132,7 @@ func (r directoryResolver) requestPath(userPath string) (string, error) { return userPath, nil } -func (r directoryResolver) responsePath(path string) string { +func (r Directory) responsePath(path string) string { // check to see if we need to encode back to Windows from posix if runtime.GOOS == WindowsOS { path = posixToWindows(path) @@ -154,22 +155,22 @@ func (r directoryResolver) responsePath(path string) string { } // HasPath indicates if the given path exists in the underlying source. 
-func (r *directoryResolver) HasPath(userPath string) bool { +func (r *Directory) HasPath(userPath string) bool { requestPath, err := r.requestPath(userPath) if err != nil { return false } - return r.tree.HasPath(file.Path(requestPath)) + return r.tree.HasPath(stereoscopeFile.Path(requestPath)) } // Stringer to represent a directory path data source -func (r directoryResolver) String() string { +func (r Directory) String() string { return fmt.Sprintf("dir:%s", r.path) } // FilesByPath returns all file.References that match the given paths from the directory. -func (r directoryResolver) FilesByPath(userPaths ...string) ([]Location, error) { - var references = make([]Location, 0) +func (r Directory) FilesByPath(userPaths ...string) ([]file.Location, error) { + var references = make([]file.Location, 0) for _, userPath := range userPaths { userStrPath, err := r.requestPath(userPath) @@ -206,7 +207,7 @@ func (r directoryResolver) FilesByPath(userPaths ...string) ([]Location, error) if ref.HasReference() { references = append(references, - NewVirtualLocationFromDirectory( + file.NewVirtualLocationFromDirectory( r.responsePath(string(ref.RealPath)), // the actual path relative to the resolver root r.responsePath(userStrPath), // the path used to access this file, relative to the resolver root *ref.Reference, @@ -219,9 +220,9 @@ func (r directoryResolver) FilesByPath(userPaths ...string) ([]Location, error) } // FilesByGlob returns all file.References that match the given path glob pattern from any layer in the image. 
-func (r directoryResolver) FilesByGlob(patterns ...string) ([]Location, error) { - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) +func (r Directory) FilesByGlob(patterns ...string) ([]file.Location, error) { + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) for _, pattern := range patterns { refVias, err := r.searchContext.SearchByGlob(pattern, filetree.FollowBasenameLinks) @@ -242,7 +243,7 @@ func (r directoryResolver) FilesByGlob(patterns ...string) ([]Location, error) { continue } - loc := NewVirtualLocationFromDirectory( + loc := file.NewVirtualLocationFromDirectory( r.responsePath(string(refVia.Reference.RealPath)), // the actual path relative to the resolver root r.responsePath(string(refVia.RequestPath)), // the path used to access this file, relative to the resolver root *refVia.Reference, @@ -257,8 +258,8 @@ func (r directoryResolver) FilesByGlob(patterns ...string) ([]Location, error) { // RelativeFileByPath fetches a single file at the given path relative to the layer squash of the given reference. // This is helpful when attempting to find a file that is in the same layer or lower as another file. For the -// directoryResolver, this is a simple path lookup. -func (r *directoryResolver) RelativeFileByPath(_ Location, path string) *Location { +// Directory, this is a simple path lookup. +func (r *Directory) RelativeFileByPath(_ file.Location, path string) *file.Location { paths, err := r.FilesByPath(path) if err != nil { return nil @@ -272,54 +273,54 @@ func (r *directoryResolver) RelativeFileByPath(_ Location, path string) *Locatio // FileContentsByLocation fetches file contents for a single file reference relative to a directory. // If the path does not exist an error is returned. 
-func (r directoryResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) { - if location.ref.RealPath == "" { +func (r Directory) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { + if location.RealPath == "" { return nil, errors.New("empty path given") } - entry, err := r.index.Get(location.ref) + entry, err := r.index.Get(location.Reference()) if err != nil { return nil, err } // don't consider directories - if entry.Type == file.TypeDirectory { - return nil, fmt.Errorf("cannot read contents of non-file %q", location.ref.RealPath) + if entry.Type == stereoscopeFile.TypeDirectory { + return nil, fmt.Errorf("cannot read contents of non-file %q", location.Reference().RealPath) } // RealPath is posix so for windows directory resolver we need to translate // to its true on disk path. - filePath := string(location.ref.RealPath) + filePath := string(location.Reference().RealPath) if runtime.GOOS == WindowsOS { filePath = posixToWindows(filePath) } - return file.NewLazyReadCloser(filePath), nil + return stereoscopeFile.NewLazyReadCloser(filePath), nil } -func (r *directoryResolver) AllLocations() <-chan Location { - results := make(chan Location) +func (r *Directory) AllLocations() <-chan file.Location { + results := make(chan file.Location) go func() { defer close(results) - for _, ref := range r.tree.AllFiles(file.AllTypes()...) { - results <- NewLocationFromDirectory(r.responsePath(string(ref.RealPath)), ref) + for _, ref := range r.tree.AllFiles(stereoscopeFile.AllTypes()...) 
{ + results <- file.NewLocationFromDirectory(r.responsePath(string(ref.RealPath)), ref) } }() return results } -func (r *directoryResolver) FileMetadataByLocation(location Location) (FileMetadata, error) { - entry, err := r.index.Get(location.ref) +func (r *Directory) FileMetadataByLocation(location file.Location) (file.Metadata, error) { + entry, err := r.index.Get(location.Reference()) if err != nil { - return FileMetadata{}, fmt.Errorf("location: %+v : %w", location, os.ErrNotExist) + return file.Metadata{}, fmt.Errorf("location: %+v : %w", location, os.ErrNotExist) } return entry.Metadata, nil } -func (r *directoryResolver) FilesByMIMEType(types ...string) ([]Location, error) { - uniqueFileIDs := file.NewFileReferenceSet() - uniqueLocations := make([]Location, 0) +func (r *Directory) FilesByMIMEType(types ...string) ([]file.Location, error) { + uniqueFileIDs := stereoscopeFile.NewFileReferenceSet() + uniqueLocations := make([]file.Location, 0) refVias, err := r.searchContext.SearchByMIMEType(types...) 
if err != nil { @@ -332,7 +333,7 @@ func (r *directoryResolver) FilesByMIMEType(types ...string) ([]Location, error) if uniqueFileIDs.Contains(*refVia.Reference) { continue } - location := NewLocationFromDirectory( + location := file.NewLocationFromDirectory( r.responsePath(string(refVia.Reference.RealPath)), *refVia.Reference, ) diff --git a/syft/source/directory_indexer.go b/syft/internal/fileresolver/directory_indexer.go similarity index 96% rename from syft/source/directory_indexer.go rename to syft/internal/fileresolver/directory_indexer.go index 186f8f8f9a6..c590e6caec0 100644 --- a/syft/source/directory_indexer.go +++ b/syft/internal/fileresolver/directory_indexer.go @@ -1,4 +1,4 @@ -package source +package fileresolver import ( "errors" @@ -20,30 +20,30 @@ import ( "github.com/anchore/syft/syft/event" ) -type pathIndexVisitor func(string, os.FileInfo, error) error +type PathIndexVisitor func(string, os.FileInfo, error) error type directoryIndexer struct { path string base string - pathIndexVisitors []pathIndexVisitor + pathIndexVisitors []PathIndexVisitor errPaths map[string]error tree filetree.ReadWriter index filetree.Index } -func newDirectoryIndexer(path, base string, visitors ...pathIndexVisitor) *directoryIndexer { +func newDirectoryIndexer(path, base string, visitors ...PathIndexVisitor) *directoryIndexer { i := &directoryIndexer{ path: path, base: base, tree: filetree.New(), index: filetree.NewIndex(), - pathIndexVisitors: append([]pathIndexVisitor{requireFileInfo, disallowByFileType, disallowUnixSystemRuntimePath}, visitors...), + pathIndexVisitors: append([]PathIndexVisitor{requireFileInfo, disallowByFileType, disallowUnixSystemRuntimePath}, visitors...), errPaths: make(map[string]error), } // these additional stateful visitors should be the first thing considered when walking / indexing i.pathIndexVisitors = append( - []pathIndexVisitor{ + []PathIndexVisitor{ i.disallowRevisitingVisitor, i.disallowFileAccessErr, }, @@ -181,7 +181,7 @@ func (r 
*directoryIndexer) indexPath(path string, info os.FileInfo, err error) ( func (r *directoryIndexer) disallowFileAccessErr(path string, _ os.FileInfo, err error) error { if r.isFileAccessErr(path, err) { - return errSkipPath + return ErrSkipPath } return nil } @@ -311,7 +311,7 @@ func (r *directoryIndexer) disallowRevisitingVisitor(path string, _ os.FileInfo, // signal to walk() that we should skip this directory entirely return fs.SkipDir } - return errSkipPath + return ErrSkipPath } return nil } @@ -330,7 +330,7 @@ func disallowByFileType(_ string, info os.FileInfo, _ error) error { } switch file.TypeFromMode(info.Mode()) { case file.TypeCharacterDevice, file.TypeSocket, file.TypeBlockDevice, file.TypeFIFO, file.TypeIrregular: - return errSkipPath + return ErrSkipPath // note: symlinks that point to these files may still get by. // We handle this later in processing to help prevent against infinite links traversal. } @@ -340,7 +340,7 @@ func disallowByFileType(_ string, info os.FileInfo, _ error) error { func requireFileInfo(_ string, info os.FileInfo, _ error) error { if info == nil { - return errSkipPath + return ErrSkipPath } return nil } diff --git a/syft/source/directory_indexer_test.go b/syft/internal/fileresolver/directory_indexer_test.go similarity index 97% rename from syft/source/directory_indexer_test.go rename to syft/internal/fileresolver/directory_indexer_test.go index b6403559d16..cccacfc2c57 100644 --- a/syft/source/directory_indexer_test.go +++ b/syft/internal/fileresolver/directory_indexer_test.go @@ -1,4 +1,4 @@ -package source +package fileresolver import ( "io/fs" @@ -172,7 +172,7 @@ func TestDirectoryIndexer_indexPath_skipsNilFileInfo(t *testing.T) { } func TestDirectoryIndexer_index(t *testing.T) { - // note: this test is testing the effects from newDirectoryResolver, indexTree, and addPathToIndex + // note: this test is testing the effects from NewFromDirectory, indexTree, and addPathToIndex indexer := 
newDirectoryIndexer("test-fixtures/system_paths/target", "") tree, index, err := indexer.build() require.NoError(t, err) @@ -237,7 +237,7 @@ func TestDirectoryIndexer_SkipsAlreadyVisitedLinkDestinations(t *testing.T) { } resolver := newDirectoryIndexer("./test-fixtures/symlinks-prune-indexing", "") // we want to cut ahead of any possible filters to see what paths are considered for indexing (closest to walking) - resolver.pathIndexVisitors = append([]pathIndexVisitor{pathObserver}, resolver.pathIndexVisitors...) + resolver.pathIndexVisitors = append([]PathIndexVisitor{pathObserver}, resolver.pathIndexVisitors...) // note: this test is NOT about the effects left on the tree or the index, but rather the WHICH paths that are // considered for indexing and HOW traversal prunes paths that have already been visited diff --git a/syft/source/directory_resolver_test.go b/syft/internal/fileresolver/directory_test.go similarity index 78% rename from syft/source/directory_resolver_test.go rename to syft/internal/fileresolver/directory_test.go index 1ab5eca8552..819c1df28b2 100644 --- a/syft/source/directory_resolver_test.go +++ b/syft/internal/fileresolver/directory_test.go @@ -1,12 +1,11 @@ //go:build !windows // +build !windows -package source +package fileresolver import ( "io" "io/fs" - "io/ioutil" "os" "path/filepath" "sort" @@ -19,7 +18,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" + "github.com/anchore/syft/syft/file" ) func TestDirectoryResolver_FilesByPath_relativeRoot(t *testing.T) { @@ -46,17 +46,18 @@ func TestDirectoryResolver_FilesByPath_relativeRoot(t *testing.T) { }, }, { - name: "should find a file from a relative path (root above cwd)", + name: "should find a file from a relative path (root above cwd)", + // TODO: refactor me! 
this test depends on the structure of the source dir not changing, which isn't great relativeRoot: "../", - input: "sbom/sbom.go", + input: "fileresolver/directory.go", expected: []string{ - "sbom/sbom.go", + "fileresolver/directory.go", }, }, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { - resolver, err := newDirectoryResolver(c.relativeRoot, "") + resolver, err := NewFromDirectory(c.relativeRoot, "") assert.NoError(t, err) refs, err := resolver.FilesByPath(c.input) @@ -95,11 +96,12 @@ func TestDirectoryResolver_FilesByPath_absoluteRoot(t *testing.T) { }, }, { - name: "should find a file from a relative path (root above cwd)", + name: "should find a file from a relative path (root above cwd)", + // TODO: refactor me! this test depends on the structure of the source dir not changing, which isn't great relativeRoot: "../", - input: "sbom/sbom.go", + input: "fileresolver/directory.go", expected: []string{ - "sbom/sbom.go", + "fileresolver/directory.go", }, }, } @@ -110,7 +112,7 @@ func TestDirectoryResolver_FilesByPath_absoluteRoot(t *testing.T) { absRoot, err := filepath.Abs(c.relativeRoot) require.NoError(t, err) - resolver, err := newDirectoryResolver(absRoot, "") + resolver, err := NewFromDirectory(absRoot, "") assert.NoError(t, err) refs, err := resolver.FilesByPath(c.input) @@ -171,7 +173,7 @@ func TestDirectoryResolver_FilesByPath(t *testing.T) { } for _, c := range cases { t.Run(c.name, func(t *testing.T) { - resolver, err := newDirectoryResolver(c.root, "") + resolver, err := NewFromDirectory(c.root, "") assert.NoError(t, err) hasPath := resolver.HasPath(c.input) @@ -219,7 +221,7 @@ func TestDirectoryResolver_MultipleFilesByPath(t *testing.T) { } for _, c := range cases { t.Run(c.name, func(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures", "") + resolver, err := NewFromDirectory("./test-fixtures", "") assert.NoError(t, err) refs, err := resolver.FilesByPath(c.input...) 
assert.NoError(t, err) @@ -232,7 +234,7 @@ func TestDirectoryResolver_MultipleFilesByPath(t *testing.T) { } func TestDirectoryResolver_FilesByGlobMultiple(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures", "") + resolver, err := NewFromDirectory("./test-fixtures", "") assert.NoError(t, err) refs, err := resolver.FilesByGlob("**/image-symlinks/file*") assert.NoError(t, err) @@ -241,7 +243,7 @@ func TestDirectoryResolver_FilesByGlobMultiple(t *testing.T) { } func TestDirectoryResolver_FilesByGlobRecursive(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/image-symlinks", "") + resolver, err := NewFromDirectory("./test-fixtures/image-symlinks", "") assert.NoError(t, err) refs, err := resolver.FilesByGlob("**/*.txt") assert.NoError(t, err) @@ -249,7 +251,7 @@ func TestDirectoryResolver_FilesByGlobRecursive(t *testing.T) { } func TestDirectoryResolver_FilesByGlobSingle(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures", "") + resolver, err := NewFromDirectory("./test-fixtures", "") assert.NoError(t, err) refs, err := resolver.FilesByGlob("**/image-symlinks/*1.txt") assert.NoError(t, err) @@ -276,7 +278,7 @@ func TestDirectoryResolver_FilesByPath_ResolvesSymlinks(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/symlinks-simple", "") + resolver, err := NewFromDirectory("./test-fixtures/symlinks-simple", "") assert.NoError(t, err) refs, err := resolver.FilesByPath(test.fixture) @@ -299,7 +301,7 @@ func TestDirectoryResolver_FilesByPath_ResolvesSymlinks(t *testing.T) { func TestDirectoryResolverDoesNotIgnoreRelativeSystemPaths(t *testing.T) { // let's make certain that "dev/place" is not ignored, since it is not "/dev/place" - resolver, err := newDirectoryResolver("test-fixtures/system_paths/target", "") + resolver, err := NewFromDirectory("test-fixtures/system_paths/target", "") assert.NoError(t, err) // all paths 
should be found (non filtering matches a path) @@ -383,35 +385,35 @@ func Test_isUnallowableFileType(t *testing.T) { info: testFileInfo{ mode: os.ModeSocket, }, - expected: errSkipPath, + expected: ErrSkipPath, }, { name: "named pipe", info: testFileInfo{ mode: os.ModeNamedPipe, }, - expected: errSkipPath, + expected: ErrSkipPath, }, { name: "char device", info: testFileInfo{ mode: os.ModeCharDevice, }, - expected: errSkipPath, + expected: ErrSkipPath, }, { name: "block device", info: testFileInfo{ mode: os.ModeDevice, }, - expected: errSkipPath, + expected: ErrSkipPath, }, { name: "irregular", info: testFileInfo{ mode: os.ModeIrregular, }, - expected: errSkipPath, + expected: ErrSkipPath, }, } for _, test := range tests { @@ -435,7 +437,7 @@ func Test_directoryResolver_FilesByMIMEType(t *testing.T) { } for _, test := range tests { t.Run(test.fixturePath, func(t *testing.T) { - resolver, err := newDirectoryResolver(test.fixturePath, "") + resolver, err := NewFromDirectory(test.fixturePath, "") assert.NoError(t, err) locations, err := resolver.FilesByMIMEType(test.mimeType) assert.NoError(t, err) @@ -448,7 +450,7 @@ func Test_directoryResolver_FilesByMIMEType(t *testing.T) { } func Test_IndexingNestedSymLinks(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/symlinks-simple", "") + resolver, err := NewFromDirectory("./test-fixtures/symlinks-simple", "") require.NoError(t, err) // check that we can get the real path @@ -499,12 +501,12 @@ func Test_IndexingNestedSymLinks(t *testing.T) { func Test_IndexingNestedSymLinks_ignoredIndexes(t *testing.T) { filterFn := func(path string, _ os.FileInfo, _ error) error { if strings.HasSuffix(path, string(filepath.Separator)+"readme") { - return errSkipPath + return ErrSkipPath } return nil } - resolver, err := newDirectoryResolver("./test-fixtures/symlinks-simple", "", filterFn) + resolver, err := NewFromDirectory("./test-fixtures/symlinks-simple", "", filterFn) require.NoError(t, err) // the path to the 
real file is PRUNED from the index, so we should NOT expect a location returned @@ -524,7 +526,7 @@ func Test_IndexingNestedSymLinks_ignoredIndexes(t *testing.T) { } func Test_IndexingNestedSymLinksOutsideOfRoot(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/symlinks-multiple-roots/root", "") + resolver, err := NewFromDirectory("./test-fixtures/symlinks-multiple-roots/root", "") require.NoError(t, err) // check that we can get the real path @@ -542,7 +544,7 @@ func Test_IndexingNestedSymLinksOutsideOfRoot(t *testing.T) { } func Test_RootViaSymlink(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/symlinked-root/nested/link-root", "") + resolver, err := NewFromDirectory("./test-fixtures/symlinked-root/nested/link-root", "") require.NoError(t, err) locations, err := resolver.FilesByPath("./file1.txt") @@ -562,28 +564,28 @@ func Test_directoryResolver_FileContentsByLocation(t *testing.T) { cwd, err := os.Getwd() require.NoError(t, err) - r, err := newDirectoryResolver(".", "") + r, err := NewFromDirectory(".", "") require.NoError(t, err) - exists, existingPath, err := r.tree.File(file.Path(filepath.Join(cwd, "test-fixtures/image-simple/file-1.txt"))) + exists, existingPath, err := r.tree.File(stereoscopeFile.Path(filepath.Join(cwd, "test-fixtures/image-simple/file-1.txt"))) require.True(t, exists) require.NoError(t, err) require.True(t, existingPath.HasReference()) tests := []struct { name string - location Location + location file.Location expects string err bool }{ { name: "use file reference for content requests", - location: NewLocationFromDirectory("some/place", *existingPath.Reference), + location: file.NewLocationFromDirectory("some/place", *existingPath.Reference), expects: "this file has contents", }, { name: "error on empty file reference", - location: NewLocationFromDirectory("doesn't matter", file.Reference{}), + location: file.NewLocationFromDirectory("doesn't matter", stereoscopeFile.Reference{}), err: 
true, }, } @@ -598,7 +600,7 @@ func Test_directoryResolver_FileContentsByLocation(t *testing.T) { require.NoError(t, err) if test.expects != "" { - b, err := ioutil.ReadAll(actual) + b, err := io.ReadAll(actual) require.NoError(t, err) assert.Equal(t, test.expects, string(b)) } @@ -649,7 +651,7 @@ func Test_isUnixSystemRuntimePath(t *testing.T) { func Test_SymlinkLoopWithGlobsShouldResolve(t *testing.T) { test := func(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/symlinks-loop", "") + resolver, err := NewFromDirectory("./test-fixtures/symlinks-loop", "") require.NoError(t, err) locations, err := resolver.FilesByGlob("**/file.target") @@ -662,20 +664,6 @@ func Test_SymlinkLoopWithGlobsShouldResolve(t *testing.T) { testWithTimeout(t, 5*time.Second, test) } -func testWithTimeout(t *testing.T, timeout time.Duration, test func(*testing.T)) { - done := make(chan bool) - go func() { - test(t) - done <- true - }() - - select { - case <-time.After(timeout): - t.Fatal("test timed out") - case <-done: - } -} - func TestDirectoryResolver_FilesByPath_baseRoot(t *testing.T) { cases := []struct { name string @@ -734,7 +722,7 @@ func TestDirectoryResolver_FilesByPath_baseRoot(t *testing.T) { } for _, c := range cases { t.Run(c.name, func(t *testing.T) { - resolver, err := newDirectoryResolver(c.root, c.root) + resolver, err := NewFromDirectory(c.root, c.root) assert.NoError(t, err) refs, err := resolver.FilesByPath(c.input) @@ -753,162 +741,132 @@ func TestDirectoryResolver_FilesByPath_baseRoot(t *testing.T) { func Test_directoryResolver_resolvesLinks(t *testing.T) { tests := []struct { name string - runner func(FileResolver) []Location - expected []Location + runner func(file.Resolver) []file.Location + expected []file.Location }{ { name: "by mimetype", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links should not show up when searching mimetype actualLocations, err := 
resolver.FilesByMIMEType("text/plain") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewLocation("file-1.txt"), // note: missing virtual path "file-1.txt" - NewLocation("file-3.txt"), // note: missing virtual path "file-3.txt" - NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt" - NewLocation("parent/file-4.txt"), // note: missing virtual path "file-4.txt" + expected: []file.Location{ + file.NewLocation("file-1.txt"), // note: missing virtual path "file-1.txt" + file.NewLocation("file-3.txt"), // note: missing virtual path "file-3.txt" + file.NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt" + file.NewLocation("parent/file-4.txt"), // note: missing virtual path "file-4.txt" }, }, { name: "by glob to links", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files // for that reason we need to place **/ in front (which is not the same for other resolvers) actualLocations, err := resolver.FilesByGlob("**/*ink-*") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("file-1.txt", "link-1"), - NewVirtualLocation("file-2.txt", "link-2"), + expected: []file.Location{ + file.NewVirtualLocation("file-1.txt", "link-1"), + file.NewVirtualLocation("file-2.txt", "link-2"), // we already have this real file path via another link, so only one is returned - //NewVirtualLocation("file-2.txt", "link-indirect"), - NewVirtualLocation("file-3.txt", "link-within"), + //file.NewVirtualLocation("file-2.txt", "link-indirect"), + file.NewVirtualLocation("file-3.txt", "link-within"), }, }, { name: "by basename", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("**/file-2.txt") assert.NoError(t, err) return actualLocations }, - 
expected: []Location{ + expected: []file.Location{ // this has two copies in the base image, which overwrites the same location - NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt", + file.NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt", }, }, { name: "by basename glob", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("**/file-?.txt") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewLocation("file-1.txt"), // note: missing virtual path "file-1.txt" - NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt" - NewLocation("file-3.txt"), // note: missing virtual path "file-3.txt" - NewLocation("parent/file-4.txt"), // note: missing virtual path "parent/file-4.txt" + expected: []file.Location{ + file.NewLocation("file-1.txt"), // note: missing virtual path "file-1.txt" + file.NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt" + file.NewLocation("file-3.txt"), // note: missing virtual path "file-3.txt" + file.NewLocation("parent/file-4.txt"), // note: missing virtual path "parent/file-4.txt" }, }, { name: "by basename glob to links", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { actualLocations, err := resolver.FilesByGlob("**/link-*") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - { - LocationData: LocationData{ - Coordinates: Coordinates{ - RealPath: "file-1.txt", - }, - VirtualPath: "link-1", - ref: file.Reference{RealPath: "file-1.txt"}, - }, - }, - { - LocationData: LocationData{ - Coordinates: Coordinates{ - RealPath: "file-2.txt", - }, - VirtualPath: "link-2", - ref: file.Reference{RealPath: "file-2.txt"}, - }, - }, + expected: []file.Location{ + file.NewVirtualLocation("file-1.txt", "link-1"), + 
file.NewVirtualLocation("file-2.txt", "link-2"), + // we already have this real file path via another link, so only one is returned - //{ - // LocationData: LocationData{ - // Coordinates: Coordinates{ - // RealPath: "file-2.txt", - // }, - // VirtualPath: "link-indirect", - // ref: file.Reference{RealPath: "file-2.txt"}, - // }, - //}, - { - LocationData: LocationData{ - Coordinates: Coordinates{ - RealPath: "file-3.txt", - }, - VirtualPath: "link-within", - ref: file.Reference{RealPath: "file-3.txt"}, - }, - }, + //file.NewVirtualLocation("file-2.txt", "link-indirect"), + + file.NewVirtualLocation("file-3.txt", "link-within"), }, }, { name: "by extension", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("**/*.txt") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewLocation("file-1.txt"), // note: missing virtual path "file-1.txt" - NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt" - NewLocation("file-3.txt"), // note: missing virtual path "file-3.txt" - NewLocation("parent/file-4.txt"), // note: missing virtual path "parent/file-4.txt" + expected: []file.Location{ + file.NewLocation("file-1.txt"), // note: missing virtual path "file-1.txt" + file.NewLocation("file-2.txt"), // note: missing virtual path "file-2.txt" + file.NewLocation("file-3.txt"), // note: missing virtual path "file-3.txt" + file.NewLocation("parent/file-4.txt"), // note: missing virtual path "parent/file-4.txt" }, }, { name: "by path to degree 1 link", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links resolve to the final file actualLocations, err := resolver.FilesByPath("/link-2") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ // we have multiple copies across layers - 
NewVirtualLocation("file-2.txt", "link-2"), + file.NewVirtualLocation("file-2.txt", "link-2"), }, }, { name: "by path to degree 2 link", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // multiple links resolves to the final file actualLocations, err := resolver.FilesByPath("/link-indirect") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ // we have multiple copies across layers - NewVirtualLocation("file-2.txt", "link-indirect"), + file.NewVirtualLocation("file-2.txt", "link-indirect"), }, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/symlinks-from-image-symlinks-fixture", "") + resolver, err := NewFromDirectory("./test-fixtures/symlinks-from-image-symlinks-fixture", "") require.NoError(t, err) assert.NoError(t, err) @@ -920,14 +878,14 @@ func Test_directoryResolver_resolvesLinks(t *testing.T) { } func TestDirectoryResolver_DoNotAddVirtualPathsToTree(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/symlinks-prune-indexing", "") + resolver, err := NewFromDirectory("./test-fixtures/symlinks-prune-indexing", "") require.NoError(t, err) - var allRealPaths []file.Path + var allRealPaths []stereoscopeFile.Path for l := range resolver.AllLocations() { - allRealPaths = append(allRealPaths, file.Path(l.RealPath)) + allRealPaths = append(allRealPaths, stereoscopeFile.Path(l.RealPath)) } - pathSet := file.NewPathSet(allRealPaths...) + pathSet := stereoscopeFile.NewPathSet(allRealPaths...) 
assert.False(t, pathSet.Contains("before-path/file.txt"), @@ -942,12 +900,12 @@ func TestDirectoryResolver_DoNotAddVirtualPathsToTree(t *testing.T) { } func TestDirectoryResolver_FilesContents_errorOnDirRequest(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/system_paths", "") + resolver, err := NewFromDirectory("./test-fixtures/system_paths", "") assert.NoError(t, err) - var dirLoc *Location + var dirLoc *file.Location for loc := range resolver.AllLocations() { - entry, err := resolver.index.Get(loc.ref) + entry, err := resolver.index.Get(loc.Reference()) require.NoError(t, err) if entry.Metadata.IsDir() { dirLoc = &loc @@ -963,13 +921,13 @@ func TestDirectoryResolver_FilesContents_errorOnDirRequest(t *testing.T) { } func TestDirectoryResolver_AllLocations(t *testing.T) { - resolver, err := newDirectoryResolver("./test-fixtures/symlinks-from-image-symlinks-fixture", "") + resolver, err := NewFromDirectory("./test-fixtures/symlinks-from-image-symlinks-fixture", "") assert.NoError(t, err) paths := strset.New() for loc := range resolver.AllLocations() { if strings.HasPrefix(loc.RealPath, "/") { - // ignore outside of the fixture root for now + // ignore outside the fixture root for now continue } paths.Add(loc.RealPath) diff --git a/syft/source/directory_resolver_windows_test.go b/syft/internal/fileresolver/directory_windows_test.go similarity index 99% rename from syft/source/directory_resolver_windows_test.go rename to syft/internal/fileresolver/directory_windows_test.go index 18cbb7856c4..115fb30a3b1 100644 --- a/syft/source/directory_resolver_windows_test.go +++ b/syft/internal/fileresolver/directory_windows_test.go @@ -1,4 +1,4 @@ -package source +package fileresolver import "testing" diff --git a/syft/internal/fileresolver/empty.go b/syft/internal/fileresolver/empty.go new file mode 100644 index 00000000000..3b08f395c87 --- /dev/null +++ b/syft/internal/fileresolver/empty.go @@ -0,0 +1,47 @@ +package fileresolver + +import ( + "io" + + 
"github.com/anchore/syft/syft/file" +) + +var _ file.WritableResolver = (*Empty)(nil) + +type Empty struct{} + +func (e Empty) FileContentsByLocation(_ file.Location) (io.ReadCloser, error) { + return nil, nil +} + +func (e Empty) HasPath(_ string) bool { + return false +} + +func (e Empty) FilesByPath(_ ...string) ([]file.Location, error) { + return nil, nil +} + +func (e Empty) FilesByGlob(_ ...string) ([]file.Location, error) { + return nil, nil +} + +func (e Empty) FilesByMIMEType(_ ...string) ([]file.Location, error) { + return nil, nil +} + +func (e Empty) RelativeFileByPath(_ file.Location, _ string) *file.Location { + return nil +} + +func (e Empty) AllLocations() <-chan file.Location { + return nil +} + +func (e Empty) FileMetadataByLocation(_ file.Location) (file.Metadata, error) { + return file.Metadata{}, nil +} + +func (e Empty) Write(_ file.Location, _ io.Reader) error { + return nil +} diff --git a/syft/source/excluding_file_resolver.go b/syft/internal/fileresolver/excluding_file.go similarity index 55% rename from syft/source/excluding_file_resolver.go rename to syft/internal/fileresolver/excluding_file.go index 50969116a81..81caa49c765 100644 --- a/syft/source/excluding_file_resolver.go +++ b/syft/internal/fileresolver/excluding_file.go @@ -1,65 +1,67 @@ -package source +package fileresolver import ( "fmt" "io" + + "github.com/anchore/syft/syft/file" ) type excludeFn func(string) bool -// excludingResolver decorates a resolver with an exclusion function that is used to +// excluding decorates a resolver with an exclusion function that is used to // filter out entries in the delegate resolver -type excludingResolver struct { - delegate FileResolver +type excluding struct { + delegate file.Resolver excludeFn excludeFn } -// NewExcludingResolver create a new resolver which wraps the provided delegate and excludes +// NewExcluding create a new resolver which wraps the provided delegate and excludes // entries based on a provided path exclusion 
function -func NewExcludingResolver(delegate FileResolver, excludeFn excludeFn) FileResolver { - return &excludingResolver{ +func NewExcluding(delegate file.Resolver, excludeFn excludeFn) file.Resolver { + return &excluding{ delegate, excludeFn, } } -func (r *excludingResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) { +func (r *excluding) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { if locationMatches(&location, r.excludeFn) { return nil, fmt.Errorf("no such location: %+v", location.RealPath) } return r.delegate.FileContentsByLocation(location) } -func (r *excludingResolver) FileMetadataByLocation(location Location) (FileMetadata, error) { +func (r *excluding) FileMetadataByLocation(location file.Location) (file.Metadata, error) { if locationMatches(&location, r.excludeFn) { - return FileMetadata{}, fmt.Errorf("no such location: %+v", location.RealPath) + return file.Metadata{}, fmt.Errorf("no such location: %+v", location.RealPath) } return r.delegate.FileMetadataByLocation(location) } -func (r *excludingResolver) HasPath(path string) bool { +func (r *excluding) HasPath(path string) bool { if r.excludeFn(path) { return false } return r.delegate.HasPath(path) } -func (r *excludingResolver) FilesByPath(paths ...string) ([]Location, error) { +func (r *excluding) FilesByPath(paths ...string) ([]file.Location, error) { locations, err := r.delegate.FilesByPath(paths...) return filterLocations(locations, err, r.excludeFn) } -func (r *excludingResolver) FilesByGlob(patterns ...string) ([]Location, error) { +func (r *excluding) FilesByGlob(patterns ...string) ([]file.Location, error) { locations, err := r.delegate.FilesByGlob(patterns...) return filterLocations(locations, err, r.excludeFn) } -func (r *excludingResolver) FilesByMIMEType(types ...string) ([]Location, error) { +func (r *excluding) FilesByMIMEType(types ...string) ([]file.Location, error) { locations, err := r.delegate.FilesByMIMEType(types...) 
return filterLocations(locations, err, r.excludeFn) } -func (r *excludingResolver) RelativeFileByPath(location Location, path string) *Location { +func (r *excluding) RelativeFileByPath(location file.Location, path string) *file.Location { l := r.delegate.RelativeFileByPath(location, path) if l != nil && locationMatches(l, r.excludeFn) { return nil @@ -67,8 +69,8 @@ func (r *excludingResolver) RelativeFileByPath(location Location, path string) * return l } -func (r *excludingResolver) AllLocations() <-chan Location { - c := make(chan Location) +func (r *excluding) AllLocations() <-chan file.Location { + c := make(chan file.Location) go func() { defer close(c) for location := range r.delegate.AllLocations() { @@ -80,11 +82,11 @@ func (r *excludingResolver) AllLocations() <-chan Location { return c } -func locationMatches(location *Location, exclusionFn excludeFn) bool { +func locationMatches(location *file.Location, exclusionFn excludeFn) bool { return exclusionFn(location.RealPath) || exclusionFn(location.VirtualPath) } -func filterLocations(locations []Location, err error, exclusionFn excludeFn) ([]Location, error) { +func filterLocations(locations []file.Location, err error, exclusionFn excludeFn) ([]file.Location, error) { if err != nil { return nil, err } diff --git a/syft/source/excluding_file_resolver_test.go b/syft/internal/fileresolver/excluding_file_test.go similarity index 66% rename from syft/source/excluding_file_resolver_test.go rename to syft/internal/fileresolver/excluding_file_test.go index c448e39210e..2ba51473682 100644 --- a/syft/source/excluding_file_resolver_test.go +++ b/syft/internal/fileresolver/excluding_file_test.go @@ -1,4 +1,4 @@ -package source +package fileresolver import ( "io" @@ -6,6 +6,8 @@ import ( "testing" "github.com/stretchr/testify/assert" + + "github.com/anchore/syft/syft/file" ) func TestExcludingResolver(t *testing.T) { @@ -54,7 +56,7 @@ func TestExcludingResolver(t *testing.T) { resolver := &mockResolver{ locations: 
test.locations, } - er := NewExcludingResolver(resolver, test.excludeFn) + er := NewExcluding(resolver, test.excludeFn) locations, _ := er.FilesByPath() assert.ElementsMatch(t, locationPaths(locations), test.expected) @@ -65,7 +67,7 @@ func TestExcludingResolver(t *testing.T) { locations, _ = er.FilesByMIMEType() assert.ElementsMatch(t, locationPaths(locations), test.expected) - locations = []Location{} + locations = []file.Location{} channel := er.AllLocations() for location := range channel { @@ -77,25 +79,25 @@ func TestExcludingResolver(t *testing.T) { for _, path := range diff { assert.False(t, er.HasPath(path)) - c, err := er.FileContentsByLocation(NewLocation(path)) + c, err := er.FileContentsByLocation(file.NewLocation(path)) assert.Nil(t, c) assert.Error(t, err) - m, err := er.FileMetadataByLocation(NewLocation(path)) + m, err := er.FileMetadataByLocation(file.NewLocation(path)) assert.Empty(t, m.LinkDestination) assert.Error(t, err) - l := er.RelativeFileByPath(NewLocation(""), path) + l := er.RelativeFileByPath(file.NewLocation(""), path) assert.Nil(t, l) } for _, path := range test.expected { assert.True(t, er.HasPath(path)) - c, err := er.FileContentsByLocation(NewLocation(path)) + c, err := er.FileContentsByLocation(file.NewLocation(path)) assert.NotNil(t, c) assert.Nil(t, err) - m, err := er.FileMetadataByLocation(NewLocation(path)) + m, err := er.FileMetadataByLocation(file.NewLocation(path)) assert.NotEmpty(t, m.LinkDestination) assert.Nil(t, err) - l := er.RelativeFileByPath(NewLocation(""), path) + l := er.RelativeFileByPath(file.NewLocation(""), path) assert.NotNil(t, l) } }) @@ -117,7 +119,7 @@ func difference(a, b []string) []string { return diff } -func locationPaths(locations []Location) []string { +func locationPaths(locations []file.Location) []string { paths := []string{} for _, l := range locations { paths = append(paths, l.RealPath) @@ -129,20 +131,20 @@ type mockResolver struct { locations []string } -func (r *mockResolver) 
getLocations() ([]Location, error) { - out := []Location{} +func (r *mockResolver) getLocations() ([]file.Location, error) { + out := []file.Location{} for _, path := range r.locations { - out = append(out, NewLocation(path)) + out = append(out, file.NewLocation(path)) } return out, nil } -func (r *mockResolver) FileContentsByLocation(_ Location) (io.ReadCloser, error) { +func (r *mockResolver) FileContentsByLocation(_ file.Location) (io.ReadCloser, error) { return io.NopCloser(strings.NewReader("Hello, world!")), nil } -func (r *mockResolver) FileMetadataByLocation(_ Location) (FileMetadata, error) { - return FileMetadata{ +func (r *mockResolver) FileMetadataByLocation(_ file.Location) (file.Metadata, error) { + return file.Metadata{ LinkDestination: "MOCK", }, nil } @@ -151,37 +153,37 @@ func (r *mockResolver) HasPath(_ string) bool { return true } -func (r *mockResolver) FilesByPath(_ ...string) ([]Location, error) { +func (r *mockResolver) FilesByPath(_ ...string) ([]file.Location, error) { return r.getLocations() } -func (r *mockResolver) FilesByGlob(_ ...string) ([]Location, error) { +func (r *mockResolver) FilesByGlob(_ ...string) ([]file.Location, error) { return r.getLocations() } -func (r *mockResolver) FilesByMIMEType(_ ...string) ([]Location, error) { +func (r *mockResolver) FilesByMIMEType(_ ...string) ([]file.Location, error) { return r.getLocations() } -func (r *mockResolver) FilesByExtension(_ ...string) ([]Location, error) { +func (r *mockResolver) FilesByExtension(_ ...string) ([]file.Location, error) { return r.getLocations() } -func (r *mockResolver) FilesByBasename(_ ...string) ([]Location, error) { +func (r *mockResolver) FilesByBasename(_ ...string) ([]file.Location, error) { return r.getLocations() } -func (r *mockResolver) FilesByBasenameGlob(_ ...string) ([]Location, error) { +func (r *mockResolver) FilesByBasenameGlob(_ ...string) ([]file.Location, error) { return r.getLocations() } -func (r *mockResolver) RelativeFileByPath(_ Location, 
path string) *Location { - l := NewLocation(path) +func (r *mockResolver) RelativeFileByPath(_ file.Location, path string) *file.Location { + l := file.NewLocation(path) return &l } -func (r *mockResolver) AllLocations() <-chan Location { - c := make(chan Location) +func (r *mockResolver) AllLocations() <-chan file.Location { + c := make(chan file.Location) go func() { defer close(c) locations, _ := r.getLocations() diff --git a/syft/internal/fileresolver/file_metadata_by_location.go b/syft/internal/fileresolver/file_metadata_by_location.go new file mode 100644 index 00000000000..9d5974d9b05 --- /dev/null +++ b/syft/internal/fileresolver/file_metadata_by_location.go @@ -0,0 +1,15 @@ +package fileresolver + +import ( + "github.com/anchore/stereoscope/pkg/image" + "github.com/anchore/syft/syft/file" +) + +func fileMetadataByLocation(img *image.Image, location file.Location) (file.Metadata, error) { + entry, err := img.FileCatalog.Get(location.Reference()) + if err != nil { + return file.Metadata{}, err + } + + return entry.Metadata, nil +} diff --git a/syft/internal/fileresolver/test-fixtures/generate-tar-fixture-from-source-dir.sh b/syft/internal/fileresolver/test-fixtures/generate-tar-fixture-from-source-dir.sh new file mode 100755 index 00000000000..922941d36fb --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/generate-tar-fixture-from-source-dir.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash +set -eux + +# $1 —— absolute path to destination file, should end with .tar +# $2 —— absolute path to directory from which to add entries to the archive + +pushd "$2" + tar -cvf "$1" . 
+popd diff --git a/syft/source/test-fixtures/image-duplicate-path/Dockerfile b/syft/internal/fileresolver/test-fixtures/image-duplicate-path/Dockerfile similarity index 100% rename from syft/source/test-fixtures/image-duplicate-path/Dockerfile rename to syft/internal/fileresolver/test-fixtures/image-duplicate-path/Dockerfile diff --git a/syft/source/test-fixtures/image-duplicate-path/file-1.txt b/syft/internal/fileresolver/test-fixtures/image-duplicate-path/file-1.txt similarity index 100% rename from syft/source/test-fixtures/image-duplicate-path/file-1.txt rename to syft/internal/fileresolver/test-fixtures/image-duplicate-path/file-1.txt diff --git a/syft/source/test-fixtures/image-duplicate-path/file-2.txt b/syft/internal/fileresolver/test-fixtures/image-duplicate-path/file-2.txt similarity index 100% rename from syft/source/test-fixtures/image-duplicate-path/file-2.txt rename to syft/internal/fileresolver/test-fixtures/image-duplicate-path/file-2.txt diff --git a/syft/source/test-fixtures/image-files-deleted/Dockerfile b/syft/internal/fileresolver/test-fixtures/image-files-deleted/Dockerfile similarity index 100% rename from syft/source/test-fixtures/image-files-deleted/Dockerfile rename to syft/internal/fileresolver/test-fixtures/image-files-deleted/Dockerfile diff --git a/syft/source/test-fixtures/image-files-deleted/file-1.txt b/syft/internal/fileresolver/test-fixtures/image-files-deleted/file-1.txt similarity index 100% rename from syft/source/test-fixtures/image-files-deleted/file-1.txt rename to syft/internal/fileresolver/test-fixtures/image-files-deleted/file-1.txt diff --git a/syft/source/test-fixtures/image-files-deleted/file-3.txt b/syft/internal/fileresolver/test-fixtures/image-files-deleted/file-3.txt similarity index 100% rename from syft/source/test-fixtures/image-files-deleted/file-3.txt rename to syft/internal/fileresolver/test-fixtures/image-files-deleted/file-3.txt diff --git a/syft/source/test-fixtures/image-files-deleted/target/file-2.txt 
b/syft/internal/fileresolver/test-fixtures/image-files-deleted/target/file-2.txt similarity index 100% rename from syft/source/test-fixtures/image-files-deleted/target/file-2.txt rename to syft/internal/fileresolver/test-fixtures/image-files-deleted/target/file-2.txt diff --git a/syft/internal/fileresolver/test-fixtures/image-simple/Dockerfile b/syft/internal/fileresolver/test-fixtures/image-simple/Dockerfile new file mode 100644 index 00000000000..62fb151e497 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/image-simple/Dockerfile @@ -0,0 +1,6 @@ +# Note: changes to this file will result in updating several test values. Consider making a new image fixture instead of editing this one. +FROM scratch +ADD file-1.txt /somefile-1.txt +ADD file-2.txt /somefile-2.txt +# note: adding a directory will behave differently on docker engine v18 vs v19 +ADD target / diff --git a/syft/internal/fileresolver/test-fixtures/image-simple/file-1.txt b/syft/internal/fileresolver/test-fixtures/image-simple/file-1.txt new file mode 100644 index 00000000000..985d3408e98 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/image-simple/file-1.txt @@ -0,0 +1 @@ +this file has contents \ No newline at end of file diff --git a/syft/internal/fileresolver/test-fixtures/image-simple/file-2.txt b/syft/internal/fileresolver/test-fixtures/image-simple/file-2.txt new file mode 100644 index 00000000000..396d08bbc72 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/image-simple/file-2.txt @@ -0,0 +1 @@ +file-2 contents! \ No newline at end of file diff --git a/syft/internal/fileresolver/test-fixtures/image-simple/target/really/nested/file-3.txt b/syft/internal/fileresolver/test-fixtures/image-simple/target/really/nested/file-3.txt new file mode 100644 index 00000000000..f85472c937d --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/image-simple/target/really/nested/file-3.txt @@ -0,0 +1,2 @@ +another file! +with lines... 
\ No newline at end of file diff --git a/syft/source/test-fixtures/image-symlinks/Dockerfile b/syft/internal/fileresolver/test-fixtures/image-symlinks/Dockerfile similarity index 100% rename from syft/source/test-fixtures/image-symlinks/Dockerfile rename to syft/internal/fileresolver/test-fixtures/image-symlinks/Dockerfile diff --git a/syft/internal/fileresolver/test-fixtures/image-symlinks/file-1.txt b/syft/internal/fileresolver/test-fixtures/image-symlinks/file-1.txt new file mode 100644 index 00000000000..d86db8155c3 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/image-symlinks/file-1.txt @@ -0,0 +1 @@ +file 1! \ No newline at end of file diff --git a/syft/source/test-fixtures/image-symlinks/file-2.txt b/syft/internal/fileresolver/test-fixtures/image-symlinks/file-2.txt similarity index 100% rename from syft/source/test-fixtures/image-symlinks/file-2.txt rename to syft/internal/fileresolver/test-fixtures/image-symlinks/file-2.txt diff --git a/syft/source/test-fixtures/image-symlinks/nested/nested/file-3.txt b/syft/internal/fileresolver/test-fixtures/image-symlinks/nested/nested/file-3.txt similarity index 100% rename from syft/source/test-fixtures/image-symlinks/nested/nested/file-3.txt rename to syft/internal/fileresolver/test-fixtures/image-symlinks/nested/nested/file-3.txt diff --git a/syft/source/test-fixtures/image-symlinks/new-file-2.txt b/syft/internal/fileresolver/test-fixtures/image-symlinks/new-file-2.txt similarity index 100% rename from syft/source/test-fixtures/image-symlinks/new-file-2.txt rename to syft/internal/fileresolver/test-fixtures/image-symlinks/new-file-2.txt diff --git a/syft/source/test-fixtures/image-symlinks/new-file-4.txt b/syft/internal/fileresolver/test-fixtures/image-symlinks/new-file-4.txt similarity index 100% rename from syft/source/test-fixtures/image-symlinks/new-file-4.txt rename to syft/internal/fileresolver/test-fixtures/image-symlinks/new-file-4.txt diff --git 
a/syft/source/test-fixtures/image-symlinks/parent/file-4.txt b/syft/internal/fileresolver/test-fixtures/image-symlinks/parent/file-4.txt similarity index 100% rename from syft/source/test-fixtures/image-symlinks/parent/file-4.txt rename to syft/internal/fileresolver/test-fixtures/image-symlinks/parent/file-4.txt diff --git a/syft/internal/fileresolver/test-fixtures/path-detected-2/.vimrc b/syft/internal/fileresolver/test-fixtures/path-detected-2/.vimrc new file mode 100644 index 00000000000..7f865a925e7 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/path-detected-2/.vimrc @@ -0,0 +1 @@ +Another .vimrc file \ No newline at end of file diff --git a/syft/source/test-fixtures/symlinks-base/sub/item b/syft/internal/fileresolver/test-fixtures/path-detected-2/empty similarity index 100% rename from syft/source/test-fixtures/symlinks-base/sub/item rename to syft/internal/fileresolver/test-fixtures/path-detected-2/empty diff --git a/syft/internal/fileresolver/test-fixtures/path-detected/.vimrc b/syft/internal/fileresolver/test-fixtures/path-detected/.vimrc new file mode 100644 index 00000000000..93b07e21b93 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/path-detected/.vimrc @@ -0,0 +1 @@ +" A .vimrc file diff --git a/syft/internal/fileresolver/test-fixtures/path-detected/empty b/syft/internal/fileresolver/test-fixtures/path-detected/empty new file mode 100644 index 00000000000..e69de29bb2d diff --git a/syft/source/test-fixtures/symlinked-root/nested/link-root b/syft/internal/fileresolver/test-fixtures/symlinked-root/nested/link-root similarity index 100% rename from syft/source/test-fixtures/symlinked-root/nested/link-root rename to syft/internal/fileresolver/test-fixtures/symlinked-root/nested/link-root diff --git a/syft/source/test-fixtures/symlinked-root/real-root/file1.txt b/syft/internal/fileresolver/test-fixtures/symlinked-root/real-root/file1.txt similarity index 100% rename from syft/source/test-fixtures/symlinked-root/real-root/file1.txt 
rename to syft/internal/fileresolver/test-fixtures/symlinked-root/real-root/file1.txt diff --git a/syft/source/test-fixtures/symlinked-root/real-root/nested/file2.txt b/syft/internal/fileresolver/test-fixtures/symlinked-root/real-root/nested/file2.txt similarity index 100% rename from syft/source/test-fixtures/symlinked-root/real-root/nested/file2.txt rename to syft/internal/fileresolver/test-fixtures/symlinked-root/real-root/nested/file2.txt diff --git a/syft/source/test-fixtures/symlinked-root/real-root/nested/linked-file1.txt b/syft/internal/fileresolver/test-fixtures/symlinked-root/real-root/nested/linked-file1.txt similarity index 100% rename from syft/source/test-fixtures/symlinked-root/real-root/nested/linked-file1.txt rename to syft/internal/fileresolver/test-fixtures/symlinked-root/real-root/nested/linked-file1.txt diff --git a/syft/source/test-fixtures/symlinks-base/bar b/syft/internal/fileresolver/test-fixtures/symlinks-base/bar similarity index 100% rename from syft/source/test-fixtures/symlinks-base/bar rename to syft/internal/fileresolver/test-fixtures/symlinks-base/bar diff --git a/syft/internal/fileresolver/test-fixtures/symlinks-base/base b/syft/internal/fileresolver/test-fixtures/symlinks-base/base new file mode 100644 index 00000000000..e69de29bb2d diff --git a/syft/source/test-fixtures/symlinks-base/baz b/syft/internal/fileresolver/test-fixtures/symlinks-base/baz similarity index 100% rename from syft/source/test-fixtures/symlinks-base/baz rename to syft/internal/fileresolver/test-fixtures/symlinks-base/baz diff --git a/syft/source/test-fixtures/symlinks-base/chain b/syft/internal/fileresolver/test-fixtures/symlinks-base/chain similarity index 100% rename from syft/source/test-fixtures/symlinks-base/chain rename to syft/internal/fileresolver/test-fixtures/symlinks-base/chain diff --git a/syft/source/test-fixtures/symlinks-base/foo b/syft/internal/fileresolver/test-fixtures/symlinks-base/foo similarity index 100% rename from 
syft/source/test-fixtures/symlinks-base/foo rename to syft/internal/fileresolver/test-fixtures/symlinks-base/foo diff --git a/syft/internal/fileresolver/test-fixtures/symlinks-base/sub/item b/syft/internal/fileresolver/test-fixtures/symlinks-base/sub/item new file mode 100644 index 00000000000..e69de29bb2d diff --git a/syft/source/test-fixtures/symlinks-base/sub/link b/syft/internal/fileresolver/test-fixtures/symlinks-base/sub/link similarity index 100% rename from syft/source/test-fixtures/symlinks-base/sub/link rename to syft/internal/fileresolver/test-fixtures/symlinks-base/sub/link diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/file-1.txt b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/file-1.txt similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/file-1.txt rename to syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/file-1.txt diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/file-2.txt b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/file-2.txt similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/file-2.txt rename to syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/file-2.txt diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/file-3.txt b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/file-3.txt similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/file-3.txt rename to syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/file-3.txt diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-1 b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-1 similarity index 100% rename from 
syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-1 rename to syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-1 diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-2 b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-2 similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-2 rename to syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-2 diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-dead b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-dead similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-dead rename to syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-dead diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-indirect b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-indirect similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-indirect rename to syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-indirect diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-within b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-within similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/link-within rename to syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/link-within diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/parent-link b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/parent-link similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/parent-link rename to 
syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/parent-link diff --git a/syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/parent/file-4.txt b/syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/parent/file-4.txt similarity index 100% rename from syft/source/test-fixtures/symlinks-from-image-symlinks-fixture/parent/file-4.txt rename to syft/internal/fileresolver/test-fixtures/symlinks-from-image-symlinks-fixture/parent/file-4.txt diff --git a/syft/source/test-fixtures/symlinks-loop/README.md b/syft/internal/fileresolver/test-fixtures/symlinks-loop/README.md similarity index 100% rename from syft/source/test-fixtures/symlinks-loop/README.md rename to syft/internal/fileresolver/test-fixtures/symlinks-loop/README.md diff --git a/syft/source/test-fixtures/symlinks-loop/block/loop0 b/syft/internal/fileresolver/test-fixtures/symlinks-loop/block/loop0 similarity index 100% rename from syft/source/test-fixtures/symlinks-loop/block/loop0 rename to syft/internal/fileresolver/test-fixtures/symlinks-loop/block/loop0 diff --git a/syft/source/test-fixtures/symlinks-loop/devices/loop0/file.target b/syft/internal/fileresolver/test-fixtures/symlinks-loop/devices/loop0/file.target similarity index 100% rename from syft/source/test-fixtures/symlinks-loop/devices/loop0/file.target rename to syft/internal/fileresolver/test-fixtures/symlinks-loop/devices/loop0/file.target diff --git a/syft/source/test-fixtures/symlinks-loop/devices/loop0/subsystem b/syft/internal/fileresolver/test-fixtures/symlinks-loop/devices/loop0/subsystem similarity index 100% rename from syft/source/test-fixtures/symlinks-loop/devices/loop0/subsystem rename to syft/internal/fileresolver/test-fixtures/symlinks-loop/devices/loop0/subsystem diff --git a/syft/source/test-fixtures/symlinks-multiple-roots/outside/link_to_readme b/syft/internal/fileresolver/test-fixtures/symlinks-multiple-roots/outside/link_to_readme similarity index 100% rename from 
syft/source/test-fixtures/symlinks-multiple-roots/outside/link_to_readme rename to syft/internal/fileresolver/test-fixtures/symlinks-multiple-roots/outside/link_to_readme diff --git a/syft/source/test-fixtures/symlinks-multiple-roots/root/link_to_link_to_readme b/syft/internal/fileresolver/test-fixtures/symlinks-multiple-roots/root/link_to_link_to_readme similarity index 100% rename from syft/source/test-fixtures/symlinks-multiple-roots/root/link_to_link_to_readme rename to syft/internal/fileresolver/test-fixtures/symlinks-multiple-roots/root/link_to_link_to_readme diff --git a/syft/source/test-fixtures/symlinks-multiple-roots/root/readme b/syft/internal/fileresolver/test-fixtures/symlinks-multiple-roots/root/readme similarity index 100% rename from syft/source/test-fixtures/symlinks-multiple-roots/root/readme rename to syft/internal/fileresolver/test-fixtures/symlinks-multiple-roots/root/readme diff --git a/syft/source/test-fixtures/symlinks-prune-indexing/before-path b/syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/before-path similarity index 100% rename from syft/source/test-fixtures/symlinks-prune-indexing/before-path rename to syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/before-path diff --git a/syft/source/test-fixtures/symlinks-prune-indexing/c-file.txt b/syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/c-file.txt similarity index 100% rename from syft/source/test-fixtures/symlinks-prune-indexing/c-file.txt rename to syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/c-file.txt diff --git a/syft/source/test-fixtures/symlinks-prune-indexing/c-path b/syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/c-path similarity index 100% rename from syft/source/test-fixtures/symlinks-prune-indexing/c-path rename to syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/c-path diff --git a/syft/source/test-fixtures/symlinks-prune-indexing/path/1/2/3/4/dont-index-me-twice.txt 
b/syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/path/1/2/3/4/dont-index-me-twice.txt similarity index 100% rename from syft/source/test-fixtures/symlinks-prune-indexing/path/1/2/3/4/dont-index-me-twice.txt rename to syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/path/1/2/3/4/dont-index-me-twice.txt diff --git a/syft/source/test-fixtures/symlinks-prune-indexing/path/5/6/7/8/dont-index-me-twice-either.txt b/syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/path/5/6/7/8/dont-index-me-twice-either.txt similarity index 100% rename from syft/source/test-fixtures/symlinks-prune-indexing/path/5/6/7/8/dont-index-me-twice-either.txt rename to syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/path/5/6/7/8/dont-index-me-twice-either.txt diff --git a/syft/source/test-fixtures/symlinks-prune-indexing/path/file.txt b/syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/path/file.txt similarity index 100% rename from syft/source/test-fixtures/symlinks-prune-indexing/path/file.txt rename to syft/internal/fileresolver/test-fixtures/symlinks-prune-indexing/path/file.txt diff --git a/syft/source/test-fixtures/symlinks-simple/link_to_link_to_new_readme b/syft/internal/fileresolver/test-fixtures/symlinks-simple/link_to_link_to_new_readme similarity index 100% rename from syft/source/test-fixtures/symlinks-simple/link_to_link_to_new_readme rename to syft/internal/fileresolver/test-fixtures/symlinks-simple/link_to_link_to_new_readme diff --git a/syft/source/test-fixtures/symlinks-simple/link_to_new_readme b/syft/internal/fileresolver/test-fixtures/symlinks-simple/link_to_new_readme similarity index 100% rename from syft/source/test-fixtures/symlinks-simple/link_to_new_readme rename to syft/internal/fileresolver/test-fixtures/symlinks-simple/link_to_new_readme diff --git a/syft/source/test-fixtures/symlinks-simple/readme b/syft/internal/fileresolver/test-fixtures/symlinks-simple/readme similarity index 100% 
rename from syft/source/test-fixtures/symlinks-simple/readme rename to syft/internal/fileresolver/test-fixtures/symlinks-simple/readme diff --git a/syft/internal/fileresolver/test-fixtures/system_paths/outside_root/link_target/place b/syft/internal/fileresolver/test-fixtures/system_paths/outside_root/link_target/place new file mode 100644 index 00000000000..476e93d5714 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/system_paths/outside_root/link_target/place @@ -0,0 +1 @@ +good \ No newline at end of file diff --git a/syft/internal/fileresolver/test-fixtures/system_paths/target/dev/place b/syft/internal/fileresolver/test-fixtures/system_paths/target/dev/place new file mode 100644 index 00000000000..44d6628cdc6 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/system_paths/target/dev/place @@ -0,0 +1 @@ +bad \ No newline at end of file diff --git a/syft/internal/fileresolver/test-fixtures/system_paths/target/home/place b/syft/internal/fileresolver/test-fixtures/system_paths/target/home/place new file mode 100644 index 00000000000..476e93d5714 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/system_paths/target/home/place @@ -0,0 +1 @@ +good \ No newline at end of file diff --git a/syft/source/test-fixtures/system_paths/target/link/a-symlink b/syft/internal/fileresolver/test-fixtures/system_paths/target/link/a-symlink similarity index 100% rename from syft/source/test-fixtures/system_paths/target/link/a-symlink rename to syft/internal/fileresolver/test-fixtures/system_paths/target/link/a-symlink diff --git a/syft/internal/fileresolver/test-fixtures/system_paths/target/proc/place b/syft/internal/fileresolver/test-fixtures/system_paths/target/proc/place new file mode 100644 index 00000000000..44d6628cdc6 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/system_paths/target/proc/place @@ -0,0 +1 @@ +bad \ No newline at end of file diff --git a/syft/internal/fileresolver/test-fixtures/system_paths/target/sys/place 
b/syft/internal/fileresolver/test-fixtures/system_paths/target/sys/place new file mode 100644 index 00000000000..44d6628cdc6 --- /dev/null +++ b/syft/internal/fileresolver/test-fixtures/system_paths/target/sys/place @@ -0,0 +1 @@ +bad \ No newline at end of file diff --git a/syft/source/unindexed_directory_resolver.go b/syft/internal/fileresolver/unindexed_directory.go similarity index 80% rename from syft/source/unindexed_directory_resolver.go rename to syft/internal/fileresolver/unindexed_directory.go index e965fef5c34..ae2300c5a39 100644 --- a/syft/source/unindexed_directory_resolver.go +++ b/syft/internal/fileresolver/unindexed_directory.go @@ -1,4 +1,4 @@ -package source +package fileresolver import ( "fmt" @@ -16,9 +16,13 @@ import ( "golang.org/x/exp/slices" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" ) -type UnindexedDirectoryResolver struct { +var _ file.Resolver = (*UnindexedDirectory)(nil) +var _ file.WritableResolver = (*UnindexedDirectory)(nil) + +type UnindexedDirectory struct { ls afero.Lstater lr afero.LinkReader base string @@ -26,15 +30,15 @@ type UnindexedDirectoryResolver struct { fs afero.Fs } -func NewUnindexedDirectoryResolver(dir string) WritableFileResolver { - return NewUnindexedDirectoryResolverFS(afero.NewOsFs(), dir, "") +func NewFromUnindexedDirectory(dir string) file.WritableResolver { + return NewFromUnindexedDirectoryFS(afero.NewOsFs(), dir, "") } -func NewUnindexedDirectoryResolverRooted(dir string, base string) WritableFileResolver { - return NewUnindexedDirectoryResolverFS(afero.NewOsFs(), dir, base) +func NewFromRootedUnindexedDirectory(dir string, base string) file.WritableResolver { + return NewFromUnindexedDirectoryFS(afero.NewOsFs(), dir, base) } -func NewUnindexedDirectoryResolverFS(fs afero.Fs, dir string, base string) WritableFileResolver { +func NewFromUnindexedDirectoryFS(fs afero.Fs, dir string, base string) file.WritableResolver { ls, ok := fs.(afero.Lstater) if !ok { 
panic(fmt.Sprintf("unable to get afero.Lstater interface from: %+v", fs)) @@ -62,7 +66,7 @@ func NewUnindexedDirectoryResolverFS(fs afero.Fs, dir string, base string) Writa base = path.Clean(path.Join(wd, base)) } } - return UnindexedDirectoryResolver{ + return UnindexedDirectory{ base: base, dir: dir, fs: fs, @@ -71,7 +75,7 @@ func NewUnindexedDirectoryResolverFS(fs afero.Fs, dir string, base string) Writa } } -func (u UnindexedDirectoryResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) { +func (u UnindexedDirectory) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { p := u.absPath(u.scrubInputPath(location.RealPath)) f, err := u.fs.Open(p) if err != nil { @@ -89,29 +93,29 @@ func (u UnindexedDirectoryResolver) FileContentsByLocation(location Location) (i // - full symlink resolution should be performed on all requests // - returns locations for any file or directory -func (u UnindexedDirectoryResolver) HasPath(p string) bool { +func (u UnindexedDirectory) HasPath(p string) bool { locs, err := u.filesByPath(true, true, p) return err == nil && len(locs) > 0 } -func (u UnindexedDirectoryResolver) canLstat(p string) bool { +func (u UnindexedDirectory) canLstat(p string) bool { _, _, err := u.ls.LstatIfPossible(u.absPath(p)) return err == nil } -func (u UnindexedDirectoryResolver) isRegularFile(p string) bool { +func (u UnindexedDirectory) isRegularFile(p string) bool { fi, _, err := u.ls.LstatIfPossible(u.absPath(p)) return err == nil && !fi.IsDir() } -func (u UnindexedDirectoryResolver) scrubInputPath(p string) string { +func (u UnindexedDirectory) scrubInputPath(p string) string { if path.IsAbs(p) { p = p[1:] } return path.Clean(p) } -func (u UnindexedDirectoryResolver) scrubResolutionPath(p string) string { +func (u UnindexedDirectory) scrubResolutionPath(p string) string { if u.base != "" { if path.IsAbs(p) { p = p[1:] @@ -123,7 +127,7 @@ func (u UnindexedDirectoryResolver) scrubResolutionPath(p string) string { return 
path.Clean(p) } -func (u UnindexedDirectoryResolver) absPath(p string) string { +func (u UnindexedDirectory) absPath(p string) string { if u.base != "" { if path.IsAbs(p) { p = p[1:] @@ -142,11 +146,11 @@ func (u UnindexedDirectoryResolver) absPath(p string) string { // - full symlink resolution should be performed on all requests // - only returns locations to files (NOT directories) -func (u UnindexedDirectoryResolver) FilesByPath(paths ...string) (out []Location, _ error) { +func (u UnindexedDirectory) FilesByPath(paths ...string) (out []file.Location, _ error) { return u.filesByPath(true, false, paths...) } -func (u UnindexedDirectoryResolver) filesByPath(resolveLinks bool, includeDirs bool, paths ...string) (out []Location, _ error) { +func (u UnindexedDirectory) filesByPath(resolveLinks bool, includeDirs bool, paths ...string) (out []file.Location, _ error) { // sort here for stable output sort.Strings(paths) nextPath: @@ -176,11 +180,11 @@ nextPath: // - full symlink resolution should be performed on all requests // - if multiple paths to the same file are found, the best single match should be returned // - only returns locations to files (NOT directories) -func (u UnindexedDirectoryResolver) FilesByGlob(patterns ...string) (out []Location, _ error) { +func (u UnindexedDirectory) FilesByGlob(patterns ...string) (out []file.Location, _ error) { return u.filesByGlob(true, false, patterns...) } -func (u UnindexedDirectoryResolver) filesByGlob(resolveLinks bool, includeDirs bool, patterns ...string) (out []Location, _ error) { +func (u UnindexedDirectory) filesByGlob(resolveLinks bool, includeDirs bool, patterns ...string) (out []file.Location, _ error) { f := unindexedDirectoryResolverFS{ u: u, } @@ -199,13 +203,13 @@ func (u UnindexedDirectoryResolver) filesByGlob(resolveLinks bool, includeDirs b return u.filesByPath(resolveLinks, includeDirs, paths...) 
} -func (u UnindexedDirectoryResolver) FilesByMIMEType(_ ...string) ([]Location, error) { +func (u UnindexedDirectory) FilesByMIMEType(_ ...string) ([]file.Location, error) { panic("FilesByMIMEType unsupported") } // RelativeFileByPath fetches a single file at the given path relative to the layer squash of the given reference. // This is helpful when attempting to find a file that is in the same layer or lower as another file. -func (u UnindexedDirectoryResolver) RelativeFileByPath(l Location, p string) *Location { +func (u UnindexedDirectory) RelativeFileByPath(l file.Location, p string) *file.Location { p = path.Clean(path.Join(l.RealPath, p)) locs, err := u.filesByPath(true, false, p) if err != nil || len(locs) == 0 { @@ -221,8 +225,8 @@ func (u UnindexedDirectoryResolver) RelativeFileByPath(l Location, p string) *Lo // - NO symlink resolution should be performed on results // - returns locations for any file or directory -func (u UnindexedDirectoryResolver) AllLocations() <-chan Location { - out := make(chan Location) +func (u UnindexedDirectory) AllLocations() <-chan file.Location { + out := make(chan file.Location) go func() { defer close(out) err := afero.Walk(u.fs, u.absPath("."), func(p string, info fs.FileInfo, err error) error { @@ -231,7 +235,7 @@ func (u UnindexedDirectoryResolver) AllLocations() <-chan Location { return nil } p = strings.TrimPrefix(p, "/") - out <- NewLocation(p) + out <- file.NewLocation(p) return nil }) if err != nil { @@ -241,11 +245,11 @@ func (u UnindexedDirectoryResolver) AllLocations() <-chan Location { return out } -func (u UnindexedDirectoryResolver) FileMetadataByLocation(_ Location) (FileMetadata, error) { +func (u UnindexedDirectory) FileMetadataByLocation(_ file.Location) (file.Metadata, error) { panic("FileMetadataByLocation unsupported") } -func (u UnindexedDirectoryResolver) Write(location Location, reader io.Reader) error { +func (u UnindexedDirectory) Write(location file.Location, reader io.Reader) error { filePath 
:= location.RealPath if path.IsAbs(filePath) { filePath = filePath[1:] @@ -254,10 +258,7 @@ func (u UnindexedDirectoryResolver) Write(location Location, reader io.Reader) e return afero.WriteReader(u.fs, absPath, reader) } -var _ FileResolver = (*UnindexedDirectoryResolver)(nil) -var _ WritableFileResolver = (*UnindexedDirectoryResolver)(nil) - -func (u UnindexedDirectoryResolver) newLocation(filePath string, resolveLinks bool) *Location { +func (u UnindexedDirectory) newLocation(filePath string, resolveLinks bool) *file.Location { filePath = path.Clean(filePath) virtualPath := "" @@ -277,12 +278,12 @@ func (u UnindexedDirectoryResolver) newLocation(filePath string, resolveLinks bo } } - l := NewVirtualLocation(realPath, virtualPath) + l := file.NewVirtualLocation(realPath, virtualPath) return &l } //nolint:gocognit -func (u UnindexedDirectoryResolver) resolveLinks(filePath string) []string { +func (u UnindexedDirectory) resolveLinks(filePath string) []string { var visited []string out := []string{} @@ -349,15 +350,15 @@ func (u UnindexedDirectoryResolver) resolveLinks(filePath string) []string { return out } -func (u UnindexedDirectoryResolver) isSymlink(fi os.FileInfo) bool { +func (u UnindexedDirectory) isSymlink(fi os.FileInfo) bool { return fi.Mode().Type()&fs.ModeSymlink == fs.ModeSymlink } // ------------------------- fs.FS ------------------------------ -// unindexedDirectoryResolverFS wraps the UnindexedDirectoryResolver as a fs.FS, fs.ReadDirFS, and fs.StatFS +// unindexedDirectoryResolverFS wraps the UnindexedDirectory as a fs.FS, fs.ReadDirFS, and fs.StatFS type unindexedDirectoryResolverFS struct { - u UnindexedDirectoryResolver + u UnindexedDirectory } // resolve takes a virtual path and returns the resolved absolute or relative path and file info @@ -470,7 +471,7 @@ func (f unindexedDirectoryResolverDirEntry) Info() (fs.FileInfo, error) { var _ fs.DirEntry = (*unindexedDirectoryResolverDirEntry)(nil) type unindexedDirectoryResolverFile struct { - u 
UnindexedDirectoryResolver + u UnindexedDirectory path string } @@ -493,7 +494,7 @@ func (f unindexedDirectoryResolverFile) Close() error { var _ fs.File = (*unindexedDirectoryResolverFile)(nil) type unindexedDirectoryResolverFileInfo struct { - u UnindexedDirectoryResolver + u UnindexedDirectory name string size int64 mode fs.FileMode @@ -502,7 +503,7 @@ type unindexedDirectoryResolverFileInfo struct { sys any } -func newFsFileInfo(u UnindexedDirectoryResolver, name string, isDir bool, fi os.FileInfo) unindexedDirectoryResolverFileInfo { +func newFsFileInfo(u UnindexedDirectory, name string, isDir bool, fi os.FileInfo) unindexedDirectoryResolverFileInfo { return unindexedDirectoryResolverFileInfo{ u: u, name: name, diff --git a/syft/source/unindexed_directory_resolver_test.go b/syft/internal/fileresolver/unindexed_directory_test.go similarity index 76% rename from syft/source/unindexed_directory_resolver_test.go rename to syft/internal/fileresolver/unindexed_directory_test.go index f6b1586718d..14631fc4cd8 100644 --- a/syft/source/unindexed_directory_resolver_test.go +++ b/syft/internal/fileresolver/unindexed_directory_test.go @@ -1,7 +1,7 @@ //go:build !windows // +build !windows -package source +package fileresolver import ( "io" @@ -14,18 +14,20 @@ import ( "time" "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" "github.com/scylladb/go-set/strset" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/anchore/stereoscope/pkg/file" + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" + "github.com/anchore/syft/syft/file" ) func Test_UnindexedDirectoryResolver_Basic(t *testing.T) { wd, err := os.Getwd() require.NoError(t, err) - r := NewUnindexedDirectoryResolver(path.Join(wd, "test-fixtures")) + r := NewFromUnindexedDirectory(path.Join(wd, "test-fixtures")) locations, err := r.FilesByGlob("image-symlinks/*") require.NoError(t, err) require.Len(t, locations, 5) @@ -55,17 
+57,18 @@ func Test_UnindexedDirectoryResolver_FilesByPath_relativeRoot(t *testing.T) { }, }, { - name: "should find a file from a relative path (root above cwd)", + name: "should find a file from a relative path (root above cwd)", + // TODO: refactor me! this test depends on the structure of the source dir not changing, which isn't great relativeRoot: "../", - input: "sbom/sbom.go", + input: "fileresolver/deferred.go", expected: []string{ - "sbom/sbom.go", + "fileresolver/deferred.go", }, }, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { - resolver := NewUnindexedDirectoryResolver(c.relativeRoot) + resolver := NewFromUnindexedDirectory(c.relativeRoot) refs, err := resolver.FilesByPath(c.input) require.NoError(t, err) @@ -103,11 +106,12 @@ func Test_UnindexedDirectoryResolver_FilesByPath_absoluteRoot(t *testing.T) { }, }, { - name: "should find a file from a relative path (root above cwd)", + name: "should find a file from a relative path (root above cwd)", + // TODO: refactor me! 
this test depends on the structure of the source dir not changing, which isn't great relativeRoot: "../", - input: "sbom/sbom.go", + input: "fileresolver/directory.go", expected: []string{ - "sbom/sbom.go", + "fileresolver/directory.go", }, }, } @@ -118,7 +122,7 @@ func Test_UnindexedDirectoryResolver_FilesByPath_absoluteRoot(t *testing.T) { absRoot, err := filepath.Abs(c.relativeRoot) require.NoError(t, err) - resolver := NewUnindexedDirectoryResolver(absRoot) + resolver := NewFromUnindexedDirectory(absRoot) assert.NoError(t, err) refs, err := resolver.FilesByPath(c.input) @@ -179,7 +183,7 @@ func Test_UnindexedDirectoryResolver_FilesByPath(t *testing.T) { } for _, c := range cases { t.Run(c.name, func(t *testing.T) { - resolver := NewUnindexedDirectoryResolver(c.root) + resolver := NewFromUnindexedDirectory(c.root) hasPath := resolver.HasPath(c.input) if !c.forcePositiveHasPath { @@ -226,7 +230,7 @@ func Test_UnindexedDirectoryResolver_MultipleFilesByPath(t *testing.T) { } for _, c := range cases { t.Run(c.name, func(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures") + resolver := NewFromUnindexedDirectory("./test-fixtures") refs, err := resolver.FilesByPath(c.input...) 
assert.NoError(t, err) @@ -238,7 +242,7 @@ func Test_UnindexedDirectoryResolver_MultipleFilesByPath(t *testing.T) { } func Test_UnindexedDirectoryResolver_FilesByGlobMultiple(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures") + resolver := NewFromUnindexedDirectory("./test-fixtures") refs, err := resolver.FilesByGlob("**/image-symlinks/file*") assert.NoError(t, err) @@ -246,14 +250,14 @@ func Test_UnindexedDirectoryResolver_FilesByGlobMultiple(t *testing.T) { } func Test_UnindexedDirectoryResolver_FilesByGlobRecursive(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/image-symlinks") + resolver := NewFromUnindexedDirectory("./test-fixtures/image-symlinks") refs, err := resolver.FilesByGlob("**/*.txt") assert.NoError(t, err) assert.Len(t, refs, 6) } func Test_UnindexedDirectoryResolver_FilesByGlobSingle(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures") + resolver := NewFromUnindexedDirectory("./test-fixtures") refs, err := resolver.FilesByGlob("**/image-symlinks/*1.txt") assert.NoError(t, err) @@ -279,7 +283,7 @@ func Test_UnindexedDirectoryResolver_FilesByPath_ResolvesSymlinks(t *testing.T) for _, test := range tests { t.Run(test.name, func(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/symlinks-simple") + resolver := NewFromUnindexedDirectory("./test-fixtures/symlinks-simple") refs, err := resolver.FilesByPath(test.fixture) require.NoError(t, err) @@ -301,7 +305,7 @@ func Test_UnindexedDirectoryResolver_FilesByPath_ResolvesSymlinks(t *testing.T) func Test_UnindexedDirectoryResolverDoesNotIgnoreRelativeSystemPaths(t *testing.T) { // let's make certain that "dev/place" is not ignored, since it is not "/dev/place" - resolver := NewUnindexedDirectoryResolver("test-fixtures/system_paths/target") + resolver := NewFromUnindexedDirectory("test-fixtures/system_paths/target") // all paths should be found (non filtering matches a path) locations, err := 
resolver.FilesByGlob("**/place") @@ -326,7 +330,7 @@ func Test_UnindexedDirectoryResolverDoesNotIgnoreRelativeSystemPaths(t *testing. } func Test_UnindexedDirectoryResover_IndexingNestedSymLinks(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/symlinks-simple") + resolver := NewFromUnindexedDirectory("./test-fixtures/symlinks-simple") // check that we can get the real path locations, err := resolver.FilesByPath("./readme") @@ -374,7 +378,7 @@ func Test_UnindexedDirectoryResover_IndexingNestedSymLinks(t *testing.T) { } func Test_UnindexedDirectoryResover_IndexingNestedSymLinksOutsideOfRoot(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/symlinks-multiple-roots/root") + resolver := NewFromUnindexedDirectory("./test-fixtures/symlinks-multiple-roots/root") // check that we can get the real path locations, err := resolver.FilesByPath("./readme") @@ -391,7 +395,7 @@ func Test_UnindexedDirectoryResover_IndexingNestedSymLinksOutsideOfRoot(t *testi } func Test_UnindexedDirectoryResover_RootViaSymlink(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/symlinked-root/nested/link-root") + resolver := NewFromUnindexedDirectory("./test-fixtures/symlinked-root/nested/link-root") locations, err := resolver.FilesByPath("./file1.txt") require.NoError(t, err) @@ -410,23 +414,23 @@ func Test_UnindexedDirectoryResolver_FileContentsByLocation(t *testing.T) { cwd, err := os.Getwd() require.NoError(t, err) - r := NewUnindexedDirectoryResolver(path.Join(cwd, "test-fixtures/image-simple")) + r := NewFromUnindexedDirectory(path.Join(cwd, "test-fixtures/image-simple")) require.NoError(t, err) tests := []struct { name string - location Location + location file.Location expects string err bool }{ { name: "use file reference for content requests", - location: NewLocation("file-1.txt"), + location: file.NewLocation("file-1.txt"), expects: "this file has contents", }, { name: "error on empty file reference", - 
location: NewLocationFromDirectory("doesn't matter", file.Reference{}), + location: file.NewLocationFromDirectory("doesn't matter", stereoscopeFile.Reference{}), err: true, }, } @@ -451,7 +455,7 @@ func Test_UnindexedDirectoryResolver_FileContentsByLocation(t *testing.T) { func Test_UnindexedDirectoryResover_SymlinkLoopWithGlobsShouldResolve(t *testing.T) { test := func(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/symlinks-loop") + resolver := NewFromUnindexedDirectory("./test-fixtures/symlinks-loop") locations, err := resolver.FilesByGlob("**/file.target") require.NoError(t, err) @@ -521,7 +525,7 @@ func Test_UnindexedDirectoryResolver_FilesByPath_baseRoot(t *testing.T) { } for _, c := range cases { t.Run(c.name, func(t *testing.T) { - resolver := NewUnindexedDirectoryResolverRooted(c.root, c.root) + resolver := NewFromRootedUnindexedDirectory(c.root, c.root) refs, err := resolver.FilesByPath(c.input) require.NoError(t, err) @@ -539,115 +543,115 @@ func Test_UnindexedDirectoryResolver_FilesByPath_baseRoot(t *testing.T) { func Test_UnindexedDirectoryResolver_resolvesLinks(t *testing.T) { tests := []struct { name string - runner func(FileResolver) []Location - expected []Location + runner func(file.Resolver) []file.Location + expected []file.Location }{ { name: "by glob to links", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files // for that reason we need to place **/ in front (which is not the same for other resolvers) actualLocations, err := resolver.FilesByGlob("**/*ink-*") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocation("file-1.txt", "link-1"), - NewVirtualLocation("file-2.txt", "link-2"), + expected: []file.Location{ + file.NewVirtualLocation("file-1.txt", "link-1"), + file.NewVirtualLocation("file-2.txt", "link-2"), // we already have this real file path via another link, 
so only one is returned - // NewVirtualLocation("file-2.txt", "link-indirect"), - NewVirtualLocation("file-3.txt", "link-within"), + // file.NewVirtualLocation("file-2.txt", "link-indirect"), + file.NewVirtualLocation("file-3.txt", "link-within"), }, }, { name: "by basename", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("**/file-2.txt") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ // this has two copies in the base image, which overwrites the same location - NewLocation("file-2.txt"), + file.NewLocation("file-2.txt"), }, }, { name: "by basename glob", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("**/file-?.txt") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewLocation("file-1.txt"), - NewLocation("file-2.txt"), - NewLocation("file-3.txt"), - NewLocation("parent/file-4.txt"), + expected: []file.Location{ + file.NewLocation("file-1.txt"), + file.NewLocation("file-2.txt"), + file.NewLocation("file-3.txt"), + file.NewLocation("parent/file-4.txt"), }, }, { name: "by basename glob to links", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { actualLocations, err := resolver.FilesByGlob("**/link-*") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewVirtualLocationFromDirectory("file-1.txt", "link-1", file.Reference{RealPath: "file-1.txt"}), - NewVirtualLocationFromDirectory("file-2.txt", "link-2", file.Reference{RealPath: "file-2.txt"}), + expected: []file.Location{ + file.NewVirtualLocationFromDirectory("file-1.txt", "link-1", stereoscopeFile.Reference{RealPath: "file-1.txt"}), + 
file.NewVirtualLocationFromDirectory("file-2.txt", "link-2", stereoscopeFile.Reference{RealPath: "file-2.txt"}), // we already have this real file path via another link, so only one is returned - //NewVirtualLocationFromDirectory("file-2.txt", "link-indirect", file.Reference{RealPath: "file-2.txt"}), - NewVirtualLocationFromDirectory("file-3.txt", "link-within", file.Reference{RealPath: "file-3.txt"}), + //file.NewVirtualLocationFromDirectory("file-2.txt", "link-indirect", stereoscopeFile.Reference{RealPath: "file-2.txt"}), + file.NewVirtualLocationFromDirectory("file-3.txt", "link-within", stereoscopeFile.Reference{RealPath: "file-3.txt"}), }, }, { name: "by extension", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links are searched, but resolve to the real files actualLocations, err := resolver.FilesByGlob("**/*.txt") assert.NoError(t, err) return actualLocations }, - expected: []Location{ - NewLocation("file-1.txt"), - NewLocation("file-2.txt"), - NewLocation("file-3.txt"), - NewLocation("parent/file-4.txt"), + expected: []file.Location{ + file.NewLocation("file-1.txt"), + file.NewLocation("file-2.txt"), + file.NewLocation("file-3.txt"), + file.NewLocation("parent/file-4.txt"), }, }, { name: "by path to degree 1 link", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // links resolve to the final file actualLocations, err := resolver.FilesByPath("/link-2") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ // we have multiple copies across layers - NewVirtualLocation("file-2.txt", "link-2"), + file.NewVirtualLocation("file-2.txt", "link-2"), }, }, { name: "by path to degree 2 link", - runner: func(resolver FileResolver) []Location { + runner: func(resolver file.Resolver) []file.Location { // multiple links resolves to the final file actualLocations, err := 
resolver.FilesByPath("/link-indirect") assert.NoError(t, err) return actualLocations }, - expected: []Location{ + expected: []file.Location{ // we have multiple copies across layers - NewVirtualLocation("file-2.txt", "link-indirect"), + file.NewVirtualLocation("file-2.txt", "link-indirect"), }, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/symlinks-from-image-symlinks-fixture") + resolver := NewFromUnindexedDirectory("./test-fixtures/symlinks-from-image-symlinks-fixture") actual := test.runner(resolver) @@ -657,14 +661,14 @@ func Test_UnindexedDirectoryResolver_resolvesLinks(t *testing.T) { } func Test_UnindexedDirectoryResolver_DoNotAddVirtualPathsToTree(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/symlinks-prune-indexing") + resolver := NewFromUnindexedDirectory("./test-fixtures/symlinks-prune-indexing") allLocations := resolver.AllLocations() - var allRealPaths []file.Path + var allRealPaths []stereoscopeFile.Path for l := range allLocations { - allRealPaths = append(allRealPaths, file.Path(l.RealPath)) + allRealPaths = append(allRealPaths, stereoscopeFile.Path(l.RealPath)) } - pathSet := file.NewPathSet(allRealPaths...) + pathSet := stereoscopeFile.NewPathSet(allRealPaths...) assert.False(t, pathSet.Contains("before-path/file.txt"), @@ -678,9 +682,9 @@ func Test_UnindexedDirectoryResolver_DoNotAddVirtualPathsToTree(t *testing.T) { } func Test_UnindexedDirectoryResolver_FilesContents_errorOnDirRequest(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/system_paths") + resolver := NewFromUnindexedDirectory("./test-fixtures/system_paths") - dirLoc := NewLocation("arg/foo") + dirLoc := file.NewLocation("arg/foo") reader, err := resolver.FileContentsByLocation(dirLoc) require.Error(t, err) @@ -688,7 +692,7 @@ func Test_UnindexedDirectoryResolver_FilesContents_errorOnDirRequest(t *testing. 
} func Test_UnindexedDirectoryResolver_AllLocations(t *testing.T) { - resolver := NewUnindexedDirectoryResolver("./test-fixtures/symlinks-from-image-symlinks-fixture") + resolver := NewFromUnindexedDirectory("./test-fixtures/symlinks-from-image-symlinks-fixture") paths := strset.New() for loc := range resolver.AllLocations() { @@ -724,13 +728,13 @@ func Test_WritableUnindexedDirectoryResolver(t *testing.T) { p := "some/path/file" c := "some contents" - dr := NewUnindexedDirectoryResolver(tmpdir) + dr := NewFromUnindexedDirectory(tmpdir) locations, err := dr.FilesByPath(p) require.NoError(t, err) require.Len(t, locations, 0) - err = dr.Write(NewLocation(p), strings.NewReader(c)) + err = dr.Write(file.NewLocation(p), strings.NewReader(c)) require.NoError(t, err) locations, err = dr.FilesByPath(p) @@ -742,3 +746,37 @@ func Test_WritableUnindexedDirectoryResolver(t *testing.T) { bytes, err := io.ReadAll(reader) require.Equal(t, c, string(bytes)) } + +func testWithTimeout(t *testing.T, timeout time.Duration, test func(*testing.T)) { + done := make(chan bool) + go func() { + test(t) + done <- true + }() + + select { + case <-time.After(timeout): + t.Fatal("test timed out") + case <-done: + } +} + +func compareLocations(t *testing.T, expected, actual []file.Location) { + t.Helper() + ignoreUnexported := cmpopts.IgnoreFields(file.LocationData{}, "ref") + ignoreMetadata := cmpopts.IgnoreFields(file.LocationMetadata{}, "Annotations") + ignoreFS := cmpopts.IgnoreFields(file.Coordinates{}, "FileSystemID") + + sort.Sort(file.Locations(expected)) + sort.Sort(file.Locations(actual)) + + if d := cmp.Diff(expected, actual, + ignoreUnexported, + ignoreFS, + ignoreMetadata, + ); d != "" { + + t.Errorf("unexpected locations (-want +got):\n%s", d) + } + +} diff --git a/syft/linux/identify_release.go b/syft/linux/identify_release.go index 152b53324c6..cf2da477cc3 100644 --- a/syft/linux/identify_release.go +++ b/syft/linux/identify_release.go @@ -11,7 +11,7 @@ import ( 
"github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) // returns a distro or nil @@ -54,7 +54,7 @@ var identityFiles = []parseEntry{ } // IdentifyRelease parses distro-specific files to discover and raise linux distribution release details. -func IdentifyRelease(resolver source.FileResolver) *Release { +func IdentifyRelease(resolver file.Resolver) *Release { logger := log.Nested("operation", "identify-release") for _, entry := range identityFiles { locations, err := resolver.FilesByPath(entry.path) diff --git a/syft/pkg/binary_metadata.go b/syft/pkg/binary_metadata.go index de0a0a2d40b..a915acc5296 100644 --- a/syft/pkg/binary_metadata.go +++ b/syft/pkg/binary_metadata.go @@ -1,12 +1,12 @@ package pkg -import "github.com/anchore/syft/syft/source" +import "github.com/anchore/syft/syft/file" type BinaryMetadata struct { Matches []ClassifierMatch `mapstructure:"Matches" json:"matches"` } type ClassifierMatch struct { - Classifier string `mapstructure:"Classifier" json:"classifier"` - Location source.Location `mapstructure:"Location" json:"location"` + Classifier string `mapstructure:"Classifier" json:"classifier"` + Location file.Location `mapstructure:"Location" json:"location"` } diff --git a/syft/pkg/catalog_test.go b/syft/pkg/catalog_test.go index 5271ac262cb..cfde34d5d6e 100644 --- a/syft/pkg/catalog_test.go +++ b/syft/pkg/catalog_test.go @@ -9,7 +9,7 @@ import ( "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/cpe" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) type expectedIndexes struct { @@ -75,8 +75,8 @@ func TestCatalogDeleteRemovesPackages(t *testing.T) { Name: "debian", Version: "1", Type: DebPkg, - Locations: source.NewLocationSet( - source.NewVirtualLocation("/c/path", "/another/path1"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/c/path", 
"/another/path1"), ), }, { @@ -84,8 +84,8 @@ func TestCatalogDeleteRemovesPackages(t *testing.T) { Name: "debian", Version: "2", Type: DebPkg, - Locations: source.NewLocationSet( - source.NewVirtualLocation("/d/path", "/another/path2"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/d/path", "/another/path2"), ), }, }, @@ -110,8 +110,8 @@ func TestCatalogDeleteRemovesPackages(t *testing.T) { Name: "debian", Version: "1", Type: DebPkg, - Locations: source.NewLocationSet( - source.NewVirtualLocation("/c/path", "/another/path1"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/c/path", "/another/path1"), ), }, { @@ -119,8 +119,8 @@ func TestCatalogDeleteRemovesPackages(t *testing.T) { Name: "debian", Version: "2", Type: DebPkg, - Locations: source.NewLocationSet( - source.NewVirtualLocation("/d/path", "/another/path2"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/d/path", "/another/path2"), ), }, { @@ -128,8 +128,8 @@ func TestCatalogDeleteRemovesPackages(t *testing.T) { Name: "debian", Version: "3", Type: DebPkg, - Locations: source.NewLocationSet( - source.NewVirtualLocation("/e/path", "/another/path3"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/e/path", "/another/path3"), ), }, }, @@ -155,8 +155,8 @@ func TestCatalogDeleteRemovesPackages(t *testing.T) { Name: "debian", Version: "1", Type: DebPkg, - Locations: source.NewLocationSet( - source.NewVirtualLocation("/c/path", "/another/path1"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/c/path", "/another/path1"), ), }, { @@ -164,8 +164,8 @@ func TestCatalogDeleteRemovesPackages(t *testing.T) { Name: "debian", Version: "2", Type: DebPkg, - Locations: source.NewLocationSet( - source.NewVirtualLocation("/d/path", "/another/path2"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/d/path", "/another/path2"), ), }, }, @@ -206,16 +206,16 @@ func TestCatalogAddPopulatesIndex(t *testing.T) { var pkgs = []Package{ { - Locations: 
source.NewLocationSet( - source.NewVirtualLocation("/a/path", "/another/path"), - source.NewVirtualLocation("/b/path", "/bee/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/a/path", "/another/path"), + file.NewVirtualLocation("/b/path", "/bee/path"), ), Type: RpmPkg, }, { - Locations: source.NewLocationSet( - source.NewVirtualLocation("/c/path", "/another/path"), - source.NewVirtualLocation("/d/path", "/another/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/c/path", "/another/path"), + file.NewVirtualLocation("/d/path", "/another/path"), ), Type: NpmPkg, }, @@ -291,25 +291,25 @@ func assertIndexes(t *testing.T, c *Collection, expectedIndexes expectedIndexes) func TestCatalog_PathIndexDeduplicatesRealVsVirtualPaths(t *testing.T) { p1 := Package{ - Locations: source.NewLocationSet( - source.NewVirtualLocation("/b/path", "/another/path"), - source.NewVirtualLocation("/b/path", "/b/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/b/path", "/another/path"), + file.NewVirtualLocation("/b/path", "/b/path"), ), Type: RpmPkg, Name: "Package-1", } p2 := Package{ - Locations: source.NewLocationSet( - source.NewVirtualLocation("/b/path", "/b/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/b/path", "/b/path"), ), Type: RpmPkg, Name: "Package-2", } p2Dup := Package{ - Locations: source.NewLocationSet( - source.NewVirtualLocation("/b/path", "/another/path"), - source.NewVirtualLocation("/b/path", "/c/path/b/dup"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/b/path", "/another/path"), + file.NewVirtualLocation("/b/path", "/c/path/b/dup"), ), Type: RpmPkg, Name: "Package-2", @@ -361,7 +361,7 @@ func TestCatalog_MergeRecords(t *testing.T) { var tests = []struct { name string pkgs []Package - expectedLocations []source.Location + expectedLocations []file.Location expectedCPECount int }{ { @@ -369,9 +369,9 @@ func TestCatalog_MergeRecords(t *testing.T) { pkgs: []Package{ { CPEs: 
[]cpe.CPE{cpe.Must("cpe:2.3:a:package:1:1:*:*:*:*:*:*:*")}, - Locations: source.NewLocationSet( - source.NewVirtualLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewVirtualLocationFromCoordinates( + file.Coordinates{ RealPath: "/b/path", FileSystemID: "a", }, @@ -382,9 +382,9 @@ func TestCatalog_MergeRecords(t *testing.T) { }, { CPEs: []cpe.CPE{cpe.Must("cpe:2.3:b:package:1:1:*:*:*:*:*:*:*")}, - Locations: source.NewLocationSet( - source.NewVirtualLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewVirtualLocationFromCoordinates( + file.Coordinates{ RealPath: "/b/path", FileSystemID: "b", }, @@ -394,16 +394,16 @@ func TestCatalog_MergeRecords(t *testing.T) { Type: RpmPkg, }, }, - expectedLocations: []source.Location{ - source.NewVirtualLocationFromCoordinates( - source.Coordinates{ + expectedLocations: []file.Location{ + file.NewVirtualLocationFromCoordinates( + file.Coordinates{ RealPath: "/b/path", FileSystemID: "a", }, "/another/path", ), - source.NewVirtualLocationFromCoordinates( - source.Coordinates{ + file.NewVirtualLocationFromCoordinates( + file.Coordinates{ RealPath: "/b/path", FileSystemID: "b", }, diff --git a/syft/pkg/cataloger.go b/syft/pkg/cataloger.go index 28cc57c35dd..634a17e1dc1 100644 --- a/syft/pkg/cataloger.go +++ b/syft/pkg/cataloger.go @@ -2,7 +2,7 @@ package pkg import ( "github.com/anchore/syft/syft/artifact" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) // Cataloger describes behavior for an object to participate in parsing container image or file system @@ -12,5 +12,5 @@ type Cataloger interface { // Name returns a string that uniquely describes a cataloger Name() string // Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing the catalog source. 
- Catalog(resolver source.FileResolver) ([]Package, []artifact.Relationship, error) + Catalog(resolver file.Resolver) ([]Package, []artifact.Relationship, error) } diff --git a/syft/pkg/cataloger/alpm/cataloger_test.go b/syft/pkg/cataloger/alpm/cataloger_test.go index 1dedded2eff..0b8a9156ea5 100644 --- a/syft/pkg/cataloger/alpm/cataloger_test.go +++ b/syft/pkg/cataloger/alpm/cataloger_test.go @@ -9,11 +9,10 @@ import ( "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestAlpmCataloger(t *testing.T) { - dbLocation := source.NewLocation("var/lib/pacman/local/gmp-6.2.1-2/desc") + dbLocation := file.NewLocation("var/lib/pacman/local/gmp-6.2.1-2/desc") expectedPkgs := []pkg.Package{ { Name: "gmp", @@ -24,7 +23,7 @@ func TestAlpmCataloger(t *testing.T) { pkg.NewLicenseFromLocations("LGPL3", dbLocation), pkg.NewLicenseFromLocations("GPL", dbLocation), ), - Locations: source.NewLocationSet(dbLocation), + Locations: file.NewLocationSet(dbLocation), CPEs: nil, PURL: "", MetadataType: "AlpmMetadata", diff --git a/syft/pkg/cataloger/alpm/package.go b/syft/pkg/cataloger/alpm/package.go index 2c85db47c62..4ce9bd6b6f2 100644 --- a/syft/pkg/cataloger/alpm/package.go +++ b/syft/pkg/cataloger/alpm/package.go @@ -4,18 +4,18 @@ import ( "strings" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackage(m *parsedData, release *linux.Release, dbLocation source.Location) pkg.Package { +func newPackage(m *parsedData, release *linux.Release, dbLocation file.Location) pkg.Package { licenseCandidates := strings.Split(m.Licenses, "\n") p := pkg.Package{ Name: m.Package, Version: m.Version, - Locations: source.NewLocationSet(dbLocation), + Locations: file.NewLocationSet(dbLocation), Licenses: 
pkg.NewLicenseSet(pkg.NewLicensesFromLocation(dbLocation.WithoutAnnotations(), licenseCandidates...)...), Type: pkg.AlpmPkg, PURL: packageURL(m, release), diff --git a/syft/pkg/cataloger/alpm/parse_alpm_db.go b/syft/pkg/cataloger/alpm/parse_alpm_db.go index 987a52c076a..86c6dd3d2fd 100644 --- a/syft/pkg/cataloger/alpm/parse_alpm_db.go +++ b/syft/pkg/cataloger/alpm/parse_alpm_db.go @@ -17,7 +17,6 @@ import ( "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseAlpmDB @@ -36,7 +35,7 @@ type parsedData struct { pkg.AlpmMetadata `mapstructure:",squash"` } -func parseAlpmDB(resolver source.FileResolver, env *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseAlpmDB(resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { data, err := parseAlpmDBEntry(reader) if err != nil { return nil, nil, err @@ -117,7 +116,7 @@ func newScanner(reader io.Reader) *bufio.Scanner { return scanner } -func getFileReader(path string, resolver source.FileResolver) (io.Reader, error) { +func getFileReader(path string, resolver file.Resolver) (io.Reader, error) { locs, err := resolver.FilesByPath(path) if err != nil { return nil, err diff --git a/syft/pkg/cataloger/apkdb/package.go b/syft/pkg/cataloger/apkdb/package.go index 4bc59ba170d..8cb75bbc579 100644 --- a/syft/pkg/cataloger/apkdb/package.go +++ b/syft/pkg/cataloger/apkdb/package.go @@ -4,13 +4,13 @@ import ( "strings" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/license" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackage(d parsedData, release *linux.Release, dbLocation source.Location) 
pkg.Package { +func newPackage(d parsedData, release *linux.Release, dbLocation file.Location) pkg.Package { // check if license is a valid spdx expression before splitting licenseStrings := []string{d.License} _, err := license.ParseExpression(d.License) @@ -22,7 +22,7 @@ func newPackage(d parsedData, release *linux.Release, dbLocation source.Location p := pkg.Package{ Name: d.Package, Version: d.Version, - Locations: source.NewLocationSet(dbLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(dbLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), Licenses: pkg.NewLicenseSet(pkg.NewLicensesFromLocation(dbLocation, licenseStrings...)...), PURL: packageURL(d.ApkMetadata, release), Type: pkg.ApkPkg, diff --git a/syft/pkg/cataloger/apkdb/parse_apk_db.go b/syft/pkg/cataloger/apkdb/parse_apk_db.go index 748ed7d5852..fd4184f87dd 100644 --- a/syft/pkg/cataloger/apkdb/parse_apk_db.go +++ b/syft/pkg/cataloger/apkdb/parse_apk_db.go @@ -16,7 +16,6 @@ import ( "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // integrity check @@ -35,7 +34,7 @@ type parsedData struct { // information on specific fields, see https://wiki.alpinelinux.org/wiki/Apk_spec. 
// //nolint:funlen,gocognit -func parseApkDB(resolver source.FileResolver, env *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseApkDB(resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { scanner := bufio.NewScanner(reader) var apks []parsedData @@ -134,7 +133,7 @@ func parseApkDB(resolver source.FileResolver, env *generic.Environment, reader s return pkgs, discoverPackageDependencies(pkgs), nil } -func findReleases(resolver source.FileResolver, dbPath string) []linux.Release { +func findReleases(resolver file.Resolver, dbPath string) []linux.Release { if resolver == nil { return nil } @@ -157,13 +156,13 @@ func findReleases(resolver source.FileResolver, dbPath string) []linux.Release { return nil } - return parseReleasesFromAPKRepository(source.LocationReadCloser{ + return parseReleasesFromAPKRepository(file.LocationReadCloser{ Location: location, ReadCloser: reposReader, }) } -func parseReleasesFromAPKRepository(reader source.LocationReadCloser) []linux.Release { +func parseReleasesFromAPKRepository(reader file.LocationReadCloser) []linux.Release { var releases []linux.Release reposB, err := io.ReadAll(reader) diff --git a/syft/pkg/cataloger/apkdb/parse_apk_db_test.go b/syft/pkg/cataloger/apkdb/parse_apk_db_test.go index ac344631514..865a02c97ee 100644 --- a/syft/pkg/cataloger/apkdb/parse_apk_db_test.go +++ b/syft/pkg/cataloger/apkdb/parse_apk_db_test.go @@ -18,7 +18,6 @@ import ( "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestExtraFileAttributes(t *testing.T) { @@ -680,8 +679,8 @@ func TestSinglePackageDetails(t *testing.T) { for _, test := range tests { t.Run(test.fixture, func(t *testing.T) { - fixtureLocation := source.NewLocation(test.fixture) - 
test.expected.Locations = source.NewLocationSet(fixtureLocation) + fixtureLocation := file.NewLocation(test.fixture) + test.expected.Locations = file.NewLocationSet(fixtureLocation) licenses := test.expected.Licenses.ToSlice() for i := range licenses { licenses[i].Locations.Add(fixtureLocation) @@ -694,8 +693,8 @@ func TestSinglePackageDetails(t *testing.T) { func TestMultiplePackages(t *testing.T) { fixture := "test-fixtures/multiple" - location := source.NewLocation(fixture) - fixtureLocationSet := source.NewLocationSet(location) + location := file.NewLocation(fixture) + fixtureLocationSet := file.NewLocationSet(location) expectedPkgs := []pkg.Package{ { Name: "libc-utils", @@ -1024,7 +1023,7 @@ func Test_discoverPackageDependencies(t *testing.T) { t.Run(test.name, func(t *testing.T) { pkgs, wantRelationships := test.genFn() gotRelationships := discoverPackageDependencies(pkgs) - d := cmp.Diff(wantRelationships, gotRelationships, cmpopts.IgnoreUnexported(pkg.Package{}, source.LocationSet{}, pkg.LicenseSet{})) + d := cmp.Diff(wantRelationships, gotRelationships, cmpopts.IgnoreUnexported(pkg.Package{}, file.LocationSet{}, pkg.LicenseSet{})) if d != "" { t.Fail() t.Log(d) @@ -1061,8 +1060,8 @@ func TestPackageDbDependenciesByParse(t *testing.T) { require.NoError(t, err) t.Cleanup(func() { require.NoError(t, f.Close()) }) - pkgs, relationships, err := parseApkDB(nil, nil, source.LocationReadCloser{ - Location: source.NewLocation(test.fixture), + pkgs, relationships, err := parseApkDB(nil, nil, file.LocationReadCloser{ + Location: file.NewLocation(test.fixture), ReadCloser: f, }) require.NoError(t, err) @@ -1172,12 +1171,12 @@ func toPackageNames(pkgs []pkg.Package) []string { return names } -func newLocationReadCloser(t *testing.T, path string) source.LocationReadCloser { +func newLocationReadCloser(t *testing.T, path string) file.LocationReadCloser { f, err := os.Open(path) require.NoError(t, err) t.Cleanup(func() { f.Close() }) - return 
source.NewLocationReadCloser(source.NewLocation(path), f) + return file.NewLocationReadCloser(file.NewLocation(path), f) } func Test_stripVersionSpecifier(t *testing.T) { @@ -1256,8 +1255,8 @@ https://foo.them.org/alpine/v3.14/community`, for _, tt := range tests { t.Run(tt.desc, func(t *testing.T) { reposReader := io.NopCloser(strings.NewReader(tt.repos)) - got := parseReleasesFromAPKRepository(source.LocationReadCloser{ - Location: source.NewLocation("test"), + got := parseReleasesFromAPKRepository(file.LocationReadCloser{ + Location: file.NewLocation("test"), ReadCloser: reposReader, }) assert.Equal(t, tt.want, got) diff --git a/syft/pkg/cataloger/binary/cataloger.go b/syft/pkg/cataloger/binary/cataloger.go index cf58af15ca7..0cf04b729ed 100644 --- a/syft/pkg/cataloger/binary/cataloger.go +++ b/syft/pkg/cataloger/binary/cataloger.go @@ -3,8 +3,8 @@ package binary import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) const catalogerName = "binary-cataloger" @@ -27,7 +27,7 @@ func (c Cataloger) Name() string { // Catalog is given an object to resolve file references and content, this function returns any discovered Packages // after analyzing the catalog source. 
-func (c Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { +func (c Cataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { var packages []pkg.Package var relationships []artifact.Relationship @@ -68,7 +68,7 @@ func mergePackages(target *pkg.Package, extra *pkg.Package) { target.Metadata = meta } -func catalog(resolver source.FileResolver, cls classifier) (packages []pkg.Package, err error) { +func catalog(resolver file.Resolver, cls classifier) (packages []pkg.Package, err error) { locations, err := resolver.FilesByGlob(cls.FileGlob) if err != nil { return nil, err diff --git a/syft/pkg/cataloger/binary/cataloger_test.go b/syft/pkg/cataloger/binary/cataloger_test.go index 51b0751ef81..d6622423bb5 100644 --- a/syft/pkg/cataloger/binary/cataloger_test.go +++ b/syft/pkg/cataloger/binary/cataloger_test.go @@ -13,6 +13,7 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/stereoscope/pkg/imagetest" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/source" ) @@ -728,12 +729,12 @@ func TestClassifierCataloger_DefaultClassifiers_NegativeCases(t *testing.T) { assert.Equal(t, 0, len(actualResults)) } -func locations(locations ...string) source.LocationSet { - var locs []source.Location +func locations(locations ...string) file.LocationSet { + var locs []file.Location for _, s := range locations { - locs = append(locs, source.NewLocation(s)) + locs = append(locs, file.NewLocation(s)) } - return source.NewLocationSet(locs...) + return file.NewLocationSet(locs...) 
} // metadata paths are: realPath, virtualPath @@ -757,8 +758,8 @@ func match(classifier string, paths ...string) pkg.ClassifierMatch { } return pkg.ClassifierMatch{ Classifier: classifier, - Location: source.NewVirtualLocationFromCoordinates( - source.Coordinates{ + Location: file.NewVirtualLocationFromCoordinates( + file.Coordinates{ RealPath: realPath, }, virtualPath, @@ -817,10 +818,10 @@ func assertPackagesAreEqual(t *testing.T, expected pkg.Package, p pkg.Package) { if len(failMessages) > 0 { assert.Failf(t, strings.Join(failMessages, "; "), "diff: %s", cmp.Diff(expected, p, - cmp.Transformer("Locations", func(l source.LocationSet) []source.Location { + cmp.Transformer("Locations", func(l file.LocationSet) []file.Location { return l.ToSlice() }), - cmpopts.IgnoreUnexported(pkg.Package{}, source.Location{}), + cmpopts.IgnoreUnexported(pkg.Package{}, file.Location{}), cmpopts.IgnoreFields(pkg.Package{}, "CPEs", "FoundBy", "MetadataType", "Type"), )) } @@ -830,22 +831,22 @@ type panicyResolver struct { searchCalled bool } -func (p *panicyResolver) FilesByExtension(_ ...string) ([]source.Location, error) { +func (p *panicyResolver) FilesByExtension(_ ...string) ([]file.Location, error) { p.searchCalled = true return nil, errors.New("not implemented") } -func (p *panicyResolver) FilesByBasename(_ ...string) ([]source.Location, error) { +func (p *panicyResolver) FilesByBasename(_ ...string) ([]file.Location, error) { p.searchCalled = true return nil, errors.New("not implemented") } -func (p *panicyResolver) FilesByBasenameGlob(_ ...string) ([]source.Location, error) { +func (p *panicyResolver) FilesByBasenameGlob(_ ...string) ([]file.Location, error) { p.searchCalled = true return nil, errors.New("not implemented") } -func (p *panicyResolver) FileContentsByLocation(_ source.Location) (io.ReadCloser, error) { +func (p *panicyResolver) FileContentsByLocation(_ file.Location) (io.ReadCloser, error) { p.searchCalled = true return nil, errors.New("not implemented") } @@ 
-854,34 +855,34 @@ func (p *panicyResolver) HasPath(_ string) bool { return true } -func (p *panicyResolver) FilesByPath(_ ...string) ([]source.Location, error) { +func (p *panicyResolver) FilesByPath(_ ...string) ([]file.Location, error) { p.searchCalled = true return nil, errors.New("not implemented") } -func (p *panicyResolver) FilesByGlob(_ ...string) ([]source.Location, error) { +func (p *panicyResolver) FilesByGlob(_ ...string) ([]file.Location, error) { p.searchCalled = true return nil, errors.New("not implemented") } -func (p *panicyResolver) FilesByMIMEType(_ ...string) ([]source.Location, error) { +func (p *panicyResolver) FilesByMIMEType(_ ...string) ([]file.Location, error) { p.searchCalled = true return nil, errors.New("not implemented") } -func (p *panicyResolver) RelativeFileByPath(_ source.Location, _ string) *source.Location { +func (p *panicyResolver) RelativeFileByPath(_ file.Location, _ string) *file.Location { return nil } -func (p *panicyResolver) AllLocations() <-chan source.Location { +func (p *panicyResolver) AllLocations() <-chan file.Location { return nil } -func (p *panicyResolver) FileMetadataByLocation(_ source.Location) (source.FileMetadata, error) { - return source.FileMetadata{}, errors.New("not implemented") +func (p *panicyResolver) FileMetadataByLocation(_ file.Location) (file.Metadata, error) { + return file.Metadata{}, errors.New("not implemented") } -var _ source.FileResolver = (*panicyResolver)(nil) +var _ file.Resolver = (*panicyResolver)(nil) func Test_Cataloger_ResilientToErrors(t *testing.T) { c := NewCataloger() diff --git a/syft/pkg/cataloger/binary/classifier.go b/syft/pkg/cataloger/binary/classifier.go index 6ab18985c52..c98399f4ce9 100644 --- a/syft/pkg/cataloger/binary/classifier.go +++ b/syft/pkg/cataloger/binary/classifier.go @@ -15,9 +15,9 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/cpe" + "github.com/anchore/syft/syft/file" 
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader" - "github.com/anchore/syft/syft/source" ) var emptyPURL = packageurl.PackageURL{} @@ -53,10 +53,10 @@ type classifier struct { } // evidenceMatcher is a function called to catalog Packages that match some sort of evidence -type evidenceMatcher func(resolver source.FileResolver, classifier classifier, location source.Location) ([]pkg.Package, error) +type evidenceMatcher func(resolver file.Resolver, classifier classifier, location file.Location) ([]pkg.Package, error) func evidenceMatchers(matchers ...evidenceMatcher) evidenceMatcher { - return func(resolver source.FileResolver, classifier classifier, location source.Location) ([]pkg.Package, error) { + return func(resolver file.Resolver, classifier classifier, location file.Location) ([]pkg.Package, error) { for _, matcher := range matchers { match, err := matcher(resolver, classifier, location) if err != nil { @@ -72,7 +72,7 @@ func evidenceMatchers(matchers ...evidenceMatcher) evidenceMatcher { func fileNameTemplateVersionMatcher(fileNamePattern string, contentTemplate string) evidenceMatcher { pat := regexp.MustCompile(fileNamePattern) - return func(resolver source.FileResolver, classifier classifier, location source.Location) ([]pkg.Package, error) { + return func(resolver file.Resolver, classifier classifier, location file.Location) ([]pkg.Package, error) { if !pat.MatchString(location.RealPath) { return nil, nil } @@ -118,7 +118,7 @@ func fileNameTemplateVersionMatcher(fileNamePattern string, contentTemplate stri func fileContentsVersionMatcher(pattern string) evidenceMatcher { pat := regexp.MustCompile(pattern) - return func(resolver source.FileResolver, classifier classifier, location source.Location) ([]pkg.Package, error) { + return func(resolver file.Resolver, classifier classifier, location file.Location) ([]pkg.Package, error) { contents, err := getContents(resolver, location) if err != nil { 
return nil, fmt.Errorf("unable to get read contents for file: %w", err) @@ -138,7 +138,7 @@ func fileContentsVersionMatcher(pattern string) evidenceMatcher { //nolint:gocognit func sharedLibraryLookup(sharedLibraryPattern string, sharedLibraryMatcher evidenceMatcher) evidenceMatcher { pat := regexp.MustCompile(sharedLibraryPattern) - return func(resolver source.FileResolver, classifier classifier, location source.Location) (packages []pkg.Package, _ error) { + return func(resolver file.Resolver, classifier classifier, location file.Location) (packages []pkg.Package, _ error) { libs, err := sharedLibraries(resolver, location) if err != nil { return nil, err @@ -159,7 +159,7 @@ func sharedLibraryLookup(sharedLibraryPattern string, sharedLibraryMatcher evide } for _, p := range pkgs { // set the source binary as the first location - locationSet := source.NewLocationSet(location) + locationSet := file.NewLocationSet(location) locationSet.Add(p.Locations.ToSlice()...) p.Locations = locationSet meta, _ := p.Metadata.(pkg.BinaryMetadata) @@ -187,7 +187,7 @@ func mustPURL(purl string) packageurl.PackageURL { return p } -func getContents(resolver source.FileResolver, location source.Location) ([]byte, error) { +func getContents(resolver file.Resolver, location file.Location) ([]byte, error) { reader, err := resolver.FileContentsByLocation(location) if err != nil { return nil, err @@ -216,7 +216,7 @@ func singleCPE(cpeString string) []cpe.CPE { // sharedLibraries returns a list of all shared libraries found within a binary, currently // supporting: elf, macho, and windows pe -func sharedLibraries(resolver source.FileResolver, location source.Location) ([]string, error) { +func sharedLibraries(resolver file.Resolver, location file.Location) ([]string, error) { contents, err := getContents(resolver, location) if err != nil { return nil, err diff --git a/syft/pkg/cataloger/binary/classifier_test.go b/syft/pkg/cataloger/binary/classifier_test.go index 82260a0e633..fbf88c3b1a8 
100644 --- a/syft/pkg/cataloger/binary/classifier_test.go +++ b/syft/pkg/cataloger/binary/classifier_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/syft/syft/cpe" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) func Test_ClassifierCPEs(t *testing.T) { @@ -63,12 +63,12 @@ func Test_ClassifierCPEs(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - resolver := source.NewMockResolverForPaths(test.fixture) - locations, err := resolver.FilesByPath(test.fixture) + resolver := file.NewMockResolverForPaths(test.fixture) + ls, err := resolver.FilesByPath(test.fixture) require.NoError(t, err) - require.Len(t, locations, 1) + require.Len(t, ls, 1) - pkgs, err := test.classifier.EvidenceMatcher(resolver, test.classifier, locations[0]) + pkgs, err := test.classifier.EvidenceMatcher(resolver, test.classifier, ls[0]) require.NoError(t, err) require.Len(t, pkgs, 1) diff --git a/syft/pkg/cataloger/binary/package.go b/syft/pkg/cataloger/binary/package.go index 7c1fb7abc60..a677b02a623 100644 --- a/syft/pkg/cataloger/binary/package.go +++ b/syft/pkg/cataloger/binary/package.go @@ -4,11 +4,11 @@ import ( "reflect" "github.com/anchore/syft/syft/cpe" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackage(classifier classifier, location source.Location, matchMetadata map[string]string) *pkg.Package { +func newPackage(classifier classifier, location file.Location, matchMetadata map[string]string) *pkg.Package { version, ok := matchMetadata["version"] if !ok { return nil @@ -26,7 +26,7 @@ func newPackage(classifier classifier, location source.Location, matchMetadata m p := pkg.Package{ Name: classifier.Package, Version: version, - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), ), 
Type: pkg.BinaryPkg, diff --git a/syft/pkg/cataloger/catalog.go b/syft/pkg/cataloger/catalog.go index 793efab369b..f982223e1a4 100644 --- a/syft/pkg/cataloger/catalog.go +++ b/syft/pkg/cataloger/catalog.go @@ -14,10 +14,10 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/event" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/common/cpe" - "github.com/anchore/syft/syft/source" ) // Monitor provides progress-related data for observing the progress of a Catalog() call (published on the event bus). @@ -50,7 +50,7 @@ func newMonitor() (*progress.Manual, *progress.Manual) { return &filesProcessed, &packagesDiscovered } -func runCataloger(cataloger pkg.Cataloger, resolver source.FileResolver) (catalogerResult *catalogResult, err error) { +func runCataloger(cataloger pkg.Cataloger, resolver file.Resolver) (catalogerResult *catalogResult, err error) { // handle individual cataloger panics defer func() { if e := recover(); e != nil { @@ -105,7 +105,7 @@ func runCataloger(cataloger pkg.Cataloger, resolver source.FileResolver) (catalo // request. 
// //nolint:funlen -func Catalog(resolver source.FileResolver, _ *linux.Release, parallelism int, catalogers ...pkg.Cataloger) (*pkg.Collection, []artifact.Relationship, error) { +func Catalog(resolver file.Resolver, _ *linux.Release, parallelism int, catalogers ...pkg.Cataloger) (*pkg.Collection, []artifact.Relationship, error) { catalog := pkg.NewCollection() var allRelationships []artifact.Relationship @@ -182,13 +182,13 @@ func Catalog(resolver source.FileResolver, _ *linux.Release, parallelism int, ca return catalog, allRelationships, errs } -func packageFileOwnershipRelationships(p pkg.Package, resolver source.FilePathResolver) ([]artifact.Relationship, error) { +func packageFileOwnershipRelationships(p pkg.Package, resolver file.PathResolver) ([]artifact.Relationship, error) { fileOwner, ok := p.Metadata.(pkg.FileOwner) if !ok { return nil, nil } - locations := map[artifact.ID]source.Location{} + locations := map[artifact.ID]file.Location{} for _, path := range fileOwner.OwnedFiles() { pathRefs, err := resolver.FilesByPath(path) diff --git a/syft/pkg/cataloger/catalog_test.go b/syft/pkg/cataloger/catalog_test.go index 9de59f36d74..950ec133aba 100644 --- a/syft/pkg/cataloger/catalog_test.go +++ b/syft/pkg/cataloger/catalog_test.go @@ -6,14 +6,14 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) func Test_CatalogPanicHandling(t *testing.T) { catalog, relationships, err := Catalog( - source.NewMockResolverForPaths(), + file.NewMockResolverForPaths(), &linux.Release{}, 1, panickingCataloger{}, @@ -32,7 +32,7 @@ func (p panickingCataloger) Name() string { return "panicking-cataloger" } -func (p panickingCataloger) Catalog(_ source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { +func (p panickingCataloger) Catalog(_ file.Resolver) 
([]pkg.Package, []artifact.Relationship, error) { panic("something bad happened") } @@ -44,7 +44,7 @@ func (p returningCataloger) Name() string { return "returning-cataloger" } -func (p returningCataloger) Catalog(_ source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { +func (p returningCataloger) Catalog(_ file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { pkg1 := pkg.Package{ Name: "package-1", Version: "1.0", diff --git a/syft/pkg/cataloger/cataloger_test.go b/syft/pkg/cataloger/cataloger_test.go index 35cde7797d0..4f4b6ce83fd 100644 --- a/syft/pkg/cataloger/cataloger_test.go +++ b/syft/pkg/cataloger/cataloger_test.go @@ -6,8 +6,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) var _ pkg.Cataloger = (*dummy)(nil) @@ -20,7 +20,7 @@ func (d dummy) Name() string { return d.name } -func (d dummy) Catalog(_ source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { +func (d dummy) Catalog(_ file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { panic("not implemented") } diff --git a/syft/pkg/cataloger/cpp/package.go b/syft/pkg/cataloger/cpp/package.go index ba54add772f..dbbdd0b90c8 100644 --- a/syft/pkg/cataloger/cpp/package.go +++ b/syft/pkg/cataloger/cpp/package.go @@ -4,11 +4,11 @@ import ( "strings" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newConanfilePackage(m pkg.ConanMetadata, locations ...source.Location) *pkg.Package { +func newConanfilePackage(m pkg.ConanMetadata, locations ...file.Location) *pkg.Package { fields := strings.Split(strings.TrimSpace(m.Ref), "/") if len(fields) < 2 { return nil @@ -23,7 +23,7 @@ func newConanfilePackage(m pkg.ConanMetadata, locations ...source.Location) *pkg p := pkg.Package{ Name: 
pkgName, Version: pkgVersion, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(pkgName, pkgVersion), Language: pkg.CPP, Type: pkg.ConanPkg, @@ -36,7 +36,7 @@ func newConanfilePackage(m pkg.ConanMetadata, locations ...source.Location) *pkg return &p } -func newConanlockPackage(m pkg.ConanLockMetadata, locations ...source.Location) *pkg.Package { +func newConanlockPackage(m pkg.ConanLockMetadata, locations ...file.Location) *pkg.Package { fields := strings.Split(strings.Split(m.Ref, "@")[0], "/") if len(fields) < 2 { return nil @@ -51,7 +51,7 @@ func newConanlockPackage(m pkg.ConanLockMetadata, locations ...source.Location) p := pkg.Package{ Name: pkgName, Version: pkgVersion, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(pkgName, pkgVersion), Language: pkg.CPP, Type: pkg.ConanPkg, diff --git a/syft/pkg/cataloger/cpp/parse_conanfile.go b/syft/pkg/cataloger/cpp/parse_conanfile.go index fdaf08026be..f9ae172f37b 100644 --- a/syft/pkg/cataloger/cpp/parse_conanfile.go +++ b/syft/pkg/cataloger/cpp/parse_conanfile.go @@ -8,9 +8,9 @@ import ( "strings" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseConanfile @@ -20,7 +20,7 @@ type Conanfile struct { } // parseConanfile is a parser function for conanfile.txt contents, returning all packages discovered. 
-func parseConanfile(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseConanfile(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { r := bufio.NewReader(reader) inRequirements := false var pkgs []pkg.Package diff --git a/syft/pkg/cataloger/cpp/parse_conanfile_test.go b/syft/pkg/cataloger/cpp/parse_conanfile_test.go index edb9ff30a41..bca49223a5d 100644 --- a/syft/pkg/cataloger/cpp/parse_conanfile_test.go +++ b/syft/pkg/cataloger/cpp/parse_conanfile_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseConanfile(t *testing.T) { fixture := "test-fixtures/conanfile.txt" - fixtureLocationSet := source.NewLocationSet(source.NewLocation(fixture)) + fixtureLocationSet := file.NewLocationSet(file.NewLocation(fixture)) expected := []pkg.Package{ { Name: "catch2", diff --git a/syft/pkg/cataloger/cpp/parse_conanlock.go b/syft/pkg/cataloger/cpp/parse_conanlock.go index b3bcf31d53e..511000ea16a 100644 --- a/syft/pkg/cataloger/cpp/parse_conanlock.go +++ b/syft/pkg/cataloger/cpp/parse_conanlock.go @@ -5,9 +5,9 @@ import ( "strings" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseConanlock @@ -30,7 +30,7 @@ type conanLock struct { } // parseConanlock is a parser function for conan.lock contents, returning all packages discovered. 
-func parseConanlock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseConanlock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package var cl conanLock if err := json.NewDecoder(reader).Decode(&cl); err != nil { diff --git a/syft/pkg/cataloger/cpp/parse_conanlock_test.go b/syft/pkg/cataloger/cpp/parse_conanlock_test.go index c5a57fa795e..b699081dee5 100644 --- a/syft/pkg/cataloger/cpp/parse_conanlock_test.go +++ b/syft/pkg/cataloger/cpp/parse_conanlock_test.go @@ -4,9 +4,9 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseConanlock(t *testing.T) { @@ -16,7 +16,7 @@ func TestParseConanlock(t *testing.T) { Name: "zlib", Version: "1.2.12", PURL: "pkg:conan/zlib@1.2.12", - Locations: source.NewLocationSet(source.NewLocation(fixture)), + Locations: file.NewLocationSet(file.NewLocation(fixture)), Language: pkg.CPP, Type: pkg.ConanPkg, MetadataType: pkg.ConanLockMetadataType, diff --git a/syft/pkg/cataloger/dart/package.go b/syft/pkg/cataloger/dart/package.go index 1f78045536a..f01d80f602c 100644 --- a/syft/pkg/cataloger/dart/package.go +++ b/syft/pkg/cataloger/dart/package.go @@ -2,11 +2,11 @@ package dart import ( "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPubspecLockPackage(name string, raw pubspecLockPackage, locations ...source.Location) pkg.Package { +func newPubspecLockPackage(name string, raw pubspecLockPackage, locations ...file.Location) pkg.Package { metadata := pkg.DartPubMetadata{ Name: name, Version: raw.Version, @@ -17,7 +17,7 @@ func 
newPubspecLockPackage(name string, raw pubspecLockPackage, locations ...sou p := pkg.Package{ Name: name, Version: raw.Version, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(metadata), Language: pkg.Dart, Type: pkg.DartPubPkg, diff --git a/syft/pkg/cataloger/dart/parse_pubspec_lock.go b/syft/pkg/cataloger/dart/parse_pubspec_lock.go index bde8caf663d..3493f8d1df8 100644 --- a/syft/pkg/cataloger/dart/parse_pubspec_lock.go +++ b/syft/pkg/cataloger/dart/parse_pubspec_lock.go @@ -9,9 +9,9 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parsePubspecLock @@ -38,7 +38,7 @@ type pubspecLockDescription struct { ResolvedRef string `yaml:"resolved-ref" mapstructure:"resolved-ref"` } -func parsePubspecLock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parsePubspecLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package dec := yaml.NewDecoder(reader) diff --git a/syft/pkg/cataloger/dart/parse_pubspec_lock_test.go b/syft/pkg/cataloger/dart/parse_pubspec_lock_test.go index bbef7e0492c..a5a972e80eb 100644 --- a/syft/pkg/cataloger/dart/parse_pubspec_lock_test.go +++ b/syft/pkg/cataloger/dart/parse_pubspec_lock_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParsePubspecLock(t *testing.T) { fixture := "test-fixtures/pubspec.lock" - fixtureLocationSet := 
source.NewLocationSet(source.NewLocation(fixture)) + fixtureLocationSet := file.NewLocationSet(file.NewLocation(fixture)) expected := []pkg.Package{ { Name: "ale", diff --git a/syft/pkg/cataloger/deb/cataloger_test.go b/syft/pkg/cataloger/deb/cataloger_test.go index ab3415d5402..64a3c5f8768 100644 --- a/syft/pkg/cataloger/deb/cataloger_test.go +++ b/syft/pkg/cataloger/deb/cataloger_test.go @@ -6,11 +6,10 @@ import ( "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestDpkgCataloger(t *testing.T) { - licenseLocation := source.NewVirtualLocation("/usr/share/doc/libpam-runtime/copyright", "/usr/share/doc/libpam-runtime/copyright") + licenseLocation := file.NewVirtualLocation("/usr/share/doc/libpam-runtime/copyright", "/usr/share/doc/libpam-runtime/copyright") expected := []pkg.Package{ { Name: "libpam-runtime", @@ -21,11 +20,11 @@ func TestDpkgCataloger(t *testing.T) { pkg.NewLicenseFromLocations("GPL-2", licenseLocation), pkg.NewLicenseFromLocations("LGPL-2.1", licenseLocation), ), - Locations: source.NewLocationSet( - source.NewVirtualLocation("/var/lib/dpkg/status", "/var/lib/dpkg/status"), - source.NewVirtualLocation("/var/lib/dpkg/info/libpam-runtime.md5sums", "/var/lib/dpkg/info/libpam-runtime.md5sums"), - source.NewVirtualLocation("/var/lib/dpkg/info/libpam-runtime.conffiles", "/var/lib/dpkg/info/libpam-runtime.conffiles"), - source.NewVirtualLocation("/usr/share/doc/libpam-runtime/copyright", "/usr/share/doc/libpam-runtime/copyright"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/var/lib/dpkg/status", "/var/lib/dpkg/status"), + file.NewVirtualLocation("/var/lib/dpkg/info/libpam-runtime.md5sums", "/var/lib/dpkg/info/libpam-runtime.md5sums"), + file.NewVirtualLocation("/var/lib/dpkg/info/libpam-runtime.conffiles", "/var/lib/dpkg/info/libpam-runtime.conffiles"), + 
file.NewVirtualLocation("/usr/share/doc/libpam-runtime/copyright", "/usr/share/doc/libpam-runtime/copyright"), ), Type: pkg.DebPkg, MetadataType: pkg.DpkgMetadataType, diff --git a/syft/pkg/cataloger/deb/package.go b/syft/pkg/cataloger/deb/package.go index 1685051c959..b37d8f46b7c 100644 --- a/syft/pkg/cataloger/deb/package.go +++ b/syft/pkg/cataloger/deb/package.go @@ -10,9 +10,9 @@ import ( "github.com/anchore/packageurl-go" "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) const ( @@ -21,14 +21,14 @@ const ( docsPath = "/usr/share/doc" ) -func newDpkgPackage(d pkg.DpkgMetadata, dbLocation source.Location, resolver source.FileResolver, release *linux.Release) pkg.Package { +func newDpkgPackage(d pkg.DpkgMetadata, dbLocation file.Location, resolver file.Resolver, release *linux.Release) pkg.Package { // TODO: separate pr to license refactor, but explore extracting dpkg-specific license parsing into a separate function licenses := make([]pkg.License, 0) p := pkg.Package{ Name: d.Package, Version: d.Version, Licenses: pkg.NewLicenseSet(licenses...), - Locations: source.NewLocationSet(dbLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(dbLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), PURL: packageURL(d, release), Type: pkg.DebPkg, MetadataType: pkg.DpkgMetadataType, @@ -83,7 +83,7 @@ func packageURL(m pkg.DpkgMetadata, distro *linux.Release) string { ).ToString() } -func addLicenses(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) { +func addLicenses(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) { metadata, ok := p.Metadata.(pkg.DpkgMetadata) if !ok { log.WithFields("package", p).Warn("unable to extract DPKG metadata to add 
licenses") @@ -105,7 +105,7 @@ func addLicenses(resolver source.FileResolver, dbLocation source.Location, p *pk } } -func mergeFileListing(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) { +func mergeFileListing(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) { metadata, ok := p.Metadata.(pkg.DpkgMetadata) if !ok { log.WithFields("package", p).Warn("unable to extract DPKG metadata to file listing") @@ -137,10 +137,10 @@ loopNewFiles: p.Locations.Add(infoLocations...) } -func getAdditionalFileListing(resolver source.FileResolver, dbLocation source.Location, m pkg.DpkgMetadata) ([]pkg.DpkgFileRecord, []source.Location) { +func getAdditionalFileListing(resolver file.Resolver, dbLocation file.Location, m pkg.DpkgMetadata) ([]pkg.DpkgFileRecord, []file.Location) { // ensure the default value for a collection is never nil since this may be shown as JSON var files = make([]pkg.DpkgFileRecord, 0) - var locations []source.Location + var locations []file.Location md5Reader, md5Location := fetchMd5Contents(resolver, dbLocation, m) @@ -168,7 +168,7 @@ func getAdditionalFileListing(resolver source.FileResolver, dbLocation source.Lo } //nolint:dupl -func fetchMd5Contents(resolver source.FileResolver, dbLocation source.Location, m pkg.DpkgMetadata) (io.ReadCloser, *source.Location) { +func fetchMd5Contents(resolver file.Resolver, dbLocation file.Location, m pkg.DpkgMetadata) (io.ReadCloser, *file.Location) { var md5Reader io.ReadCloser var err error @@ -204,7 +204,7 @@ func fetchMd5Contents(resolver source.FileResolver, dbLocation source.Location, } //nolint:dupl -func fetchConffileContents(resolver source.FileResolver, dbLocation source.Location, m pkg.DpkgMetadata) (io.ReadCloser, *source.Location) { +func fetchConffileContents(resolver file.Resolver, dbLocation file.Location, m pkg.DpkgMetadata) (io.ReadCloser, *file.Location) { var reader io.ReadCloser var err error @@ -239,7 +239,7 @@ func fetchConffileContents(resolver 
source.FileResolver, dbLocation source.Locat return reader, &l } -func fetchCopyrightContents(resolver source.FileResolver, dbLocation source.Location, m pkg.DpkgMetadata) (io.ReadCloser, *source.Location) { +func fetchCopyrightContents(resolver file.Resolver, dbLocation file.Location, m pkg.DpkgMetadata) (io.ReadCloser, *file.Location) { if resolver == nil { return nil, nil } diff --git a/syft/pkg/cataloger/deb/parse_dpkg_db.go b/syft/pkg/cataloger/deb/parse_dpkg_db.go index cd4c1ff535e..0a7dccb2d30 100644 --- a/syft/pkg/cataloger/deb/parse_dpkg_db.go +++ b/syft/pkg/cataloger/deb/parse_dpkg_db.go @@ -14,9 +14,9 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var ( @@ -24,7 +24,7 @@ var ( sourceRegexp = regexp.MustCompile(`(?P\S+)( \((?P.*)\))?`) ) -func parseDpkgDB(resolver source.FileResolver, env *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseDpkgDB(resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { metadata, err := parseDpkgStatus(reader) if err != nil { return nil, nil, fmt.Errorf("unable to catalog dpkg DB=%q: %w", reader.RealPath, err) diff --git a/syft/pkg/cataloger/deb/parse_dpkg_db_test.go b/syft/pkg/cataloger/deb/parse_dpkg_db_test.go index fc4e51633ad..0a2c58bd895 100644 --- a/syft/pkg/cataloger/deb/parse_dpkg_db_test.go +++ b/syft/pkg/cataloger/deb/parse_dpkg_db_test.go @@ -15,7 +15,6 @@ import ( "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func Test_parseDpkgStatus(t *testing.T) { @@ -308,7 +307,7 @@ 
Installed-Size: 10kib Type: "deb", PURL: "pkg:deb/debian/apt?distro=debian-10", Licenses: pkg.NewLicenseSet(), - Locations: source.NewLocationSet(source.NewLocation("place")), + Locations: file.NewLocationSet(file.NewLocation("place")), MetadataType: "DpkgMetadata", Metadata: pkg.DpkgMetadata{ Package: "apt", diff --git a/syft/pkg/cataloger/dotnet/package.go b/syft/pkg/cataloger/dotnet/package.go index 15ef7b71071..c8cb261a6fd 100644 --- a/syft/pkg/cataloger/dotnet/package.go +++ b/syft/pkg/cataloger/dotnet/package.go @@ -4,11 +4,11 @@ import ( "strings" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newDotnetDepsPackage(nameVersion string, lib dotnetDepsLibrary, locations ...source.Location) *pkg.Package { +func newDotnetDepsPackage(nameVersion string, lib dotnetDepsLibrary, locations ...file.Location) *pkg.Package { if lib.Type != "package" { return nil } @@ -28,7 +28,7 @@ func newDotnetDepsPackage(nameVersion string, lib dotnetDepsLibrary, locations . 
p := &pkg.Package{ Name: name, Version: version, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(m), Language: pkg.Dotnet, Type: pkg.DotnetPkg, diff --git a/syft/pkg/cataloger/dotnet/parse_dotnet_deps.go b/syft/pkg/cataloger/dotnet/parse_dotnet_deps.go index 0e322d3db25..2c7e1cf0bf9 100644 --- a/syft/pkg/cataloger/dotnet/parse_dotnet_deps.go +++ b/syft/pkg/cataloger/dotnet/parse_dotnet_deps.go @@ -6,9 +6,9 @@ import ( "sort" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseDotnetDeps @@ -24,7 +24,7 @@ type dotnetDepsLibrary struct { HashPath string `json:"hashPath"` } -func parseDotnetDeps(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseDotnetDeps(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package dec := json.NewDecoder(reader) diff --git a/syft/pkg/cataloger/dotnet/parse_dotnet_deps_test.go b/syft/pkg/cataloger/dotnet/parse_dotnet_deps_test.go index 0065f110f6b..b8535374472 100644 --- a/syft/pkg/cataloger/dotnet/parse_dotnet_deps_test.go +++ b/syft/pkg/cataloger/dotnet/parse_dotnet_deps_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseDotnetDeps(t *testing.T) { fixture := "test-fixtures/TestLibrary.deps.json" - fixtureLocationSet := source.NewLocationSet(source.NewLocation(fixture)) + fixtureLocationSet := file.NewLocationSet(file.NewLocation(fixture)) expected := 
[]pkg.Package{ { Name: "AWSSDK.Core", diff --git a/syft/pkg/cataloger/elixir/package.go b/syft/pkg/cataloger/elixir/package.go index fc1ca514736..85dcd1f4253 100644 --- a/syft/pkg/cataloger/elixir/package.go +++ b/syft/pkg/cataloger/elixir/package.go @@ -2,16 +2,16 @@ package elixir import ( "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackage(d pkg.MixLockMetadata, locations ...source.Location) pkg.Package { +func newPackage(d pkg.MixLockMetadata, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: d.Name, Version: d.Version, Language: pkg.Elixir, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(d), Type: pkg.HexPkg, MetadataType: pkg.MixLockMetadataType, diff --git a/syft/pkg/cataloger/elixir/parse_mix_lock.go b/syft/pkg/cataloger/elixir/parse_mix_lock.go index 6de1fc8f703..46b4f4aa36a 100644 --- a/syft/pkg/cataloger/elixir/parse_mix_lock.go +++ b/syft/pkg/cataloger/elixir/parse_mix_lock.go @@ -9,9 +9,9 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // integrity check @@ -20,7 +20,7 @@ var _ generic.Parser = parseMixLock var mixLockDelimiter = regexp.MustCompile(`[%{}\n" ,:]+`) // parseMixLock parses a mix.lock and returns the discovered Elixir packages. 
-func parseMixLock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseMixLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { r := bufio.NewReader(reader) var packages []pkg.Package diff --git a/syft/pkg/cataloger/elixir/parse_mix_lock_test.go b/syft/pkg/cataloger/elixir/parse_mix_lock_test.go index 2f5de43d4e1..4b01f04cd1e 100644 --- a/syft/pkg/cataloger/elixir/parse_mix_lock_test.go +++ b/syft/pkg/cataloger/elixir/parse_mix_lock_test.go @@ -4,13 +4,13 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseMixLock(t *testing.T) { - locations := source.NewLocationSet(source.NewLocation("test-fixtures/mix.lock")) + locations := file.NewLocationSet(file.NewLocation("test-fixtures/mix.lock")) expected := []pkg.Package{ { Name: "castore", diff --git a/syft/pkg/cataloger/erlang/package.go b/syft/pkg/cataloger/erlang/package.go index 2eb89053819..5fa28e59503 100644 --- a/syft/pkg/cataloger/erlang/package.go +++ b/syft/pkg/cataloger/erlang/package.go @@ -2,16 +2,16 @@ package erlang import ( "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackage(d pkg.RebarLockMetadata, locations ...source.Location) pkg.Package { +func newPackage(d pkg.RebarLockMetadata, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: d.Name, Version: d.Version, Language: pkg.Erlang, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(d), Type: pkg.HexPkg, MetadataType: pkg.RebarLockMetadataType, diff --git 
a/syft/pkg/cataloger/erlang/parse_rebar_lock.go b/syft/pkg/cataloger/erlang/parse_rebar_lock.go index 547a4d3ec4f..a2066f2cad5 100644 --- a/syft/pkg/cataloger/erlang/parse_rebar_lock.go +++ b/syft/pkg/cataloger/erlang/parse_rebar_lock.go @@ -3,15 +3,15 @@ package erlang import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // parseRebarLock parses a rebar.lock and returns the discovered Elixir packages. // //nolint:funlen -func parseRebarLock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseRebarLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { doc, err := parseErlang(reader) if err != nil { return nil, nil, err diff --git a/syft/pkg/cataloger/erlang/parse_rebar_lock_test.go b/syft/pkg/cataloger/erlang/parse_rebar_lock_test.go index b1293143277..dc4ee9104a7 100644 --- a/syft/pkg/cataloger/erlang/parse_rebar_lock_test.go +++ b/syft/pkg/cataloger/erlang/parse_rebar_lock_test.go @@ -4,9 +4,9 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseRebarLock(t *testing.T) { @@ -263,7 +263,7 @@ func TestParseRebarLock(t *testing.T) { var expectedRelationships []artifact.Relationship for idx := range test.expected { - test.expected[idx].Locations = source.NewLocationSet(source.NewLocation(test.fixture)) + test.expected[idx].Locations = file.NewLocationSet(file.NewLocation(test.fixture)) } pkgtest.TestFileParser(t, test.fixture, parseRebarLock, test.expected, expectedRelationships) 
diff --git a/syft/pkg/cataloger/generic/cataloger.go b/syft/pkg/cataloger/generic/cataloger.go index d2069ffff52..b898133f74d 100644 --- a/syft/pkg/cataloger/generic/cataloger.go +++ b/syft/pkg/cataloger/generic/cataloger.go @@ -4,15 +4,15 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -type processor func(resolver source.FileResolver, env Environment) []request +type processor func(resolver file.Resolver, env Environment) []request type request struct { - source.Location + file.Location Parser } @@ -25,7 +25,7 @@ type Cataloger struct { func (c *Cataloger) WithParserByGlobs(parser Parser, globs ...string) *Cataloger { c.processor = append(c.processor, - func(resolver source.FileResolver, env Environment) []request { + func(resolver file.Resolver, env Environment) []request { var requests []request for _, g := range globs { log.WithFields("glob", g).Trace("searching for paths matching glob") @@ -45,7 +45,7 @@ func (c *Cataloger) WithParserByGlobs(parser Parser, globs ...string) *Cataloger func (c *Cataloger) WithParserByMimeTypes(parser Parser, types ...string) *Cataloger { c.processor = append(c.processor, - func(resolver source.FileResolver, env Environment) []request { + func(resolver file.Resolver, env Environment) []request { var requests []request log.WithFields("mimetypes", types).Trace("searching for paths matching mimetype") matches, err := resolver.FilesByMIMEType(types...) 
@@ -62,7 +62,7 @@ func (c *Cataloger) WithParserByMimeTypes(parser Parser, types ...string) *Catal func (c *Cataloger) WithParserByPath(parser Parser, paths ...string) *Cataloger { c.processor = append(c.processor, - func(resolver source.FileResolver, env Environment) []request { + func(resolver file.Resolver, env Environment) []request { var requests []request for _, p := range paths { log.WithFields("path", p).Trace("searching for path") @@ -80,7 +80,7 @@ func (c *Cataloger) WithParserByPath(parser Parser, paths ...string) *Cataloger return c } -func makeRequests(parser Parser, locations []source.Location) []request { +func makeRequests(parser Parser, locations []file.Location) []request { var requests []request for _, l := range locations { requests = append(requests, request{ @@ -104,7 +104,7 @@ func (c *Cataloger) Name() string { } // Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing the catalog source. -func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { +func (c *Cataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { var packages []pkg.Package var relationships []artifact.Relationship @@ -126,7 +126,7 @@ func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []arti continue } - discoveredPackages, discoveredRelationships, err := parser(resolver, &env, source.NewLocationReadCloser(location, contentReader)) + discoveredPackages, discoveredRelationships, err := parser(resolver, &env, file.NewLocationReadCloser(location, contentReader)) internal.CloseAndLogError(contentReader, location.VirtualPath) if err != nil { logger.WithFields("location", location.RealPath, "error", err).Warnf("cataloger failed") @@ -144,7 +144,7 @@ func (c *Cataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []arti } // selectFiles takes a set of file trees and resolves and file 
references of interest for future cataloging -func (c *Cataloger) selectFiles(resolver source.FileResolver) []request { +func (c *Cataloger) selectFiles(resolver file.Resolver) []request { var requests []request for _, proc := range c.processor { requests = append(requests, proc(resolver, Environment{})...) diff --git a/syft/pkg/cataloger/generic/cataloger_test.go b/syft/pkg/cataloger/generic/cataloger_test.go index fd864787a42..d2aabf28c8d 100644 --- a/syft/pkg/cataloger/generic/cataloger_test.go +++ b/syft/pkg/cataloger/generic/cataloger_test.go @@ -2,22 +2,22 @@ package generic import ( "fmt" - "io/ioutil" + "io" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) func Test_Cataloger(t *testing.T) { allParsedPaths := make(map[string]bool) - parser := func(resolver source.FileResolver, env *Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { + parser := func(resolver file.Resolver, env *Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { allParsedPaths[reader.AccessPath()] = true - contents, err := ioutil.ReadAll(reader) + contents, err := io.ReadAll(reader) require.NoError(t, err) if len(contents) == 0 { @@ -26,7 +26,7 @@ func Test_Cataloger(t *testing.T) { p := pkg.Package{ Name: string(contents), - Locations: source.NewLocationSet(reader.Location), + Locations: file.NewLocationSet(reader.Location), } r := artifact.Relationship{ From: p, @@ -40,7 +40,7 @@ func Test_Cataloger(t *testing.T) { upstream := "some-other-cataloger" expectedSelection := []string{"test-fixtures/last/path.txt", "test-fixtures/another-path.txt", "test-fixtures/a-path.txt", "test-fixtures/empty.txt"} - resolver := source.NewMockResolverForPaths(expectedSelection...) 
+ resolver := file.NewMockResolverForPaths(expectedSelection...) cataloger := NewCataloger(upstream). WithParserByPath(parser, "test-fixtures/another-path.txt", "test-fixtures/last/path.txt"). WithParserByGlobs(parser, "**/a-path.txt", "**/empty.txt") diff --git a/syft/pkg/cataloger/generic/parser.go b/syft/pkg/cataloger/generic/parser.go index 32b62f579aa..c95808fc175 100644 --- a/syft/pkg/cataloger/generic/parser.go +++ b/syft/pkg/cataloger/generic/parser.go @@ -2,13 +2,13 @@ package generic import ( "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) type Environment struct { LinuxRelease *linux.Release } -type Parser func(source.FileResolver, *Environment, source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) +type Parser func(file.Resolver, *Environment, file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) diff --git a/syft/pkg/cataloger/golang/cataloger.go b/syft/pkg/cataloger/golang/cataloger.go index d28a9ed9aae..bde2a9b5715 100644 --- a/syft/pkg/cataloger/golang/cataloger.go +++ b/syft/pkg/cataloger/golang/cataloger.go @@ -7,9 +7,9 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/event" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // NewGoModFileCataloger returns a new Go module cataloger object. 
@@ -45,7 +45,7 @@ func (p *progressingCataloger) Name() string { return p.cataloger.Name() } -func (p *progressingCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { +func (p *progressingCataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { defer p.progress.SetCompleted() return p.cataloger.Catalog(resolver) } diff --git a/syft/pkg/cataloger/golang/licenses.go b/syft/pkg/cataloger/golang/licenses.go index e85ad7ec7f0..829a73dd3f3 100644 --- a/syft/pkg/cataloger/golang/licenses.go +++ b/syft/pkg/cataloger/golang/licenses.go @@ -22,13 +22,14 @@ import ( "github.com/anchore/syft/internal/licenses" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/event" + "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/internal/fileresolver" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) type goLicenses struct { opts GoCatalogerOpts - localModCacheResolver source.WritableFileResolver + localModCacheResolver file.WritableResolver progress *event.CatalogerTask } @@ -55,27 +56,27 @@ func remotesForModule(proxies []string, noProxy []string, module string) []strin return proxies } -func modCacheResolver(modCacheDir string) source.WritableFileResolver { - var r source.WritableFileResolver +func modCacheResolver(modCacheDir string) file.WritableResolver { + var r file.WritableResolver if modCacheDir == "" { log.Trace("unable to determine mod cache directory, skipping mod cache resolver") - r = source.EmptyResolver{} + r = fileresolver.Empty{} } else { stat, err := os.Stat(modCacheDir) if os.IsNotExist(err) || stat == nil || !stat.IsDir() { log.Tracef("unable to open mod cache directory: %s, skipping mod cache resolver", modCacheDir) - r = source.EmptyResolver{} + r = fileresolver.Empty{} } else { - r = source.NewUnindexedDirectoryResolver(modCacheDir) + r = fileresolver.NewFromUnindexedDirectory(modCacheDir) } } return 
r } -func (c *goLicenses) getLicenses(resolver source.FileResolver, moduleName, moduleVersion string) (licenses []pkg.License, err error) { +func (c *goLicenses) getLicenses(resolver file.Resolver, moduleName, moduleVersion string) (licenses []pkg.License, err error) { licenses, err = findLicenses(resolver, fmt.Sprintf(`**/go/pkg/mod/%s@%s/*`, processCaps(moduleName), moduleVersion), ) @@ -131,7 +132,7 @@ func (c *goLicenses) getLicensesFromRemote(moduleName, moduleVersion string) ([] if err != nil { return err } - return c.localModCacheResolver.Write(source.NewLocation(path.Join(dir, filePath)), f) + return c.localModCacheResolver.Write(file.NewLocation(path.Join(dir, filePath)), f) }) if err != nil { @@ -156,7 +157,7 @@ func requireCollection(licenses []pkg.License) []pkg.License { return licenses } -func findLicenses(resolver source.FileResolver, globMatch string) (out []pkg.License, err error) { +func findLicenses(resolver file.Resolver, globMatch string) (out []pkg.License, err error) { out = make([]pkg.License, 0) if resolver == nil { return diff --git a/syft/pkg/cataloger/golang/licenses_test.go b/syft/pkg/cataloger/golang/licenses_test.go index 8f4545198bf..37df6547dee 100644 --- a/syft/pkg/cataloger/golang/licenses_test.go +++ b/syft/pkg/cataloger/golang/licenses_test.go @@ -14,14 +14,15 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/syft/internal" + "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/internal/fileresolver" "github.com/anchore/syft/syft/license" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) func Test_LocalLicenseSearch(t *testing.T) { - loc1 := source.NewLocation("github.com/someorg/somename@v0.3.2/LICENSE") - loc2 := source.NewLocation("github.com/!cap!o!r!g/!cap!project@v4.111.5/LICENSE.txt") + loc1 := file.NewLocation("github.com/someorg/somename@v0.3.2/LICENSE") + loc2 := 
file.NewLocation("github.com/!cap!o!r!g/!cap!project@v4.111.5/LICENSE.txt") tests := []struct { name string @@ -35,7 +36,7 @@ func Test_LocalLicenseSearch(t *testing.T) { Value: "Apache-2.0", SPDXExpression: "Apache-2.0", Type: license.Concluded, - Locations: source.NewLocationSet(loc1), + Locations: file.NewLocationSet(loc1), URLs: internal.NewStringSet(), }, }, @@ -46,7 +47,7 @@ func Test_LocalLicenseSearch(t *testing.T) { Value: "MIT", SPDXExpression: "MIT", Type: license.Concluded, - Locations: source.NewLocationSet(loc2), + Locations: file.NewLocationSet(loc2), URLs: internal.NewStringSet(), }, }, @@ -63,7 +64,7 @@ func Test_LocalLicenseSearch(t *testing.T) { localModCacheDir: path.Join(wd, "test-fixtures", "licenses", "pkg", "mod"), }, ) - licenses, err := l.getLicenses(source.EmptyResolver{}, test.name, test.version) + licenses, err := l.getLicenses(fileresolver.Empty{}, test.name, test.version) require.NoError(t, err) require.Len(t, licenses, 1) @@ -74,8 +75,8 @@ func Test_LocalLicenseSearch(t *testing.T) { } func Test_RemoteProxyLicenseSearch(t *testing.T) { - loc1 := source.NewLocation("github.com/someorg/somename@v0.3.2/LICENSE") - loc2 := source.NewLocation("github.com/!cap!o!r!g/!cap!project@v4.111.5/LICENSE.txt") + loc1 := file.NewLocation("github.com/someorg/somename@v0.3.2/LICENSE") + loc2 := file.NewLocation("github.com/!cap!o!r!g/!cap!project@v4.111.5/LICENSE.txt") server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { buf := &bytes.Buffer{} @@ -126,7 +127,7 @@ func Test_RemoteProxyLicenseSearch(t *testing.T) { Value: "Apache-2.0", SPDXExpression: "Apache-2.0", Type: license.Concluded, - Locations: source.NewLocationSet(loc1), + Locations: file.NewLocationSet(loc1), URLs: internal.NewStringSet(), }, }, @@ -137,7 +138,7 @@ func Test_RemoteProxyLicenseSearch(t *testing.T) { Value: "MIT", SPDXExpression: "MIT", Type: license.Concluded, - Locations: source.NewLocationSet(loc2), + Locations: 
file.NewLocationSet(loc2), URLs: internal.NewStringSet(), }, }, @@ -153,7 +154,7 @@ func Test_RemoteProxyLicenseSearch(t *testing.T) { localModCacheDir: modDir, }) - licenses, err := l.getLicenses(source.EmptyResolver{}, test.name, test.version) + licenses, err := l.getLicenses(fileresolver.Empty{}, test.name, test.version) require.NoError(t, err) require.Len(t, licenses, 1) diff --git a/syft/pkg/cataloger/golang/package.go b/syft/pkg/cataloger/golang/package.go index a7b1ee44e82..30ba083b48f 100644 --- a/syft/pkg/cataloger/golang/package.go +++ b/syft/pkg/cataloger/golang/package.go @@ -7,11 +7,11 @@ import ( "github.com/anchore/packageurl-go" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func (c *goBinaryCataloger) newGoBinaryPackage(resolver source.FileResolver, dep *debug.Module, mainModule, goVersion, architecture string, buildSettings map[string]string, locations ...source.Location) pkg.Package { +func (c *goBinaryCataloger) newGoBinaryPackage(resolver file.Resolver, dep *debug.Module, mainModule, goVersion, architecture string, buildSettings map[string]string, locations ...file.Location) pkg.Package { if dep.Replace != nil { dep = dep.Replace } @@ -28,7 +28,7 @@ func (c *goBinaryCataloger) newGoBinaryPackage(resolver source.FileResolver, dep PURL: packageURL(dep.Path, dep.Version), Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), MetadataType: pkg.GolangBinMetadataType, Metadata: pkg.GolangBinMetadata{ GoCompiledVersion: goVersion, diff --git a/syft/pkg/cataloger/golang/parse_go_binary.go b/syft/pkg/cataloger/golang/parse_go_binary.go index 542fcbe0232..e2e1c53eba6 100644 --- a/syft/pkg/cataloger/golang/parse_go_binary.go +++ b/syft/pkg/cataloger/golang/parse_go_binary.go @@ -18,11 +18,11 @@ import ( "github.com/anchore/syft/internal" 
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" "github.com/anchore/syft/syft/pkg/cataloger/golang/internal/xcoff" "github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader" - "github.com/anchore/syft/syft/source" ) const GOARCH = "GOARCH" @@ -49,7 +49,7 @@ type goBinaryCataloger struct { } // Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing rpm db installation. -func (c *goBinaryCataloger) parseGoBinary(resolver source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func (c *goBinaryCataloger) parseGoBinary(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package unionReader, err := unionreader.GetUnionReader(reader.ReadCloser) @@ -66,7 +66,7 @@ func (c *goBinaryCataloger) parseGoBinary(resolver source.FileResolver, _ *gener return pkgs, nil, nil } -func (c *goBinaryCataloger) makeGoMainPackage(resolver source.FileResolver, mod *debug.BuildInfo, arch string, location source.Location) pkg.Package { +func (c *goBinaryCataloger) makeGoMainPackage(resolver file.Resolver, mod *debug.BuildInfo, arch string, location file.Location) pkg.Package { gbs := getBuildSettings(mod.Settings) main := c.newGoBinaryPackage( resolver, @@ -258,7 +258,7 @@ func createMainModuleFromPath(path string) (mod debug.Module) { return } -func (c *goBinaryCataloger) buildGoPkgInfo(resolver source.FileResolver, location source.Location, mod *debug.BuildInfo, arch string) []pkg.Package { +func (c *goBinaryCataloger) buildGoPkgInfo(resolver file.Resolver, location file.Location, mod *debug.BuildInfo, arch string) []pkg.Package { var pkgs []pkg.Package if mod == nil { 
return pkgs diff --git a/syft/pkg/cataloger/golang/parse_go_binary_test.go b/syft/pkg/cataloger/golang/parse_go_binary_test.go index cb0d9e3eb33..5f483b3c4f5 100644 --- a/syft/pkg/cataloger/golang/parse_go_binary_test.go +++ b/syft/pkg/cataloger/golang/parse_go_binary_test.go @@ -14,8 +14,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/internal/fileresolver" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) // make will run the default make target for the given test fixture path @@ -135,9 +136,9 @@ func TestBuildGoPkgInfo(t *testing.T) { Type: pkg.GoModulePkg, Version: "(devel)", PURL: "pkg:golang/github.com/anchore/syft@(devel)", - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -182,9 +183,9 @@ func TestBuildGoPkgInfo(t *testing.T) { PURL: "pkg:golang/github.com/adrg/xdg", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -225,9 +226,9 @@ func TestBuildGoPkgInfo(t *testing.T) { PURL: "pkg:golang/github.com/adrg/xdg@v0.2.1", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -261,9 +262,9 @@ func TestBuildGoPkgInfo(t *testing.T) { PURL: "pkg:golang/github.com/a/b/c@(devel)", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: source.NewLocationSet( - 
source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -320,9 +321,9 @@ func TestBuildGoPkgInfo(t *testing.T) { Type: pkg.GoModulePkg, Version: "v0.0.0-20221014195457-41bc6bb41035", PURL: "pkg:golang/github.com/anchore/syft@v0.0.0-20221014195457-41bc6bb41035", - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -367,9 +368,9 @@ func TestBuildGoPkgInfo(t *testing.T) { Type: pkg.GoModulePkg, Version: "v0.79.0", PURL: "pkg:golang/github.com/anchore/syft@v0.79.0", - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -413,9 +414,9 @@ func TestBuildGoPkgInfo(t *testing.T) { Type: pkg.GoModulePkg, Version: "v0.0.0-20221014195457-41bc6bb41035", PURL: "pkg:golang/github.com/anchore/syft@v0.0.0-20221014195457-41bc6bb41035", - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -468,9 +469,9 @@ func TestBuildGoPkgInfo(t *testing.T) { PURL: "pkg:golang/github.com/adrg/xdg@v0.2.1", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -490,9 +491,9 @@ func TestBuildGoPkgInfo(t *testing.T) { PURL: 
"pkg:golang/github.com/anchore/client-go@v0.0.0-20210222170800-9c70f9b80bcf", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -545,9 +546,9 @@ func TestBuildGoPkgInfo(t *testing.T) { PURL: "pkg:golang/golang.org/x/sys@v0.0.0-20211006194710-c8a6f5223071", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -566,9 +567,9 @@ func TestBuildGoPkgInfo(t *testing.T) { PURL: "pkg:golang/golang.org/x/term@v0.0.0-20210916214954-140adaaadfaf", Language: pkg.Go, Type: pkg.GoModulePkg, - Locations: source.NewLocationSet( - source.NewLocationFromCoordinates( - source.Coordinates{ + Locations: file.NewLocationSet( + file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, @@ -593,15 +594,15 @@ func TestBuildGoPkgInfo(t *testing.T) { p := &test.expected[i] p.SetID() } - location := source.NewLocationFromCoordinates( - source.Coordinates{ + location := file.NewLocationFromCoordinates( + file.Coordinates{ RealPath: "/a-path", FileSystemID: "layer-id", }, ) c := goBinaryCataloger{} - pkgs := c.buildGoPkgInfo(source.EmptyResolver{}, location, test.mod, test.arch) + pkgs := c.buildGoPkgInfo(fileresolver.Empty{}, location, test.mod, test.arch) assert.Equal(t, test.expected, pkgs) }) } diff --git a/syft/pkg/cataloger/golang/parse_go_mod.go b/syft/pkg/cataloger/golang/parse_go_mod.go index 3fdc45b9a71..7ef4ac0a70f 100644 --- a/syft/pkg/cataloger/golang/parse_go_mod.go +++ b/syft/pkg/cataloger/golang/parse_go_mod.go @@ -11,9 +11,9 @@ import ( "github.com/anchore/syft/internal/log" 
"github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) type goModCataloger struct { @@ -23,7 +23,7 @@ type goModCataloger struct { // parseGoModFile takes a go.mod and lists all packages discovered. // //nolint:funlen -func (c *goModCataloger) parseGoModFile(resolver source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func (c *goModCataloger) parseGoModFile(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { packages := make(map[string]pkg.Package) contents, err := io.ReadAll(reader) @@ -31,7 +31,7 @@ func (c *goModCataloger) parseGoModFile(resolver source.FileResolver, _ *generic return nil, nil, fmt.Errorf("failed to read go module: %w", err) } - file, err := modfile.Parse(reader.RealPath, contents, nil) + f, err := modfile.Parse(reader.RealPath, contents, nil) if err != nil { return nil, nil, fmt.Errorf("failed to parse go module: %w", err) } @@ -41,7 +41,7 @@ func (c *goModCataloger) parseGoModFile(resolver source.FileResolver, _ *generic log.Debugf("unable to get go.sum: %v", err) } - for _, m := range file.Require { + for _, m := range f.Require { licenses, err := c.licenses.getLicenses(resolver, m.Mod.Path, m.Mod.Version) if err != nil { log.Tracef("error getting licenses for package: %s %v", m.Mod.Path, err) @@ -51,7 +51,7 @@ func (c *goModCataloger) parseGoModFile(resolver source.FileResolver, _ *generic Name: m.Mod.Path, Version: m.Mod.Version, Licenses: pkg.NewLicenseSet(licenses...), - Locations: source.NewLocationSet(reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), PURL: 
packageURL(m.Mod.Path, m.Mod.Version), Language: pkg.Go, Type: pkg.GoModulePkg, @@ -63,7 +63,7 @@ func (c *goModCataloger) parseGoModFile(resolver source.FileResolver, _ *generic } // remove any old packages and replace with new ones... - for _, m := range file.Replace { + for _, m := range f.Replace { licenses, err := c.licenses.getLicenses(resolver, m.New.Path, m.New.Version) if err != nil { log.Tracef("error getting licenses for package: %s %v", m.New.Path, err) @@ -73,7 +73,7 @@ func (c *goModCataloger) parseGoModFile(resolver source.FileResolver, _ *generic Name: m.New.Path, Version: m.New.Version, Licenses: pkg.NewLicenseSet(licenses...), - Locations: source.NewLocationSet(reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), PURL: packageURL(m.New.Path, m.New.Version), Language: pkg.Go, Type: pkg.GoModulePkg, @@ -85,7 +85,7 @@ func (c *goModCataloger) parseGoModFile(resolver source.FileResolver, _ *generic } // remove any packages from the exclude fields - for _, m := range file.Exclude { + for _, m := range f.Exclude { delete(packages, m.Mod.Path) } @@ -104,7 +104,7 @@ func (c *goModCataloger) parseGoModFile(resolver source.FileResolver, _ *generic return pkgsSlice, nil, nil } -func parseGoSumFile(resolver source.FileResolver, reader source.LocationReadCloser) (map[string]string, error) { +func parseGoSumFile(resolver file.Resolver, reader file.LocationReadCloser) (map[string]string, error) { out := map[string]string{} if resolver == nil { diff --git a/syft/pkg/cataloger/golang/parse_go_mod_test.go b/syft/pkg/cataloger/golang/parse_go_mod_test.go index 83b75beb108..f22b7ca2a56 100644 --- a/syft/pkg/cataloger/golang/parse_go_mod_test.go +++ b/syft/pkg/cataloger/golang/parse_go_mod_test.go @@ -3,9 +3,9 @@ package golang import ( "testing" + "github.com/anchore/syft/syft/file" 
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseGoMod(t *testing.T) { @@ -20,7 +20,7 @@ func TestParseGoMod(t *testing.T) { Name: "github.com/bmatcuk/doublestar", Version: "v1.3.1", PURL: "pkg:golang/github.com/bmatcuk/doublestar@v1.3.1", - Locations: source.NewLocationSet(source.NewLocation("test-fixtures/one-package")), + Locations: file.NewLocationSet(file.NewLocation("test-fixtures/one-package")), Language: pkg.Go, Type: pkg.GoModulePkg, MetadataType: pkg.GolangModMetadataType, @@ -36,7 +36,7 @@ func TestParseGoMod(t *testing.T) { Name: "github.com/anchore/go-testutils", Version: "v0.0.0-20200624184116-66aa578126db", PURL: "pkg:golang/github.com/anchore/go-testutils@v0.0.0-20200624184116-66aa578126db", - Locations: source.NewLocationSet(source.NewLocation("test-fixtures/many-packages")), + Locations: file.NewLocationSet(file.NewLocation("test-fixtures/many-packages")), Language: pkg.Go, Type: pkg.GoModulePkg, MetadataType: pkg.GolangModMetadataType, @@ -46,7 +46,7 @@ func TestParseGoMod(t *testing.T) { Name: "github.com/anchore/go-version", Version: "v1.2.2-0.20200701162849-18adb9c92b9b", PURL: "pkg:golang/github.com/anchore/go-version@v1.2.2-0.20200701162849-18adb9c92b9b", - Locations: source.NewLocationSet(source.NewLocation("test-fixtures/many-packages")), + Locations: file.NewLocationSet(file.NewLocation("test-fixtures/many-packages")), Language: pkg.Go, Type: pkg.GoModulePkg, MetadataType: pkg.GolangModMetadataType, @@ -56,7 +56,7 @@ func TestParseGoMod(t *testing.T) { Name: "github.com/anchore/stereoscope", Version: "v0.0.0-20200706164556-7cf39d7f4639", PURL: "pkg:golang/github.com/anchore/stereoscope@v0.0.0-20200706164556-7cf39d7f4639", - Locations: source.NewLocationSet(source.NewLocation("test-fixtures/many-packages")), + Locations: file.NewLocationSet(file.NewLocation("test-fixtures/many-packages")), Language: 
pkg.Go, Type: pkg.GoModulePkg, MetadataType: pkg.GolangModMetadataType, @@ -66,7 +66,7 @@ func TestParseGoMod(t *testing.T) { Name: "github.com/bmatcuk/doublestar", Version: "v8.8.8", PURL: "pkg:golang/github.com/bmatcuk/doublestar@v8.8.8", - Locations: source.NewLocationSet(source.NewLocation("test-fixtures/many-packages")), + Locations: file.NewLocationSet(file.NewLocation("test-fixtures/many-packages")), Language: pkg.Go, Type: pkg.GoModulePkg, MetadataType: pkg.GolangModMetadataType, @@ -76,7 +76,7 @@ func TestParseGoMod(t *testing.T) { Name: "github.com/go-test/deep", Version: "v1.0.6", PURL: "pkg:golang/github.com/go-test/deep@v1.0.6", - Locations: source.NewLocationSet(source.NewLocation("test-fixtures/many-packages")), + Locations: file.NewLocationSet(file.NewLocation("test-fixtures/many-packages")), Language: pkg.Go, Type: pkg.GoModulePkg, MetadataType: pkg.GolangModMetadataType, @@ -109,7 +109,7 @@ func Test_GoSumHashes(t *testing.T) { Name: "github.com/CycloneDX/cyclonedx-go", Version: "v0.6.0", PURL: "pkg:golang/github.com/CycloneDX/cyclonedx-go@v0.6.0", - Locations: source.NewLocationSet(source.NewLocation("go.mod")), + Locations: file.NewLocationSet(file.NewLocation("go.mod")), FoundBy: "go-mod-file-cataloger", Language: pkg.Go, Type: pkg.GoModulePkg, @@ -120,7 +120,7 @@ func Test_GoSumHashes(t *testing.T) { Name: "github.com/acarl005/stripansi", Version: "v0.0.0-20180116102854-5a71ef0e047d", PURL: "pkg:golang/github.com/acarl005/stripansi@v0.0.0-20180116102854-5a71ef0e047d", - Locations: source.NewLocationSet(source.NewLocation("go.mod")), + Locations: file.NewLocationSet(file.NewLocation("go.mod")), FoundBy: "go-mod-file-cataloger", Language: pkg.Go, Type: pkg.GoModulePkg, @@ -133,7 +133,7 @@ func Test_GoSumHashes(t *testing.T) { Name: "github.com/mgutz/ansi", Version: "v0.0.0-20200706080929-d51e80ef957d", PURL: "pkg:golang/github.com/mgutz/ansi@v0.0.0-20200706080929-d51e80ef957d", - Locations: 
source.NewLocationSet(source.NewLocation("go.mod")), + Locations: file.NewLocationSet(file.NewLocation("go.mod")), FoundBy: "go-mod-file-cataloger", Language: pkg.Go, Type: pkg.GoModulePkg, diff --git a/syft/pkg/cataloger/haskell/package.go b/syft/pkg/cataloger/haskell/package.go index c7c1aa1581a..ed47921b9f0 100644 --- a/syft/pkg/cataloger/haskell/package.go +++ b/syft/pkg/cataloger/haskell/package.go @@ -2,15 +2,15 @@ package haskell import ( "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackage(name, version string, m *pkg.HackageMetadata, locations ...source.Location) pkg.Package { +func newPackage(name, version string, m *pkg.HackageMetadata, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: name, Version: version, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(name, version), Language: pkg.Haskell, Type: pkg.HackagePkg, diff --git a/syft/pkg/cataloger/haskell/parse_cabal_freeze.go b/syft/pkg/cataloger/haskell/parse_cabal_freeze.go index d95446984cc..abb2c82c9b2 100644 --- a/syft/pkg/cataloger/haskell/parse_cabal_freeze.go +++ b/syft/pkg/cataloger/haskell/parse_cabal_freeze.go @@ -8,15 +8,15 @@ import ( "strings" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseCabalFreeze // parseCabalFreeze is a parser function for cabal.project.freeze contents, returning all packages discovered. 
-func parseCabalFreeze(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseCabalFreeze(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { r := bufio.NewReader(reader) var pkgs []pkg.Package for { diff --git a/syft/pkg/cataloger/haskell/parse_cabal_freeze_test.go b/syft/pkg/cataloger/haskell/parse_cabal_freeze_test.go index 2c4a96c77b8..acb58b74e97 100644 --- a/syft/pkg/cataloger/haskell/parse_cabal_freeze_test.go +++ b/syft/pkg/cataloger/haskell/parse_cabal_freeze_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseCabalFreeze(t *testing.T) { fixture := "test-fixtures/cabal.project.freeze" - locationSet := source.NewLocationSet(source.NewLocation(fixture)) + locationSet := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { diff --git a/syft/pkg/cataloger/haskell/parse_stack_lock.go b/syft/pkg/cataloger/haskell/parse_stack_lock.go index de41a57672d..3eabd79784f 100644 --- a/syft/pkg/cataloger/haskell/parse_stack_lock.go +++ b/syft/pkg/cataloger/haskell/parse_stack_lock.go @@ -8,9 +8,9 @@ import ( "gopkg.in/yaml.v3" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseStackLock @@ -38,7 +38,7 @@ type completedSnapshot struct { } // parseStackLock is a parser function for stack.yaml.lock contents, returning all packages discovered. 
-func parseStackLock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseStackLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { bytes, err := io.ReadAll(reader) if err != nil { return nil, nil, fmt.Errorf("failed to load stack.yaml.lock file: %w", err) diff --git a/syft/pkg/cataloger/haskell/parse_stack_lock_test.go b/syft/pkg/cataloger/haskell/parse_stack_lock_test.go index 2cdfbc75b86..d41b8704261 100644 --- a/syft/pkg/cataloger/haskell/parse_stack_lock_test.go +++ b/syft/pkg/cataloger/haskell/parse_stack_lock_test.go @@ -4,15 +4,15 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseStackLock(t *testing.T) { url := "https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/19/14.yaml" fixture := "test-fixtures/stack.yaml.lock" - locationSet := source.NewLocationSet(source.NewLocation(fixture)) + locationSet := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { diff --git a/syft/pkg/cataloger/haskell/parse_stack_yaml.go b/syft/pkg/cataloger/haskell/parse_stack_yaml.go index 8404f4bf47c..c31bc6a5cf3 100644 --- a/syft/pkg/cataloger/haskell/parse_stack_yaml.go +++ b/syft/pkg/cataloger/haskell/parse_stack_yaml.go @@ -7,9 +7,9 @@ import ( "gopkg.in/yaml.v3" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseStackYaml @@ -19,7 +19,7 @@ type stackYaml struct { } // parseStackYaml is a parser function for stack.yaml contents, returning all 
packages discovered. -func parseStackYaml(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseStackYaml(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { bytes, err := io.ReadAll(reader) if err != nil { return nil, nil, fmt.Errorf("failed to load stack.yaml file: %w", err) diff --git a/syft/pkg/cataloger/haskell/parse_stack_yaml_test.go b/syft/pkg/cataloger/haskell/parse_stack_yaml_test.go index 1e035a7a60e..9946de99be8 100644 --- a/syft/pkg/cataloger/haskell/parse_stack_yaml_test.go +++ b/syft/pkg/cataloger/haskell/parse_stack_yaml_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseStackYaml(t *testing.T) { fixture := "test-fixtures/stack.yaml" - locationSet := source.NewLocationSet(source.NewLocation(fixture)) + locationSet := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { diff --git a/syft/pkg/cataloger/internal/pkgtest/observing_resolver.go b/syft/pkg/cataloger/internal/pkgtest/observing_resolver.go index fd0a5428a08..6e4c23ebe83 100644 --- a/syft/pkg/cataloger/internal/pkgtest/observing_resolver.go +++ b/syft/pkg/cataloger/internal/pkgtest/observing_resolver.go @@ -7,23 +7,23 @@ import ( "github.com/scylladb/go-set/strset" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) -var _ source.FileResolver = (*ObservingResolver)(nil) +var _ file.Resolver = (*ObservingResolver)(nil) type ObservingResolver struct { - decorated source.FileResolver + decorated file.Resolver pathQueries map[string][]string - pathResponses []source.Location - contentQueries []source.Location + pathResponses []file.Location + 
contentQueries []file.Location emptyPathResponses map[string][]string } -func NewObservingResolver(resolver source.FileResolver) *ObservingResolver { +func NewObservingResolver(resolver file.Resolver) *ObservingResolver { return &ObservingResolver{ decorated: resolver, - pathResponses: make([]source.Location, 0), + pathResponses: make([]file.Location, 0), emptyPathResponses: make(map[string][]string), pathQueries: make(map[string][]string), } @@ -138,11 +138,11 @@ func (r *ObservingResolver) addPathQuery(name string, input ...string) { r.pathQueries[name] = append(r.pathQueries[name], input...) } -func (r *ObservingResolver) addPathResponse(locs ...source.Location) { +func (r *ObservingResolver) addPathResponse(locs ...file.Location) { r.pathResponses = append(r.pathResponses, locs...) } -func (r *ObservingResolver) addEmptyPathResponse(name string, locs []source.Location, paths ...string) { +func (r *ObservingResolver) addEmptyPathResponse(name string, locs []file.Location, paths ...string) { if len(locs) == 0 { results := r.emptyPathResponses[name] results = append(results, paths...) @@ -150,7 +150,7 @@ func (r *ObservingResolver) addEmptyPathResponse(name string, locs []source.Loca } } -func (r *ObservingResolver) FilesByPath(paths ...string) ([]source.Location, error) { +func (r *ObservingResolver) FilesByPath(paths ...string) ([]file.Location, error) { name := "FilesByPath" r.addPathQuery(name, paths...) @@ -161,7 +161,7 @@ func (r *ObservingResolver) FilesByPath(paths ...string) ([]source.Location, err return locs, err } -func (r *ObservingResolver) FilesByGlob(patterns ...string) ([]source.Location, error) { +func (r *ObservingResolver) FilesByGlob(patterns ...string) ([]file.Location, error) { name := "FilesByGlob" r.addPathQuery(name, patterns...) 
@@ -172,7 +172,7 @@ func (r *ObservingResolver) FilesByGlob(patterns ...string) ([]source.Location, return locs, err } -func (r *ObservingResolver) FilesByMIMEType(types ...string) ([]source.Location, error) { +func (r *ObservingResolver) FilesByMIMEType(types ...string) ([]file.Location, error) { name := "FilesByMIMEType" r.addPathQuery(name, types...) @@ -183,7 +183,7 @@ func (r *ObservingResolver) FilesByMIMEType(types ...string) ([]source.Location, return locs, err } -func (r *ObservingResolver) RelativeFileByPath(l source.Location, path string) *source.Location { +func (r *ObservingResolver) RelativeFileByPath(l file.Location, path string) *file.Location { name := "RelativeFileByPath" r.addPathQuery(name, path) @@ -201,7 +201,7 @@ func (r *ObservingResolver) RelativeFileByPath(l source.Location, path string) * // For the content resolver methods... -func (r *ObservingResolver) FileContentsByLocation(location source.Location) (io.ReadCloser, error) { +func (r *ObservingResolver) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { r.contentQueries = append(r.contentQueries, location) reader, err := r.decorated.FileContentsByLocation(location) return reader, err @@ -209,7 +209,7 @@ func (r *ObservingResolver) FileContentsByLocation(location source.Location) (io // For the remaining resolver methods... 
-func (r *ObservingResolver) AllLocations() <-chan source.Location { +func (r *ObservingResolver) AllLocations() <-chan file.Location { return r.decorated.AllLocations() } @@ -217,6 +217,6 @@ func (r *ObservingResolver) HasPath(s string) bool { return r.decorated.HasPath(s) } -func (r *ObservingResolver) FileMetadataByLocation(location source.Location) (source.FileMetadata, error) { +func (r *ObservingResolver) FileMetadataByLocation(location file.Location) (file.Metadata, error) { return r.decorated.FileMetadataByLocation(location) } diff --git a/syft/pkg/cataloger/internal/pkgtest/test_generic_parser.go b/syft/pkg/cataloger/internal/pkgtest/test_generic_parser.go index 5d230b0119a..9545c66b6fd 100644 --- a/syft/pkg/cataloger/internal/pkgtest/test_generic_parser.go +++ b/syft/pkg/cataloger/internal/pkgtest/test_generic_parser.go @@ -14,13 +14,14 @@ import ( "github.com/anchore/stereoscope/pkg/imagetest" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" "github.com/anchore/syft/syft/source" ) -type locationComparer func(x, y source.Location) bool +type locationComparer func(x, y file.Location) bool type licenseComparer func(x, y pkg.License) bool type CatalogTester struct { @@ -32,8 +33,8 @@ type CatalogTester struct { ignoreUnfulfilledPathResponses map[string][]string ignoreAnyUnfulfilledPaths []string env *generic.Environment - reader source.LocationReadCloser - resolver source.FileResolver + reader file.LocationReadCloser + resolver file.Resolver wantErr require.ErrorAssertionFunc compareOptions []cmp.Option locationComparer locationComparer @@ -58,13 +59,13 @@ func NewCatalogTester() *CatalogTester { } } -func DefaultLocationComparer(x, y source.Location) bool { +func DefaultLocationComparer(x, y file.Location) bool { return cmp.Equal(x.Coordinates, y.Coordinates) && 
cmp.Equal(x.VirtualPath, y.VirtualPath) } func DefaultLicenseComparer(x, y pkg.License) bool { return cmp.Equal(x, y, cmp.Comparer(DefaultLocationComparer), cmp.Comparer( - func(x, y source.LocationSet) bool { + func(x, y file.LocationSet) bool { xs := x.ToSlice() ys := y.ToSlice() if len(xs) != len(ys) { @@ -100,16 +101,16 @@ func (p *CatalogTester) FromFile(t *testing.T, path string) *CatalogTester { fixture, err := os.Open(path) require.NoError(t, err) - p.reader = source.LocationReadCloser{ - Location: source.NewLocation(fixture.Name()), + p.reader = file.LocationReadCloser{ + Location: file.NewLocation(fixture.Name()), ReadCloser: fixture, } return p } func (p *CatalogTester) FromString(location, data string) *CatalogTester { - p.reader = source.LocationReadCloser{ - Location: source.NewLocation(location), + p.reader = file.LocationReadCloser{ + Location: file.NewLocation(location), ReadCloser: io.NopCloser(strings.NewReader(data)), } return p @@ -139,7 +140,7 @@ func (p *CatalogTester) WithErrorAssertion(a require.ErrorAssertionFunc) *Catalo return p } -func (p *CatalogTester) WithResolver(r source.FileResolver) *CatalogTester { +func (p *CatalogTester) WithResolver(r file.Resolver) *CatalogTester { p.resolver = r return p } @@ -158,14 +159,14 @@ func (p *CatalogTester) WithImageResolver(t *testing.T, fixtureName string) *Cat } func (p *CatalogTester) IgnoreLocationLayer() *CatalogTester { - p.locationComparer = func(x, y source.Location) bool { + p.locationComparer = func(x, y file.Location) bool { return cmp.Equal(x.Coordinates.RealPath, y.Coordinates.RealPath) && cmp.Equal(x.VirtualPath, y.VirtualPath) } // we need to update the license comparer to use the ignored location layer p.licenseComparer = func(x, y pkg.License) bool { return cmp.Equal(x, y, cmp.Comparer(p.locationComparer), cmp.Comparer( - func(x, y source.LocationSet) bool { + func(x, y file.LocationSet) bool { xs := x.ToSlice() ys := y.ToSlice() if len(xs) != len(ys) { @@ -259,7 +260,7 @@ func 
(p *CatalogTester) assertPkgs(t *testing.T, pkgs []pkg.Package, relationshi cmpopts.IgnoreFields(pkg.Package{}, "id"), // note: ID is not deterministic for test purposes cmpopts.SortSlices(pkg.Less), cmp.Comparer( - func(x, y source.LocationSet) bool { + func(x, y file.LocationSet) bool { xs := x.ToSlice() ys := y.ToSlice() @@ -345,7 +346,7 @@ func AssertPackagesEqual(t *testing.T, a, b pkg.Package) { opts := []cmp.Option{ cmpopts.IgnoreFields(pkg.Package{}, "id"), // note: ID is not deterministic for test purposes cmp.Comparer( - func(x, y source.LocationSet) bool { + func(x, y file.LocationSet) bool { xs := x.ToSlice() ys := y.ToSlice() diff --git a/syft/pkg/cataloger/java/archive_parser.go b/syft/pkg/cataloger/java/archive_parser.go index d6c0ad926f5..a1efd022d0c 100644 --- a/syft/pkg/cataloger/java/archive_parser.go +++ b/syft/pkg/cataloger/java/archive_parser.go @@ -7,13 +7,12 @@ import ( "path" "strings" - "github.com/anchore/syft/internal/file" + intFile "github.com/anchore/syft/internal/file" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" - syftFile "github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseJavaArchive @@ -43,8 +42,8 @@ var javaArchiveHashes = []crypto.Hash{ } type archiveParser struct { - fileManifest file.ZipFileManifest - location source.Location + fileManifest intFile.ZipFileManifest + location file.Location archivePath string contentPath string fileInfo archiveFilename @@ -52,7 +51,7 @@ type archiveParser struct { } // parseJavaArchive is a parser function for java archive contents, returning all Java libraries and nested archives. 
-func parseJavaArchive(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseJavaArchive(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { parser, cleanupFn, err := newJavaArchiveParser(reader, true) // note: even on error, we should always run cleanup functions defer cleanupFn() @@ -72,7 +71,7 @@ func uniquePkgKey(p *pkg.Package) string { // newJavaArchiveParser returns a new java archive parser object for the given archive. Can be configured to discover // and parse nested archives or ignore them. -func newJavaArchiveParser(reader source.LocationReadCloser, detectNested bool) (*archiveParser, func(), error) { +func newJavaArchiveParser(reader file.LocationReadCloser, detectNested bool) (*archiveParser, func(), error) { // fetch the last element of the virtual path virtualElements := strings.Split(reader.AccessPath(), ":") currentFilepath := virtualElements[len(virtualElements)-1] @@ -82,7 +81,7 @@ func newJavaArchiveParser(reader source.LocationReadCloser, detectNested bool) ( return nil, cleanupFn, fmt.Errorf("unable to process java archive: %w", err) } - fileManifest, err := file.NewZipFileManifest(archivePath) + fileManifest, err := intFile.NewZipFileManifest(archivePath) if err != nil { return nil, cleanupFn, fmt.Errorf("unable to read files from java archive: %w", err) } @@ -160,7 +159,7 @@ func (j *archiveParser) discoverMainPackage() (*pkg.Package, error) { } // fetch the manifest file - contents, err := file.ContentsFromZip(j.archivePath, manifestMatches...) + contents, err := intFile.ContentsFromZip(j.archivePath, manifestMatches...) 
if err != nil { return nil, fmt.Errorf("unable to extract java manifests (%s): %w", j.location, err) } @@ -180,7 +179,7 @@ func (j *archiveParser) discoverMainPackage() (*pkg.Package, error) { defer archiveCloser.Close() // grab and assign digest for the entire archive - digests, err := syftFile.DigestsFromFile(archiveCloser, javaArchiveHashes) + digests, err := file.NewDigestsFromFile(archiveCloser, javaArchiveHashes) if err != nil { log.Warnf("failed to create digest for file=%q: %+v", j.archivePath, err) } @@ -192,7 +191,7 @@ func (j *archiveParser) discoverMainPackage() (*pkg.Package, error) { Version: selectVersion(manifest, j.fileInfo), Language: pkg.Java, Licenses: pkg.NewLicenseSet(licenses...), - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( j.location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), ), Type: j.fileInfo.pkgType(), @@ -250,9 +249,9 @@ func (j *archiveParser) discoverPkgsFromNestedArchives(parentPkg *pkg.Package) ( // discoverPkgsFromZip finds Java archives within Java archives, returning all listed Java packages found and // associating each discovered package to the given parent package. -func discoverPkgsFromZip(location source.Location, archivePath, contentPath string, fileManifest file.ZipFileManifest, parentPkg *pkg.Package) ([]pkg.Package, []artifact.Relationship, error) { +func discoverPkgsFromZip(location file.Location, archivePath, contentPath string, fileManifest intFile.ZipFileManifest, parentPkg *pkg.Package) ([]pkg.Package, []artifact.Relationship, error) { // search and parse pom.properties files & fetch the contents - openers, err := file.ExtractFromZipToUniqueTempFile(archivePath, contentPath, fileManifest.GlobMatch(archiveFormatGlobs...)...) + openers, err := intFile.ExtractFromZipToUniqueTempFile(archivePath, contentPath, fileManifest.GlobMatch(archiveFormatGlobs...)...) 
if err != nil { return nil, nil, fmt.Errorf("unable to extract files from zip: %w", err) } @@ -261,7 +260,7 @@ func discoverPkgsFromZip(location source.Location, archivePath, contentPath stri } // discoverPkgsFromOpeners finds Java archives within the given files and associates them with the given parent package. -func discoverPkgsFromOpeners(location source.Location, openers map[string]file.Opener, parentPkg *pkg.Package) ([]pkg.Package, []artifact.Relationship, error) { +func discoverPkgsFromOpeners(location file.Location, openers map[string]intFile.Opener, parentPkg *pkg.Package) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package var relationships []artifact.Relationship @@ -290,7 +289,7 @@ func discoverPkgsFromOpeners(location source.Location, openers map[string]file.O } // discoverPkgsFromOpener finds Java archives within the given file. -func discoverPkgsFromOpener(location source.Location, pathWithinArchive string, archiveOpener file.Opener) ([]pkg.Package, []artifact.Relationship, error) { +func discoverPkgsFromOpener(location file.Location, pathWithinArchive string, archiveOpener intFile.Opener) ([]pkg.Package, []artifact.Relationship, error) { archiveReadCloser, err := archiveOpener.Open() if err != nil { return nil, nil, fmt.Errorf("unable to open archived file from tempdir: %w", err) @@ -302,9 +301,9 @@ func discoverPkgsFromOpener(location source.Location, pathWithinArchive string, }() nestedPath := fmt.Sprintf("%s:%s", location.AccessPath(), pathWithinArchive) - nestedLocation := source.NewLocationFromCoordinates(location.Coordinates) + nestedLocation := file.NewLocationFromCoordinates(location.Coordinates) nestedLocation.VirtualPath = nestedPath - nestedPkgs, nestedRelationships, err := parseJavaArchive(nil, nil, source.LocationReadCloser{ + nestedPkgs, nestedRelationships, err := parseJavaArchive(nil, nil, file.LocationReadCloser{ Location: nestedLocation, ReadCloser: archiveReadCloser, }) @@ -315,8 +314,8 @@ func 
discoverPkgsFromOpener(location source.Location, pathWithinArchive string, return nestedPkgs, nestedRelationships, nil } -func pomPropertiesByParentPath(archivePath string, location source.Location, extractPaths []string) (map[string]pkg.PomProperties, error) { - contentsOfMavenPropertiesFiles, err := file.ContentsFromZip(archivePath, extractPaths...) +func pomPropertiesByParentPath(archivePath string, location file.Location, extractPaths []string) (map[string]pkg.PomProperties, error) { + contentsOfMavenPropertiesFiles, err := intFile.ContentsFromZip(archivePath, extractPaths...) if err != nil { return nil, fmt.Errorf("unable to extract maven files: %w", err) } @@ -344,8 +343,8 @@ func pomPropertiesByParentPath(archivePath string, location source.Location, ext return propertiesByParentPath, nil } -func pomProjectByParentPath(archivePath string, location source.Location, extractPaths []string) (map[string]pkg.PomProject, error) { - contentsOfMavenProjectFiles, err := file.ContentsFromZip(archivePath, extractPaths...) +func pomProjectByParentPath(archivePath string, location file.Location, extractPaths []string) (map[string]pkg.PomProject, error) { + contentsOfMavenProjectFiles, err := intFile.ContentsFromZip(archivePath, extractPaths...) if err != nil { return nil, fmt.Errorf("unable to extract maven files: %w", err) } @@ -374,7 +373,7 @@ func pomProjectByParentPath(archivePath string, location source.Location, extrac // packagesFromPomProperties processes a single Maven POM properties for a given parent package, returning all listed Java packages found and // associating each discovered package to the given parent package. Note the pom.xml is optional, the pom.properties is not. 
-func newPackageFromMavenData(pomProperties pkg.PomProperties, pomProject *pkg.PomProject, parentPkg *pkg.Package, location source.Location) *pkg.Package { +func newPackageFromMavenData(pomProperties pkg.PomProperties, pomProject *pkg.PomProject, parentPkg *pkg.Package, location file.Location) *pkg.Package { // keep the artifact name within the virtual path if this package does not match the parent package vPathSuffix := "" if !strings.HasPrefix(pomProperties.ArtifactID, parentPkg.Name) { @@ -386,7 +385,7 @@ func newPackageFromMavenData(pomProperties pkg.PomProperties, pomProject *pkg.Po p := pkg.Package{ Name: pomProperties.ArtifactID, Version: pomProperties.Version, - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), ), Language: pkg.Java, diff --git a/syft/pkg/cataloger/java/archive_parser_test.go b/syft/pkg/cataloger/java/archive_parser_test.go index 5385dec7382..422de7d480f 100644 --- a/syft/pkg/cataloger/java/archive_parser_test.go +++ b/syft/pkg/cataloger/java/archive_parser_test.go @@ -16,9 +16,9 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/syft/internal" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func generateJavaBuildFixture(t *testing.T, fixturePath string) { @@ -100,7 +100,7 @@ func TestParseJar(t *testing.T) { Version: "1.0-SNAPSHOT", PURL: "pkg:maven/io.jenkins.plugins/example-jenkins-plugin@1.0-SNAPSHOT", Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT License", source.NewLocation("test-fixtures/java-builds/packages/example-jenkins-plugin.hpi")), + pkg.NewLicenseFromLocations("MIT License", file.NewLocation("test-fixtures/java-builds/packages/example-jenkins-plugin.hpi")), ), Language: pkg.Java, Type: pkg.JenkinsPluginPkg, @@ -272,12 +272,12 @@ func 
TestParseJar(t *testing.T) { for k := range test.expected { p := test.expected[k] - p.Locations.Add(source.NewLocation(test.fixture)) + p.Locations.Add(file.NewLocation(test.fixture)) test.expected[k] = p } - parser, cleanupFn, err := newJavaArchiveParser(source.LocationReadCloser{ - Location: source.NewLocation(fixture.Name()), + parser, cleanupFn, err := newJavaArchiveParser(file.LocationReadCloser{ + Location: file.NewLocation(fixture.Name()), ReadCloser: fixture, }, false) defer cleanupFn() @@ -546,8 +546,8 @@ func TestParseNestedJar(t *testing.T) { fixture, err := os.Open(test.fixture) require.NoError(t, err) - actual, _, err := parseJavaArchive(nil, nil, source.LocationReadCloser{ - Location: source.NewLocation(fixture.Name()), + actual, _, err := parseJavaArchive(nil, nil, file.LocationReadCloser{ + Location: file.NewLocation(fixture.Name()), ReadCloser: fixture, }) require.NoError(t, err) @@ -975,7 +975,7 @@ func Test_newPackageFromMavenData(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - locations := source.NewLocationSet(source.NewLocation(virtualPath)) + locations := file.NewLocationSet(file.NewLocation(virtualPath)) if test.expectedPackage != nil { test.expectedPackage.Locations = locations if test.expectedPackage.Metadata.(pkg.JavaMetadata).Parent != nil { @@ -987,7 +987,7 @@ func Test_newPackageFromMavenData(t *testing.T) { } test.expectedParent.Locations = locations - actualPackage := newPackageFromMavenData(test.props, test.project, test.parent, source.NewLocation(virtualPath)) + actualPackage := newPackageFromMavenData(test.props, test.project, test.parent, file.NewLocation(virtualPath)) if test.expectedPackage == nil { require.Nil(t, actualPackage) } else { diff --git a/syft/pkg/cataloger/java/graalvm_native_image_cataloger.go b/syft/pkg/cataloger/java/graalvm_native_image_cataloger.go index 2e8b63c932b..db462ea8c99 100644 --- a/syft/pkg/cataloger/java/graalvm_native_image_cataloger.go +++ 
b/syft/pkg/cataloger/java/graalvm_native_image_cataloger.go @@ -17,9 +17,9 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/cpe" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader" - "github.com/anchore/syft/syft/source" ) type nativeImageCycloneDX struct { @@ -571,7 +571,7 @@ func fetchPkgs(reader unionreader.UnionReader, filename string) []pkg.Package { } // Catalog attempts to find any native image executables reachable from a resolver. -func (c *NativeImageCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { +func (c *NativeImageCataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package fileMatches, err := resolver.FilesByMIMEType(internal.ExecutableMIMETypeSet.List()...) if err != nil { diff --git a/syft/pkg/cataloger/java/parse_gradle_lockfile.go b/syft/pkg/cataloger/java/parse_gradle_lockfile.go index 803639ab43a..65adf7aebb2 100644 --- a/syft/pkg/cataloger/java/parse_gradle_lockfile.go +++ b/syft/pkg/cataloger/java/parse_gradle_lockfile.go @@ -5,9 +5,9 @@ import ( "strings" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) const gradleLockfileGlob = "**/gradle.lockfile*" @@ -19,7 +19,7 @@ type LockfileDependency struct { Version string } -func parseGradleLockfile(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseGradleLockfile(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package // Create a new scanner to read the file @@ -51,7 
+51,7 @@ func parseGradleLockfile(_ source.FileResolver, _ *generic.Environment, reader s mappedPkg := pkg.Package{ Name: dep.Name, Version: dep.Version, - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), ), Language: pkg.Java, diff --git a/syft/pkg/cataloger/java/parse_gradle_lockfile_test.go b/syft/pkg/cataloger/java/parse_gradle_lockfile_test.go index 65129efcff2..babc3d3e558 100644 --- a/syft/pkg/cataloger/java/parse_gradle_lockfile_test.go +++ b/syft/pkg/cataloger/java/parse_gradle_lockfile_test.go @@ -3,9 +3,9 @@ package java import ( "testing" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func Test_parserGradleLockfile(t *testing.T) { @@ -44,7 +44,7 @@ func Test_parserGradleLockfile(t *testing.T) { for _, test := range tests { t.Run(test.input, func(t *testing.T) { for i := range test.expected { - test.expected[i].Locations.Add(source.NewLocation(test.input)) + test.expected[i].Locations.Add(file.NewLocation(test.input)) } pkgtest.TestFileParser(t, test.input, parseGradleLockfile, test.expected, nil) }) diff --git a/syft/pkg/cataloger/java/parse_pom_xml.go b/syft/pkg/cataloger/java/parse_pom_xml.go index 8df940869ed..b0316c860d2 100644 --- a/syft/pkg/cataloger/java/parse_pom_xml.go +++ b/syft/pkg/cataloger/java/parse_pom_xml.go @@ -12,16 +12,16 @@ import ( "golang.org/x/net/html/charset" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) const pomXMLGlob = "*pom.xml" var propertyMatcher = regexp.MustCompile("[$][{][^}]+[}]") -func parserPomXML(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, 
[]artifact.Relationship, error) { +func parserPomXML(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { pom, err := decodePomXML(reader) if err != nil { return nil, nil, err @@ -65,7 +65,7 @@ func newPomProject(path string, p gopom.Project) *pkg.PomProject { } } -func newPackageFromPom(pom gopom.Project, dep gopom.Dependency, locations ...source.Location) pkg.Package { +func newPackageFromPom(pom gopom.Project, dep gopom.Dependency, locations ...file.Location) pkg.Package { m := pkg.JavaMetadata{ PomProperties: &pkg.PomProperties{ GroupID: resolveProperty(pom, dep.GroupID), @@ -78,7 +78,7 @@ func newPackageFromPom(pom gopom.Project, dep gopom.Dependency, locations ...sou p := pkg.Package{ Name: name, Version: version, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(name, version, m), Language: pkg.Java, Type: pkg.JavaPkg, // TODO: should we differentiate between packages from jar/war/zip versus packages from a pom.xml that were not installed yet? 
diff --git a/syft/pkg/cataloger/java/parse_pom_xml_test.go b/syft/pkg/cataloger/java/parse_pom_xml_test.go index 2e4d7a846b6..01b19e6bb8c 100644 --- a/syft/pkg/cataloger/java/parse_pom_xml_test.go +++ b/syft/pkg/cataloger/java/parse_pom_xml_test.go @@ -7,9 +7,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/vifraa/gopom" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func Test_parserPomXML(t *testing.T) { @@ -49,7 +49,7 @@ func Test_parserPomXML(t *testing.T) { for _, test := range tests { t.Run(test.input, func(t *testing.T) { for i := range test.expected { - test.expected[i].Locations.Add(source.NewLocation(test.input)) + test.expected[i].Locations.Add(file.NewLocation(test.input)) } pkgtest.TestFileParser(t, test.input, parserPomXML, test.expected, nil) }) @@ -181,7 +181,7 @@ func Test_parseCommonsTextPomXMLProject(t *testing.T) { for _, test := range tests { t.Run(test.input, func(t *testing.T) { for i := range test.expected { - test.expected[i].Locations.Add(source.NewLocation(test.input)) + test.expected[i].Locations.Add(file.NewLocation(test.input)) } pkgtest.TestFileParser(t, test.input, parserPomXML, test.expected, nil) }) diff --git a/syft/pkg/cataloger/java/tar_wrapped_archive_parser.go b/syft/pkg/cataloger/java/tar_wrapped_archive_parser.go index 99c723f44b5..05ab6dd22a5 100644 --- a/syft/pkg/cataloger/java/tar_wrapped_archive_parser.go +++ b/syft/pkg/cataloger/java/tar_wrapped_archive_parser.go @@ -3,11 +3,11 @@ package java import ( "fmt" - "github.com/anchore/syft/internal/file" + intFile "github.com/anchore/syft/internal/file" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var genericTarGlobs = []string{ @@ 
-45,7 +45,7 @@ var genericTarGlobs = []string{ // note: for compressed tars this is an extremely expensive operation and can lead to performance degradation. This is // due to the fact that there is no central directory header (say as in zip), which means that in order to get // a file listing within the archive you must decompress the entire archive and seek through all of the entries. -func parseTarWrappedJavaArchive(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseTarWrappedJavaArchive(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { contentPath, archivePath, cleanupFn, err := saveArchiveToTmp(reader.AccessPath(), reader) // note: even on error, we should always run cleanup functions defer cleanupFn() @@ -57,8 +57,8 @@ func parseTarWrappedJavaArchive(_ source.FileResolver, _ *generic.Environment, r return discoverPkgsFromTar(reader.Location, archivePath, contentPath) } -func discoverPkgsFromTar(location source.Location, archivePath, contentPath string) ([]pkg.Package, []artifact.Relationship, error) { - openers, err := file.ExtractGlobsFromTarToUniqueTempFile(archivePath, contentPath, archiveFormatGlobs...) +func discoverPkgsFromTar(location file.Location, archivePath, contentPath string) ([]pkg.Package, []artifact.Relationship, error) { + openers, err := intFile.ExtractGlobsFromTarToUniqueTempFile(archivePath, contentPath, archiveFormatGlobs...) 
if err != nil { return nil, nil, fmt.Errorf("unable to extract files from tar: %w", err) } diff --git a/syft/pkg/cataloger/java/tar_wrapped_archive_parser_test.go b/syft/pkg/cataloger/java/tar_wrapped_archive_parser_test.go index 6f40c175d48..1a3d1d1f32c 100644 --- a/syft/pkg/cataloger/java/tar_wrapped_archive_parser_test.go +++ b/syft/pkg/cataloger/java/tar_wrapped_archive_parser_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) func Test_parseTarWrappedJavaArchive(t *testing.T) { @@ -40,8 +40,8 @@ func Test_parseTarWrappedJavaArchive(t *testing.T) { t.Fatalf("failed to open fixture: %+v", err) } - actualPkgs, _, err := parseTarWrappedJavaArchive(nil, nil, source.LocationReadCloser{ - Location: source.NewLocation(test.fixture), + actualPkgs, _, err := parseTarWrappedJavaArchive(nil, nil, file.LocationReadCloser{ + Location: file.NewLocation(test.fixture), ReadCloser: fixture, }) require.NoError(t, err) diff --git a/syft/pkg/cataloger/java/zip_wrapped_archive_parser.go b/syft/pkg/cataloger/java/zip_wrapped_archive_parser.go index dffe5df74a6..930427f38f5 100644 --- a/syft/pkg/cataloger/java/zip_wrapped_archive_parser.go +++ b/syft/pkg/cataloger/java/zip_wrapped_archive_parser.go @@ -3,11 +3,11 @@ package java import ( "fmt" - "github.com/anchore/syft/internal/file" + intFile "github.com/anchore/syft/internal/file" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var genericZipGlobs = []string{ @@ -17,7 +17,7 @@ var genericZipGlobs = []string{ // TODO: when the generic archive cataloger is implemented, this should be removed (https://github.com/anchore/syft/issues/246) // parseZipWrappedJavaArchive is a parser function for java archive 
contents contained within arbitrary zip files. -func parseZipWrappedJavaArchive(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseZipWrappedJavaArchive(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { contentPath, archivePath, cleanupFn, err := saveArchiveToTmp(reader.AccessPath(), reader) // note: even on error, we should always run cleanup functions defer cleanupFn() @@ -29,7 +29,7 @@ func parseZipWrappedJavaArchive(_ source.FileResolver, _ *generic.Environment, r // functions support zips with shell scripts prepended to the file. Specifically, the helpers use the central // header at the end of the file to determine where the beginning of the zip payload is (unlike the standard lib // or archiver). - fileManifest, err := file.NewZipFileManifest(archivePath) + fileManifest, err := intFile.NewZipFileManifest(archivePath) if err != nil { return nil, nil, fmt.Errorf("unable to read files from java archive: %w", err) } diff --git a/syft/pkg/cataloger/java/zip_wrapped_archive_parser_test.go b/syft/pkg/cataloger/java/zip_wrapped_archive_parser_test.go index aa1e5108945..2f5b3328ac9 100644 --- a/syft/pkg/cataloger/java/zip_wrapped_archive_parser_test.go +++ b/syft/pkg/cataloger/java/zip_wrapped_archive_parser_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) func Test_parseZipWrappedJavaArchive(t *testing.T) { @@ -33,8 +33,8 @@ func Test_parseZipWrappedJavaArchive(t *testing.T) { t.Fatalf("failed to open fixture: %+v", err) } - actualPkgs, _, err := parseZipWrappedJavaArchive(nil, nil, source.LocationReadCloser{ - Location: source.NewLocation(test.fixture), + actualPkgs, _, err := parseZipWrappedJavaArchive(nil, nil, file.LocationReadCloser{ + Location: 
file.NewLocation(test.fixture), ReadCloser: fixture, }) require.NoError(t, err) diff --git a/syft/pkg/cataloger/javascript/cataloger_test.go b/syft/pkg/cataloger/javascript/cataloger_test.go index 5b9c18f0ed2..ca5169bafe5 100644 --- a/syft/pkg/cataloger/javascript/cataloger_test.go +++ b/syft/pkg/cataloger/javascript/cataloger_test.go @@ -3,13 +3,13 @@ package javascript import ( "testing" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func Test_JavascriptCataloger(t *testing.T) { - locationSet := source.NewLocationSet(source.NewLocation("package-lock.json")) + locationSet := file.NewLocationSet(file.NewLocation("package-lock.json")) expectedPkgs := []pkg.Package{ { Name: "@actions/core", @@ -20,7 +20,7 @@ func Test_JavascriptCataloger(t *testing.T) { Language: pkg.JavaScript, Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation("package-lock.json")), + pkg.NewLicenseFromLocations("MIT", file.NewLocation("package-lock.json")), ), MetadataType: pkg.NpmPackageLockJSONMetadataType, Metadata: pkg.NpmPackageLockJSONMetadata{Resolved: "https://registry.npmjs.org/@actions/core/-/core-1.6.0.tgz", Integrity: "sha512-NB1UAZomZlCV/LmJqkLhNTqtKfFXJZAUPcfl/zqG7EfsQdeUJtaWO98SGbuQ3pydJ3fHl2CvI/51OKYlCYYcaw=="}, @@ -45,7 +45,7 @@ func Test_JavascriptCataloger(t *testing.T) { Language: pkg.JavaScript, Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation("package-lock.json")), + pkg.NewLicenseFromLocations("MIT", file.NewLocation("package-lock.json")), ), MetadataType: pkg.NpmPackageLockJSONMetadataType, Metadata: pkg.NpmPackageLockJSONMetadata{Resolved: "https://registry.npmjs.org/cowsay/-/cowsay-1.4.0.tgz", Integrity: "sha512-rdg5k5PsHFVJheO/pmE3aDg2rUDDTfPJau6yYkZYlHFktUz+UxbE+IgnUAEyyCyv4noL5ltxXD0gZzmHPCy/9g=="}, diff --git 
a/syft/pkg/cataloger/javascript/package.go b/syft/pkg/cataloger/javascript/package.go index 468854a3927..4eaea055beb 100644 --- a/syft/pkg/cataloger/javascript/package.go +++ b/syft/pkg/cataloger/javascript/package.go @@ -8,11 +8,11 @@ import ( "github.com/anchore/packageurl-go" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackageJSONPackage(u packageJSON, indexLocation source.Location) pkg.Package { +func newPackageJSONPackage(u packageJSON, indexLocation file.Location) pkg.Package { licenseCandidates, err := u.licensesFromJSON() if err != nil { log.Warnf("unable to extract licenses from javascript package.json: %+v", err) @@ -23,7 +23,7 @@ func newPackageJSONPackage(u packageJSON, indexLocation source.Location) pkg.Pac Name: u.Name, Version: u.Version, PURL: packageURL(u.Name, u.Version), - Locations: source.NewLocationSet(indexLocation), + Locations: file.NewLocationSet(indexLocation), Language: pkg.JavaScript, Licenses: pkg.NewLicenseSet(license...), Type: pkg.NpmPkg, @@ -44,7 +44,7 @@ func newPackageJSONPackage(u packageJSON, indexLocation source.Location) pkg.Pac return p } -func newPackageLockV1Package(resolver source.FileResolver, location source.Location, name string, u lockDependency) pkg.Package { +func newPackageLockV1Package(resolver file.Resolver, location file.Location, name string, u lockDependency) pkg.Package { version := u.Version const aliasPrefixPackageLockV1 = "npm:" @@ -66,7 +66,7 @@ func newPackageLockV1Package(resolver source.FileResolver, location source.Locat pkg.Package{ Name: name, Version: version, - Locations: source.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), PURL: packageURL(name, version), Language: pkg.JavaScript, Type: pkg.NpmPkg, 
@@ -76,14 +76,14 @@ func newPackageLockV1Package(resolver source.FileResolver, location source.Locat ) } -func newPackageLockV2Package(resolver source.FileResolver, location source.Location, name string, u lockPackage) pkg.Package { +func newPackageLockV2Package(resolver file.Resolver, location file.Location, name string, u lockPackage) pkg.Package { return finalizeLockPkg( resolver, location, pkg.Package{ Name: name, Version: u.Version, - Locations: source.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), Licenses: pkg.NewLicenseSet(pkg.NewLicensesFromLocation(location, u.License...)...), PURL: packageURL(name, u.Version), Language: pkg.JavaScript, @@ -94,14 +94,14 @@ func newPackageLockV2Package(resolver source.FileResolver, location source.Locat ) } -func newPnpmPackage(resolver source.FileResolver, location source.Location, name, version string) pkg.Package { +func newPnpmPackage(resolver file.Resolver, location file.Location, name, version string) pkg.Package { return finalizeLockPkg( resolver, location, pkg.Package{ Name: name, Version: version, - Locations: source.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), PURL: packageURL(name, version), Language: pkg.JavaScript, Type: pkg.NpmPkg, @@ -109,14 +109,14 @@ func newPnpmPackage(resolver source.FileResolver, location source.Location, name ) } -func newYarnLockPackage(resolver source.FileResolver, location source.Location, name, version string) pkg.Package { +func newYarnLockPackage(resolver file.Resolver, location file.Location, name, version string) pkg.Package { return finalizeLockPkg( resolver, location, pkg.Package{ Name: name, Version: version, - Locations: 
source.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), PURL: packageURL(name, version), Language: pkg.JavaScript, Type: pkg.NpmPkg, @@ -124,14 +124,14 @@ func newYarnLockPackage(resolver source.FileResolver, location source.Location, ) } -func finalizeLockPkg(resolver source.FileResolver, location source.Location, p pkg.Package) pkg.Package { +func finalizeLockPkg(resolver file.Resolver, location file.Location, p pkg.Package) pkg.Package { licenseCandidate := addLicenses(p.Name, resolver, location) p.Licenses.Add(pkg.NewLicensesFromLocation(location, licenseCandidate...)...) p.SetID() return p } -func addLicenses(name string, resolver source.FileResolver, location source.Location) (allLicenses []string) { +func addLicenses(name string, resolver file.Resolver, location file.Location) (allLicenses []string) { if resolver == nil { return allLicenses } diff --git a/syft/pkg/cataloger/javascript/parse_package_json.go b/syft/pkg/cataloger/javascript/parse_package_json.go index 59c8a5c508d..0c05aedc0e3 100644 --- a/syft/pkg/cataloger/javascript/parse_package_json.go +++ b/syft/pkg/cataloger/javascript/parse_package_json.go @@ -12,9 +12,9 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // integrity check @@ -51,7 +51,7 @@ type repository struct { var authorPattern = regexp.MustCompile(`^\s*(?P[^<(]*)(\s+<(?P.*)>)?(\s\((?P.*)\))?\s*$`) // parsePackageJSON parses a package.json and returns the discovered JavaScript packages. 
-func parsePackageJSON(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parsePackageJSON(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package dec := json.NewDecoder(reader) diff --git a/syft/pkg/cataloger/javascript/parse_package_json_test.go b/syft/pkg/cataloger/javascript/parse_package_json_test.go index c0e0b17b088..3a57f3c8272 100644 --- a/syft/pkg/cataloger/javascript/parse_package_json_test.go +++ b/syft/pkg/cataloger/javascript/parse_package_json_test.go @@ -5,9 +5,9 @@ import ( "github.com/stretchr/testify/assert" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParsePackageJSON(t *testing.T) { @@ -24,7 +24,7 @@ func TestParsePackageJSON(t *testing.T) { Type: pkg.NpmPkg, Language: pkg.JavaScript, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("Artistic-2.0", source.NewLocation("test-fixtures/pkg-json/package.json")), + pkg.NewLicenseFromLocations("Artistic-2.0", file.NewLocation("test-fixtures/pkg-json/package.json")), ), MetadataType: pkg.NpmPackageJSONMetadataType, Metadata: pkg.NpmPackageJSONMetadata{ @@ -46,7 +46,7 @@ func TestParsePackageJSON(t *testing.T) { Type: pkg.NpmPkg, Language: pkg.JavaScript, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("ISC", source.NewLocation("test-fixtures/pkg-json/package-license-object.json")), + pkg.NewLicenseFromLocations("ISC", file.NewLocation("test-fixtures/pkg-json/package-license-object.json")), ), MetadataType: pkg.NpmPackageJSONMetadataType, Metadata: pkg.NpmPackageJSONMetadata{ @@ -67,8 +67,8 @@ func TestParsePackageJSON(t *testing.T) { PURL: "pkg:npm/npm@6.14.6", Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", 
source.NewLocation("test-fixtures/pkg-json/package-license-objects.json")), - pkg.NewLicenseFromLocations("Apache-2.0", source.NewLocation("test-fixtures/pkg-json/package-license-objects.json")), + pkg.NewLicenseFromLocations("MIT", file.NewLocation("test-fixtures/pkg-json/package-license-objects.json")), + pkg.NewLicenseFromLocations("Apache-2.0", file.NewLocation("test-fixtures/pkg-json/package-license-objects.json")), ), Language: pkg.JavaScript, MetadataType: pkg.NpmPackageJSONMetadataType, @@ -128,7 +128,7 @@ func TestParsePackageJSON(t *testing.T) { PURL: "pkg:npm/npm@6.14.6", Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("Artistic-2.0", source.NewLocation("test-fixtures/pkg-json/package-nested-author.json")), + pkg.NewLicenseFromLocations("Artistic-2.0", file.NewLocation("test-fixtures/pkg-json/package-nested-author.json")), ), Language: pkg.JavaScript, MetadataType: pkg.NpmPackageJSONMetadataType, @@ -150,7 +150,7 @@ func TestParsePackageJSON(t *testing.T) { PURL: "pkg:npm/function-bind@1.1.1", Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation("test-fixtures/pkg-json/package-repo-string.json")), + pkg.NewLicenseFromLocations("MIT", file.NewLocation("test-fixtures/pkg-json/package-repo-string.json")), ), Language: pkg.JavaScript, MetadataType: pkg.NpmPackageJSONMetadataType, @@ -172,7 +172,7 @@ func TestParsePackageJSON(t *testing.T) { PURL: "pkg:npm/npm@6.14.6", Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("Artistic-2.0", source.NewLocation("test-fixtures/pkg-json/package-private.json")), + pkg.NewLicenseFromLocations("Artistic-2.0", file.NewLocation("test-fixtures/pkg-json/package-private.json")), ), Language: pkg.JavaScript, MetadataType: pkg.NpmPackageJSONMetadataType, @@ -191,7 +191,7 @@ func TestParsePackageJSON(t *testing.T) { for _, test := range tests { t.Run(test.Fixture, func(t *testing.T) { - 
test.ExpectedPkg.Locations.Add(source.NewLocation(test.Fixture)) + test.ExpectedPkg.Locations.Add(file.NewLocation(test.Fixture)) pkgtest.TestFileParser(t, test.Fixture, parsePackageJSON, []pkg.Package{test.ExpectedPkg}, nil) }) } diff --git a/syft/pkg/cataloger/javascript/parse_package_lock.go b/syft/pkg/cataloger/javascript/parse_package_lock.go index 7ca2669b38a..91663b1b250 100644 --- a/syft/pkg/cataloger/javascript/parse_package_lock.go +++ b/syft/pkg/cataloger/javascript/parse_package_lock.go @@ -9,9 +9,9 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // integrity check @@ -44,7 +44,7 @@ type lockPackage struct { type packageLockLicense []string // parsePackageLock parses a package-lock.json and returns the discovered JavaScript packages. -func parsePackageLock(resolver source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parsePackageLock(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { // in the case we find package-lock.json files in the node_modules directories, skip those // as the whole purpose of the lock file is for the specific dependencies of the root project if pathContainsNodeModulesDirectory(reader.AccessPath()) { diff --git a/syft/pkg/cataloger/javascript/parse_package_lock_test.go b/syft/pkg/cataloger/javascript/parse_package_lock_test.go index dec36fb5f3f..baa27b397b4 100644 --- a/syft/pkg/cataloger/javascript/parse_package_lock_test.go +++ b/syft/pkg/cataloger/javascript/parse_package_lock_test.go @@ -4,9 +4,9 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" 
"github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParsePackageLock(t *testing.T) { @@ -114,7 +114,7 @@ func TestParsePackageLock(t *testing.T) { } fixture := "test-fixtures/pkg-lock/package-lock.json" for i := range expectedPkgs { - expectedPkgs[i].Locations.Add(source.NewLocation(fixture)) + expectedPkgs[i].Locations.Add(file.NewLocation(fixture)) } pkgtest.TestFileParser(t, fixture, parsePackageLock, expectedPkgs, expectedRelationships) @@ -140,7 +140,7 @@ func TestParsePackageLockV2(t *testing.T) { Language: pkg.JavaScript, Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("MIT", file.NewLocation(fixture)), ), MetadataType: "NpmPackageLockJsonMetadata", Metadata: pkg.NpmPackageLockJSONMetadata{Resolved: "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz", Integrity: "sha1-XxnSuFqY6VWANvajysyIGUIPBc8="}, @@ -152,7 +152,7 @@ func TestParsePackageLockV2(t *testing.T) { Language: pkg.JavaScript, Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("MIT", file.NewLocation(fixture)), ), MetadataType: "NpmPackageLockJsonMetadata", Metadata: pkg.NpmPackageLockJSONMetadata{Resolved: "https://registry.npmjs.org/@types/react/-/react-18.0.17.tgz", Integrity: "sha1-RYPZwyLWfv5LOak10iPtzHBQzPQ="}, @@ -164,7 +164,7 @@ func TestParsePackageLockV2(t *testing.T) { Language: pkg.JavaScript, Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("MIT", file.NewLocation(fixture)), ), MetadataType: "NpmPackageLockJsonMetadata", Metadata: pkg.NpmPackageLockJSONMetadata{Resolved: "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz", Integrity: "sha1-GmL4lSVyPd4kuhsBsJK/XfitTTk="}, @@ -176,14 +176,14 @@ func 
TestParsePackageLockV2(t *testing.T) { Language: pkg.JavaScript, Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("MIT", file.NewLocation(fixture)), ), MetadataType: "NpmPackageLockJsonMetadata", Metadata: pkg.NpmPackageLockJSONMetadata{Resolved: "https://registry.npmjs.org/csstype/-/csstype-3.1.0.tgz", Integrity: "sha1-TdysNxjXh8+d8NG30VAzklyPKfI="}, }, } for i := range expectedPkgs { - expectedPkgs[i].Locations.Add(source.NewLocation(fixture)) + expectedPkgs[i].Locations.Add(file.NewLocation(fixture)) } pkgtest.TestFileParser(t, fixture, parsePackageLock, expectedPkgs, expectedRelationships) } @@ -239,7 +239,7 @@ func TestParsePackageLockV3(t *testing.T) { }, } for i := range expectedPkgs { - expectedPkgs[i].Locations.Add(source.NewLocation(fixture)) + expectedPkgs[i].Locations.Add(file.NewLocation(fixture)) } pkgtest.TestFileParser(t, fixture, parsePackageLock, expectedPkgs, expectedRelationships) } @@ -287,7 +287,7 @@ func TestParsePackageLockAlias(t *testing.T) { Language: pkg.JavaScript, Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("ISC", source.NewLocation(packageLockV2)), + pkg.NewLicenseFromLocations("ISC", file.NewLocation(packageLockV2)), ), MetadataType: "NpmPackageLockJsonMetadata", Metadata: pkg.NpmPackageLockJSONMetadata{}, @@ -302,7 +302,7 @@ func TestParsePackageLockAlias(t *testing.T) { } for i := range expected { - expected[i].Locations.Add(source.NewLocation(pl)) + expected[i].Locations.Add(file.NewLocation(pl)) } pkgtest.TestFileParser(t, pl, parsePackageLock, expected, expectedRelationships) } @@ -318,7 +318,7 @@ func TestParsePackageLockLicenseWithArray(t *testing.T) { Language: pkg.JavaScript, Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("ISC", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("ISC", file.NewLocation(fixture)), ), PURL: "pkg:npm/tmp@1.0.0", MetadataType: 
"NpmPackageLockJsonMetadata", @@ -331,8 +331,8 @@ func TestParsePackageLockLicenseWithArray(t *testing.T) { Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation(fixture)), - pkg.NewLicenseFromLocations("Apache2", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("MIT", file.NewLocation(fixture)), + pkg.NewLicenseFromLocations("Apache2", file.NewLocation(fixture)), ), PURL: "pkg:npm/pause-stream@0.0.11", MetadataType: "NpmPackageLockJsonMetadata", @@ -344,7 +344,7 @@ func TestParsePackageLockLicenseWithArray(t *testing.T) { Language: pkg.JavaScript, Type: pkg.NpmPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("MIT", file.NewLocation(fixture)), ), PURL: "pkg:npm/through@2.3.8", MetadataType: "NpmPackageLockJsonMetadata", @@ -352,7 +352,7 @@ func TestParsePackageLockLicenseWithArray(t *testing.T) { }, } for i := range expectedPkgs { - expectedPkgs[i].Locations.Add(source.NewLocation(fixture)) + expectedPkgs[i].Locations.Add(file.NewLocation(fixture)) } pkgtest.TestFileParser(t, fixture, parsePackageLock, expectedPkgs, expectedRelationships) } diff --git a/syft/pkg/cataloger/javascript/parse_pnpm_lock.go b/syft/pkg/cataloger/javascript/parse_pnpm_lock.go index 418f6286285..1b786752e67 100644 --- a/syft/pkg/cataloger/javascript/parse_pnpm_lock.go +++ b/syft/pkg/cataloger/javascript/parse_pnpm_lock.go @@ -11,9 +11,9 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // integrity check @@ -25,7 +25,7 @@ type pnpmLockYaml struct { Packages map[string]interface{} `json:"packages" yaml:"packages"` } -func parsePnpmLock(resolver source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) 
([]pkg.Package, []artifact.Relationship, error) { +func parsePnpmLock(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { bytes, err := io.ReadAll(reader) if err != nil { return nil, nil, fmt.Errorf("failed to load pnpm-lock.yaml file: %w", err) diff --git a/syft/pkg/cataloger/javascript/parse_pnpm_lock_test.go b/syft/pkg/cataloger/javascript/parse_pnpm_lock_test.go index bcf1fe40ad0..7c0ed1c4db8 100644 --- a/syft/pkg/cataloger/javascript/parse_pnpm_lock_test.go +++ b/syft/pkg/cataloger/javascript/parse_pnpm_lock_test.go @@ -4,16 +4,16 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParsePnpmLock(t *testing.T) { var expectedRelationships []artifact.Relationship fixture := "test-fixtures/pnpm/pnpm-lock.yaml" - locationSet := source.NewLocationSet(source.NewLocation(fixture)) + locationSet := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { @@ -57,7 +57,7 @@ func TestParsePnpmV6Lock(t *testing.T) { var expectedRelationships []artifact.Relationship fixture := "test-fixtures/pnpm-v6/pnpm-lock.yaml" - locationSet := source.NewLocationSet(source.NewLocation(fixture)) + locationSet := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { diff --git a/syft/pkg/cataloger/javascript/parse_yarn_lock.go b/syft/pkg/cataloger/javascript/parse_yarn_lock.go index 048f8f05c76..a90392fe2c1 100644 --- a/syft/pkg/cataloger/javascript/parse_yarn_lock.go +++ b/syft/pkg/cataloger/javascript/parse_yarn_lock.go @@ -7,9 +7,9 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" 
"github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // integrity check @@ -42,7 +42,7 @@ const ( noVersion = "" ) -func parseYarnLock(resolver source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseYarnLock(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { // in the case we find yarn.lock files in the node_modules directories, skip those // as the whole purpose of the lock file is for the specific dependencies of the project if pathContainsNodeModulesDirectory(reader.AccessPath()) { diff --git a/syft/pkg/cataloger/javascript/parse_yarn_lock_test.go b/syft/pkg/cataloger/javascript/parse_yarn_lock_test.go index ded8850b1f4..cb2dacc407c 100644 --- a/syft/pkg/cataloger/javascript/parse_yarn_lock_test.go +++ b/syft/pkg/cataloger/javascript/parse_yarn_lock_test.go @@ -6,15 +6,15 @@ import ( "github.com/stretchr/testify/assert" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseYarnBerry(t *testing.T) { var expectedRelationships []artifact.Relationship fixture := "test-fixtures/yarn-berry/yarn.lock" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { @@ -98,7 +98,7 @@ func TestParseYarnBerry(t *testing.T) { func TestParseYarnLock(t *testing.T) { var expectedRelationships []artifact.Relationship fixture := "test-fixtures/yarn/yarn.lock" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { diff --git a/syft/pkg/cataloger/kernel/cataloger.go 
b/syft/pkg/cataloger/kernel/cataloger.go index 492c2043364..67c5bb5b727 100644 --- a/syft/pkg/cataloger/kernel/cataloger.go +++ b/syft/pkg/cataloger/kernel/cataloger.go @@ -8,9 +8,9 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ pkg.Cataloger = (*LinuxKernelCataloger)(nil) @@ -53,7 +53,7 @@ func (l LinuxKernelCataloger) Name() string { return "linux-kernel-cataloger" } -func (l LinuxKernelCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { +func (l LinuxKernelCataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { var allPackages []pkg.Package var allRelationships []artifact.Relationship var errs error diff --git a/syft/pkg/cataloger/kernel/cataloger_test.go b/syft/pkg/cataloger/kernel/cataloger_test.go index b223acf1a49..f819e605a45 100644 --- a/syft/pkg/cataloger/kernel/cataloger_test.go +++ b/syft/pkg/cataloger/kernel/cataloger_test.go @@ -4,6 +4,7 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" "github.com/anchore/syft/syft/source" @@ -14,8 +15,8 @@ func Test_KernelCataloger(t *testing.T) { Name: "linux-kernel", Version: "6.0.7-301.fc37.x86_64", FoundBy: "linux-kernel-cataloger", - Locations: source.NewLocationSet( - source.NewVirtualLocation( + Locations: file.NewLocationSet( + file.NewVirtualLocation( "/lib/modules/6.0.7-301.fc37.x86_64/vmlinuz", "/lib/modules/6.0.7-301.fc37.x86_64/vmlinuz", ), @@ -42,8 +43,8 @@ func Test_KernelCataloger(t *testing.T) { Name: "ttynull", Version: "", FoundBy: "linux-kernel-cataloger", - Locations: source.NewLocationSet( - 
source.NewVirtualLocation("/lib/modules/6.0.7-301.fc37.x86_64/kernel/drivers/tty/ttynull.ko", + Locations: file.NewLocationSet( + file.NewVirtualLocation("/lib/modules/6.0.7-301.fc37.x86_64/kernel/drivers/tty/ttynull.ko", "/lib/modules/6.0.7-301.fc37.x86_64/kernel/drivers/tty/ttynull.ko", ), ), diff --git a/syft/pkg/cataloger/kernel/package.go b/syft/pkg/cataloger/kernel/package.go index 3ea60668827..92dcb5ef14a 100644 --- a/syft/pkg/cataloger/kernel/package.go +++ b/syft/pkg/cataloger/kernel/package.go @@ -4,17 +4,17 @@ import ( "strings" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) const linuxKernelPackageName = "linux-kernel" -func newLinuxKernelPackage(metadata pkg.LinuxKernelMetadata, archiveLocation source.Location) pkg.Package { +func newLinuxKernelPackage(metadata pkg.LinuxKernelMetadata, archiveLocation file.Location) pkg.Package { p := pkg.Package{ Name: linuxKernelPackageName, Version: metadata.Version, - Locations: source.NewLocationSet(archiveLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: file.NewLocationSet(archiveLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), PURL: packageURL(linuxKernelPackageName, metadata.Version), Type: pkg.LinuxKernelPkg, MetadataType: pkg.LinuxKernelMetadataType, @@ -26,11 +26,11 @@ func newLinuxKernelPackage(metadata pkg.LinuxKernelMetadata, archiveLocation sou return p } -func newLinuxKernelModulePackage(metadata pkg.LinuxKernelModuleMetadata, kmLocation source.Location) pkg.Package { +func newLinuxKernelModulePackage(metadata pkg.LinuxKernelModuleMetadata, kmLocation file.Location) pkg.Package { p := pkg.Package{ Name: metadata.Name, Version: metadata.Version, - Locations: source.NewLocationSet(kmLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: 
file.NewLocationSet(kmLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), Licenses: pkg.NewLicenseSet(pkg.NewLicensesFromLocation(kmLocation, metadata.License)...), PURL: packageURL(metadata.Name, metadata.Version), Type: pkg.LinuxKernelModulePkg, diff --git a/syft/pkg/cataloger/kernel/parse_linux_kernel_file.go b/syft/pkg/cataloger/kernel/parse_linux_kernel_file.go index 0be32c5b658..54c26eb4297 100644 --- a/syft/pkg/cataloger/kernel/parse_linux_kernel_file.go +++ b/syft/pkg/cataloger/kernel/parse_linux_kernel_file.go @@ -9,15 +9,15 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" "github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader" - "github.com/anchore/syft/syft/source" ) const linuxKernelMagicName = "Linux kernel" -func parseLinuxKernelFile(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseLinuxKernelFile(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { unionReader, err := unionreader.GetUnionReader(reader) if err != nil { return nil, nil, fmt.Errorf("unable to get union reader for file: %w", err) diff --git a/syft/pkg/cataloger/kernel/parse_linux_kernel_module_file.go b/syft/pkg/cataloger/kernel/parse_linux_kernel_module_file.go index 3adeb5632f7..34974f6272c 100644 --- a/syft/pkg/cataloger/kernel/parse_linux_kernel_module_file.go +++ b/syft/pkg/cataloger/kernel/parse_linux_kernel_module_file.go @@ -6,15 +6,15 @@ import ( "strings" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" 
"github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader" - "github.com/anchore/syft/syft/source" ) const modinfoName = ".modinfo" -func parseLinuxKernelModuleFile(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseLinuxKernelModuleFile(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { unionReader, err := unionreader.GetUnionReader(reader) if err != nil { return nil, nil, fmt.Errorf("unable to get union reader for file: %w", err) diff --git a/syft/pkg/cataloger/nix/cataloger.go b/syft/pkg/cataloger/nix/cataloger.go index b4b440c2687..5d920f2300c 100644 --- a/syft/pkg/cataloger/nix/cataloger.go +++ b/syft/pkg/cataloger/nix/cataloger.go @@ -7,8 +7,8 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) const ( @@ -27,10 +27,10 @@ func (c *StoreCataloger) Name() string { return catalogerName } -func (c *StoreCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, []artifact.Relationship, error) { +func (c *StoreCataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) { // we want to search for only directories, which isn't possible via the stereoscope API, so we need to apply the glob manually on all returned paths var pkgs []pkg.Package - var filesByPath = make(map[string]*source.LocationSet) + var filesByPath = make(map[string]*file.LocationSet) for location := range resolver.AllLocations() { matchesStorePath, err := doublestar.Match(nixStoreGlob, location.RealPath) if err != nil { @@ -40,7 +40,7 @@ func (c *StoreCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, [ parentStorePath := findParentNixStorePath(location.RealPath) if parentStorePath != "" { if _, ok := 
filesByPath[parentStorePath]; !ok { - s := source.NewLocationSet() + s := file.NewLocationSet() filesByPath[parentStorePath] = &s } filesByPath[parentStorePath].Add(location) @@ -80,7 +80,7 @@ func (c *StoreCataloger) Catalog(resolver source.FileResolver) ([]pkg.Package, [ return pkgs, nil, nil } -func appendFiles(p *pkg.Package, location ...source.Location) { +func appendFiles(p *pkg.Package, location ...file.Location) { metadata, ok := p.Metadata.(pkg.NixStoreMetadata) if !ok { log.WithFields("package", p.Name).Warn("nix package metadata missing") diff --git a/syft/pkg/cataloger/nix/cataloger_test.go b/syft/pkg/cataloger/nix/cataloger_test.go index 10b544fc056..f43babde93b 100644 --- a/syft/pkg/cataloger/nix/cataloger_test.go +++ b/syft/pkg/cataloger/nix/cataloger_test.go @@ -4,9 +4,9 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestCataloger_Catalog(t *testing.T) { @@ -23,7 +23,7 @@ func TestCataloger_Catalog(t *testing.T) { Name: "glibc", Version: "2.34-210", PURL: "pkg:nix/glibc@2.34-210?output=bin&outputhash=h0cnbmfcn93xm5dg2x27ixhag1cwndga", - Locations: source.NewLocationSet(source.NewLocation("nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34-210-bin")), + Locations: file.NewLocationSet(file.NewLocation("nix/store/h0cnbmfcn93xm5dg2x27ixhag1cwndga-glibc-2.34-210-bin")), FoundBy: catalogerName, Type: pkg.NixPkg, MetadataType: pkg.NixStoreMetadataType, diff --git a/syft/pkg/cataloger/nix/package.go b/syft/pkg/cataloger/nix/package.go index 6e473d6fdcd..090dfe1379c 100644 --- a/syft/pkg/cataloger/nix/package.go +++ b/syft/pkg/cataloger/nix/package.go @@ -2,16 +2,16 @@ package nix import ( "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" 
) -func newNixStorePackage(storePath nixStorePath, locations ...source.Location) pkg.Package { +func newNixStorePackage(storePath nixStorePath, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: storePath.name, Version: storePath.version, FoundBy: catalogerName, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), Type: pkg.NixPkg, PURL: packageURL(storePath), MetadataType: pkg.NixStoreMetadataType, diff --git a/syft/pkg/cataloger/php/package.go b/syft/pkg/cataloger/php/package.go index 507fd26dab2..7255d58d53e 100644 --- a/syft/pkg/cataloger/php/package.go +++ b/syft/pkg/cataloger/php/package.go @@ -4,15 +4,15 @@ import ( "strings" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newComposerLockPackage(m parsedData, indexLocation source.Location) pkg.Package { +func newComposerLockPackage(m parsedData, indexLocation file.Location) pkg.Package { p := pkg.Package{ Name: m.Name, Version: m.Version, - Locations: source.NewLocationSet(indexLocation), + Locations: file.NewLocationSet(indexLocation), Licenses: pkg.NewLicenseSet(pkg.NewLicensesFromLocation(indexLocation, m.License...)...), PURL: packageURL(m), Language: pkg.PHP, diff --git a/syft/pkg/cataloger/php/parse_composer_lock.go b/syft/pkg/cataloger/php/parse_composer_lock.go index 248b7519eb8..836befe138f 100644 --- a/syft/pkg/cataloger/php/parse_composer_lock.go +++ b/syft/pkg/cataloger/php/parse_composer_lock.go @@ -7,9 +7,9 @@ import ( "io" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseComposerLock @@ -25,7 +25,7 @@ type composerLock struct { } // parseComposerLock is a parser function for Composer.lock contents, returning "Default" 
php packages discovered. -func parseComposerLock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseComposerLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { pkgs := make([]pkg.Package, 0) dec := json.NewDecoder(reader) diff --git a/syft/pkg/cataloger/php/parse_composer_lock_test.go b/syft/pkg/cataloger/php/parse_composer_lock_test.go index ad7814a97d5..f1038a5d4c3 100644 --- a/syft/pkg/cataloger/php/parse_composer_lock_test.go +++ b/syft/pkg/cataloger/php/parse_composer_lock_test.go @@ -4,15 +4,15 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseComposerFileLock(t *testing.T) { var expectedRelationships []artifact.Relationship fixture := "test-fixtures/composer.lock" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { Name: "adoy/fastcgi-client", @@ -20,7 +20,7 @@ func TestParseComposerFileLock(t *testing.T) { PURL: "pkg:composer/adoy/fastcgi-client@1.0.2", Locations: locations, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("MIT", file.NewLocation(fixture)), ), Language: pkg.PHP, Type: pkg.PhpComposerPkg, @@ -61,7 +61,7 @@ func TestParseComposerFileLock(t *testing.T) { PURL: "pkg:composer/alcaeus/mongo-php-adapter@1.1.11", Language: pkg.PHP, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("MIT", file.NewLocation(fixture)), ), Type: pkg.PhpComposerPkg, MetadataType: pkg.PhpComposerJSONMetadataType, diff --git 
a/syft/pkg/cataloger/php/parse_installed_json.go b/syft/pkg/cataloger/php/parse_installed_json.go index 8c1213200f4..060e01903e9 100644 --- a/syft/pkg/cataloger/php/parse_installed_json.go +++ b/syft/pkg/cataloger/php/parse_installed_json.go @@ -7,9 +7,9 @@ import ( "io" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseComposerLock @@ -41,7 +41,7 @@ func (w *installedJSONComposerV2) UnmarshalJSON(data []byte) error { } // parseInstalledJSON is a parser function for Composer.lock contents, returning "Default" php packages discovered. -func parseInstalledJSON(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseInstalledJSON(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package dec := json.NewDecoder(reader) diff --git a/syft/pkg/cataloger/php/parse_installed_json_test.go b/syft/pkg/cataloger/php/parse_installed_json_test.go index dde72021eaf..984856ed497 100644 --- a/syft/pkg/cataloger/php/parse_installed_json_test.go +++ b/syft/pkg/cataloger/php/parse_installed_json_test.go @@ -4,9 +4,9 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseInstalledJsonComposerV1(t *testing.T) { @@ -130,7 +130,7 @@ func TestParseInstalledJsonComposerV1(t *testing.T) { for _, fixture := range fixtures { t.Run(fixture, func(t *testing.T) { - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) for i := range expectedPkgs { 
expectedPkgs[i].Locations = locations locationLicenses := pkg.NewLicenseSet() diff --git a/syft/pkg/cataloger/portage/cataloger_test.go b/syft/pkg/cataloger/portage/cataloger_test.go index b2ff5f26d17..c556c940a14 100644 --- a/syft/pkg/cataloger/portage/cataloger_test.go +++ b/syft/pkg/cataloger/portage/cataloger_test.go @@ -7,20 +7,19 @@ import ( "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestPortageCataloger(t *testing.T) { - expectedLicenseLocation := source.NewLocation("var/db/pkg/app-containers/skopeo-1.5.1/LICENSE") + expectedLicenseLocation := file.NewLocation("var/db/pkg/app-containers/skopeo-1.5.1/LICENSE") expectedPkgs := []pkg.Package{ { Name: "app-containers/skopeo", Version: "1.5.1", FoundBy: "portage-cataloger", PURL: "pkg:ebuild/app-containers/skopeo@1.5.1", - Locations: source.NewLocationSet( - source.NewLocation("var/db/pkg/app-containers/skopeo-1.5.1/CONTENTS"), - source.NewLocation("var/db/pkg/app-containers/skopeo-1.5.1/SIZE"), + Locations: file.NewLocationSet( + file.NewLocation("var/db/pkg/app-containers/skopeo-1.5.1/CONTENTS"), + file.NewLocation("var/db/pkg/app-containers/skopeo-1.5.1/SIZE"), expectedLicenseLocation, ), Licenses: pkg.NewLicenseSet(pkg.NewLicensesFromLocation(expectedLicenseLocation, "Apache-2.0", "BSD", "BSD-2", "CC-BY-SA-4.0", "ISC", "MIT")...), diff --git a/syft/pkg/cataloger/portage/parse_portage_contents.go b/syft/pkg/cataloger/portage/parse_portage_contents.go index ac93c6ea05e..941cce394bb 100644 --- a/syft/pkg/cataloger/portage/parse_portage_contents.go +++ b/syft/pkg/cataloger/portage/parse_portage_contents.go @@ -15,7 +15,6 @@ import ( "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var ( @@ -23,7 +22,7 @@ var ( _ generic.Parser = 
parsePortageContents ) -func parsePortageContents(resolver source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parsePortageContents(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { cpvMatch := cpvRe.FindStringSubmatch(reader.Location.RealPath) if cpvMatch == nil { return nil, nil, fmt.Errorf("failed to match package and version in %s", reader.Location.RealPath) @@ -39,7 +38,7 @@ func parsePortageContents(resolver source.FileResolver, _ *generic.Environment, Name: name, Version: version, PURL: packageURL(name, version), - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), ), Type: pkg.PortagePkg, @@ -58,7 +57,7 @@ func parsePortageContents(resolver source.FileResolver, _ *generic.Environment, return []pkg.Package{p}, nil, nil } -func addFiles(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) { +func addFiles(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) { contentsReader, err := resolver.FileContentsByLocation(dbLocation) if err != nil { log.WithFields("path", dbLocation.RealPath).Warnf("failed to fetch portage contents (package=%s): %+v", p.Name, err) @@ -91,7 +90,7 @@ func addFiles(resolver source.FileResolver, dbLocation source.Location, p *pkg.P p.Locations.Add(dbLocation) } -func addLicenses(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) { +func addLicenses(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) { parentPath := filepath.Dir(dbLocation.RealPath) location := resolver.RelativeFileByPath(dbLocation, path.Join(parentPath, "LICENSE")) @@ -121,7 +120,7 @@ func addLicenses(resolver source.FileResolver, dbLocation source.Location, p *pk p.Locations.Add(location.WithAnnotation(pkg.EvidenceAnnotationKey, 
pkg.SupportingEvidenceAnnotation)) } -func addSize(resolver source.FileResolver, dbLocation source.Location, p *pkg.Package) { +func addSize(resolver file.Resolver, dbLocation file.Location, p *pkg.Package) { parentPath := filepath.Dir(dbLocation.RealPath) location := resolver.RelativeFileByPath(dbLocation, path.Join(parentPath, "SIZE")) diff --git a/syft/pkg/cataloger/python/cataloger_test.go b/syft/pkg/cataloger/python/cataloger_test.go index 10522f21524..da15f299313 100644 --- a/syft/pkg/cataloger/python/cataloger_test.go +++ b/syft/pkg/cataloger/python/cataloger_test.go @@ -5,9 +5,9 @@ import ( "github.com/stretchr/testify/require" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func Test_PackageCataloger(t *testing.T) { @@ -46,7 +46,7 @@ func Test_PackageCataloger(t *testing.T) { Type: pkg.PythonPkg, Language: pkg.Python, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("Apache 2.0", source.NewLocation("test-fixtures/egg-info/PKG-INFO")), + pkg.NewLicenseFromLocations("Apache 2.0", file.NewLocation("test-fixtures/egg-info/PKG-INFO")), ), FoundBy: "python-package-cataloger", MetadataType: pkg.PythonPackageMetadataType, @@ -84,7 +84,7 @@ func Test_PackageCataloger(t *testing.T) { Type: pkg.PythonPkg, Language: pkg.Python, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("BSD License", source.NewLocation("test-fixtures/dist-info/METADATA")), + pkg.NewLicenseFromLocations("BSD License", file.NewLocation("test-fixtures/dist-info/METADATA")), ), FoundBy: "python-package-cataloger", MetadataType: pkg.PythonPackageMetadataType, @@ -122,7 +122,7 @@ func Test_PackageCataloger(t *testing.T) { Type: pkg.PythonPkg, Language: pkg.Python, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("BSD License", source.NewLocation("test-fixtures/malformed-record/dist-info/METADATA")), + 
pkg.NewLicenseFromLocations("BSD License", file.NewLocation("test-fixtures/malformed-record/dist-info/METADATA")), ), FoundBy: "python-package-cataloger", MetadataType: pkg.PythonPackageMetadataType, @@ -154,7 +154,7 @@ func Test_PackageCataloger(t *testing.T) { Type: pkg.PythonPkg, Language: pkg.Python, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("BSD License", source.NewLocation("test-fixtures/partial.dist-info/METADATA")), + pkg.NewLicenseFromLocations("BSD License", file.NewLocation("test-fixtures/partial.dist-info/METADATA")), ), FoundBy: "python-package-cataloger", MetadataType: pkg.PythonPackageMetadataType, @@ -178,7 +178,7 @@ func Test_PackageCataloger(t *testing.T) { Type: pkg.PythonPkg, Language: pkg.Python, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("Apache 2.0", source.NewLocation("test-fixtures/test.egg-info")), + pkg.NewLicenseFromLocations("Apache 2.0", file.NewLocation("test-fixtures/test.egg-info")), ), FoundBy: "python-package-cataloger", MetadataType: pkg.PythonPackageMetadataType, @@ -196,12 +196,12 @@ func Test_PackageCataloger(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - resolver := source.NewMockResolverForPaths(test.fixtures...) + resolver := file.NewMockResolverForPaths(test.fixtures...) locations, err := resolver.FilesByPath(test.fixtures...) require.NoError(t, err) - test.expectedPackage.Locations = source.NewLocationSet(locations...) + test.expectedPackage.Locations = file.NewLocationSet(locations...) pkgtest.NewCatalogTester(). WithResolver(resolver). 
@@ -225,7 +225,7 @@ func Test_PackageCataloger_IgnorePackage(t *testing.T) { for _, test := range tests { t.Run(test.MetadataFixture, func(t *testing.T) { - resolver := source.NewMockResolverForPaths(test.MetadataFixture) + resolver := file.NewMockResolverForPaths(test.MetadataFixture) actual, _, err := NewPythonPackageCataloger().Catalog(resolver) require.NoError(t, err) diff --git a/syft/pkg/cataloger/python/package.go b/syft/pkg/cataloger/python/package.go index 68f7f1dccfe..e20f878601f 100644 --- a/syft/pkg/cataloger/python/package.go +++ b/syft/pkg/cataloger/python/package.go @@ -4,15 +4,15 @@ import ( "fmt" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackageForIndex(name, version string, locations ...source.Location) pkg.Package { +func newPackageForIndex(name, version string, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: name, Version: version, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(name, version, nil), Language: pkg.Python, Type: pkg.PythonPkg, @@ -23,11 +23,11 @@ func newPackageForIndex(name, version string, locations ...source.Location) pkg. 
return p } -func newPackageForIndexWithMetadata(name, version string, metadata pkg.PythonPipfileLockMetadata, locations ...source.Location) pkg.Package { +func newPackageForIndexWithMetadata(name, version string, metadata pkg.PythonPipfileLockMetadata, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: name, Version: version, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(name, version, nil), Language: pkg.Python, Type: pkg.PythonPkg, @@ -40,11 +40,11 @@ func newPackageForIndexWithMetadata(name, version string, metadata pkg.PythonPip return p } -func newPackageForRequirementsWithMetadata(name, version string, metadata pkg.PythonRequirementsMetadata, locations ...source.Location) pkg.Package { +func newPackageForRequirementsWithMetadata(name, version string, metadata pkg.PythonRequirementsMetadata, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: name, Version: version, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(name, version, nil), Language: pkg.Python, Type: pkg.PythonPkg, @@ -57,12 +57,12 @@ func newPackageForRequirementsWithMetadata(name, version string, metadata pkg.Py return p } -func newPackageForPackage(m parsedData, sources ...source.Location) pkg.Package { +func newPackageForPackage(m parsedData, sources ...file.Location) pkg.Package { p := pkg.Package{ Name: m.Name, Version: m.Version, PURL: packageURL(m.Name, m.Version, &m.PythonPackageMetadata), - Locations: source.NewLocationSet(sources...), + Locations: file.NewLocationSet(sources...), Licenses: pkg.NewLicenseSet(pkg.NewLicensesFromLocation(m.LicenseLocation, m.Licenses)...), Language: pkg.Python, Type: pkg.PythonPkg, diff --git a/syft/pkg/cataloger/python/parse_pipfile_lock.go b/syft/pkg/cataloger/python/parse_pipfile_lock.go index c957405a647..77c8cd4fe8a 100644 --- a/syft/pkg/cataloger/python/parse_pipfile_lock.go +++ 
b/syft/pkg/cataloger/python/parse_pipfile_lock.go @@ -8,9 +8,9 @@ import ( "strings" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) type pipfileLock struct { @@ -41,7 +41,7 @@ type Dependency struct { var _ generic.Parser = parsePipfileLock // parsePipfileLock is a parser function for Pipfile.lock contents, returning "Default" python packages discovered. -func parsePipfileLock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parsePipfileLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { pkgs := make([]pkg.Package, 0) dec := json.NewDecoder(reader) diff --git a/syft/pkg/cataloger/python/parse_pipfile_lock_test.go b/syft/pkg/cataloger/python/parse_pipfile_lock_test.go index 15b327845db..783c7dfd0ec 100644 --- a/syft/pkg/cataloger/python/parse_pipfile_lock_test.go +++ b/syft/pkg/cataloger/python/parse_pipfile_lock_test.go @@ -4,15 +4,15 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParsePipFileLock(t *testing.T) { fixture := "test-fixtures/pipfile-lock/Pipfile.lock" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { Name: "aio-pika", diff --git a/syft/pkg/cataloger/python/parse_poetry_lock.go b/syft/pkg/cataloger/python/parse_poetry_lock.go index 0e29de0178c..4bc929cbee4 100644 --- a/syft/pkg/cataloger/python/parse_poetry_lock.go +++ b/syft/pkg/cataloger/python/parse_poetry_lock.go @@ -6,9 +6,9 @@ import ( 
"github.com/pelletier/go-toml" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // integrity check @@ -25,7 +25,7 @@ type poetryMetadata struct { } // parsePoetryLock is a parser function for poetry.lock contents, returning all python packages discovered. -func parsePoetryLock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parsePoetryLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { tree, err := toml.LoadReader(reader) if err != nil { return nil, nil, fmt.Errorf("unable to load poetry.lock for parsing: %w", err) diff --git a/syft/pkg/cataloger/python/parse_poetry_lock_test.go b/syft/pkg/cataloger/python/parse_poetry_lock_test.go index 0a3478e1bdf..fd6d1bdc805 100644 --- a/syft/pkg/cataloger/python/parse_poetry_lock_test.go +++ b/syft/pkg/cataloger/python/parse_poetry_lock_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParsePoetryLock(t *testing.T) { fixture := "test-fixtures/poetry/poetry.lock" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { Name: "added-value", diff --git a/syft/pkg/cataloger/python/parse_requirements.go b/syft/pkg/cataloger/python/parse_requirements.go index c2b5a122a0a..33e1371b07b 100644 --- a/syft/pkg/cataloger/python/parse_requirements.go +++ b/syft/pkg/cataloger/python/parse_requirements.go @@ -9,9 +9,9 @@ import ( "github.com/anchore/syft/internal/log" 
"github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseRequirementsTxt @@ -23,7 +23,7 @@ var ( // parseRequirementsTxt takes a Python requirements.txt file, returning all Python packages that are locked to a // specific version. -func parseRequirementsTxt(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseRequirementsTxt(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var packages []pkg.Package scanner := bufio.NewScanner(reader) diff --git a/syft/pkg/cataloger/python/parse_requirements_test.go b/syft/pkg/cataloger/python/parse_requirements_test.go index b25179c5070..b38cae3d306 100644 --- a/syft/pkg/cataloger/python/parse_requirements_test.go +++ b/syft/pkg/cataloger/python/parse_requirements_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseRequirementsTxt(t *testing.T) { fixture := "test-fixtures/requires/requirements.txt" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { Name: "flask", diff --git a/syft/pkg/cataloger/python/parse_setup.go b/syft/pkg/cataloger/python/parse_setup.go index ee91f6ada2a..e5150b2743c 100644 --- a/syft/pkg/cataloger/python/parse_setup.go +++ b/syft/pkg/cataloger/python/parse_setup.go @@ -7,9 +7,9 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" 
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // integrity check @@ -22,7 +22,7 @@ var _ generic.Parser = parseSetup // " mypy2 == v0.770", ' mypy3== v0.770', --> match(name=mypy2 version=v0.770), match(name=mypy3, version=v0.770) var pinnedDependency = regexp.MustCompile(`['"]\W?(\w+\W?==\W?[\w.]*)`) -func parseSetup(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseSetup(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var packages []pkg.Package scanner := bufio.NewScanner(reader) diff --git a/syft/pkg/cataloger/python/parse_setup_test.go b/syft/pkg/cataloger/python/parse_setup_test.go index a3fdfd85b33..66500729631 100644 --- a/syft/pkg/cataloger/python/parse_setup_test.go +++ b/syft/pkg/cataloger/python/parse_setup_test.go @@ -6,9 +6,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseSetup(t *testing.T) { @@ -65,7 +65,7 @@ func TestParseSetup(t *testing.T) { for _, tt := range tests { t.Run(tt.fixture, func(t *testing.T) { - locations := source.NewLocationSet(source.NewLocation(tt.fixture)) + locations := file.NewLocationSet(file.NewLocation(tt.fixture)) for i := range tt.expected { tt.expected[i].Locations = locations } diff --git a/syft/pkg/cataloger/python/parse_wheel_egg.go b/syft/pkg/cataloger/python/parse_wheel_egg.go index 911e7801ca5..f3fc20ead54 100644 --- a/syft/pkg/cataloger/python/parse_wheel_egg.go +++ b/syft/pkg/cataloger/python/parse_wheel_egg.go @@ -10,13 +10,13 @@ import ( "github.com/anchore/syft/internal" 
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // parseWheelOrEgg takes the primary metadata file reference and returns the python package it represents. -func parseWheelOrEgg(resolver source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseWheelOrEgg(resolver file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { pd, sources, err := assembleEggOrWheelMetadata(resolver, reader.Location) if err != nil { return nil, nil, err @@ -37,7 +37,7 @@ func parseWheelOrEgg(resolver source.FileResolver, _ *generic.Environment, reade } // fetchRecordFiles finds a corresponding installed-files.txt file for the given python package metadata file and returns the set of file records contained. -func fetchInstalledFiles(resolver source.FileResolver, metadataLocation source.Location, sitePackagesRootPath string) (files []pkg.PythonFileRecord, sources []source.Location, err error) { +func fetchInstalledFiles(resolver file.Resolver, metadataLocation file.Location, sitePackagesRootPath string) (files []pkg.PythonFileRecord, sources []file.Location, err error) { // we've been given a file reference to a specific wheel METADATA file. note: this may be for a directory // or for an image... for an image the METADATA file may be present within multiple layers, so it is important // to reconcile the installed-files.txt path to the same layer (or the next adjacent lower layer). @@ -68,7 +68,7 @@ func fetchInstalledFiles(resolver source.FileResolver, metadataLocation source.L } // fetchRecordFiles finds a corresponding RECORD file for the given python package metadata file and returns the set of file records contained. 
-func fetchRecordFiles(resolver source.FileResolver, metadataLocation source.Location) (files []pkg.PythonFileRecord, sources []source.Location, err error) { +func fetchRecordFiles(resolver file.Resolver, metadataLocation file.Location) (files []pkg.PythonFileRecord, sources []file.Location, err error) { // we've been given a file reference to a specific wheel METADATA file. note: this may be for a directory // or for an image... for an image the METADATA file may be present within multiple layers, so it is important // to reconcile the RECORD path to the same layer (or the next adjacent lower layer). @@ -95,7 +95,7 @@ func fetchRecordFiles(resolver source.FileResolver, metadataLocation source.Loca } // fetchTopLevelPackages finds a corresponding top_level.txt file for the given python package metadata file and returns the set of package names contained. -func fetchTopLevelPackages(resolver source.FileResolver, metadataLocation source.Location) (pkgs []string, sources []source.Location, err error) { +func fetchTopLevelPackages(resolver file.Resolver, metadataLocation file.Location) (pkgs []string, sources []file.Location, err error) { // a top_level.txt file specifies the python top-level packages (provided by this python package) installed into site-packages parentDir := filepath.Dir(metadataLocation.RealPath) topLevelPath := filepath.Join(parentDir, "top_level.txt") @@ -125,7 +125,7 @@ func fetchTopLevelPackages(resolver source.FileResolver, metadataLocation source return pkgs, sources, nil } -func fetchDirectURLData(resolver source.FileResolver, metadataLocation source.Location) (d *pkg.PythonDirectURLOriginInfo, sources []source.Location, err error) { +func fetchDirectURLData(resolver file.Resolver, metadataLocation file.Location) (d *pkg.PythonDirectURLOriginInfo, sources []file.Location, err error) { parentDir := filepath.Dir(metadataLocation.RealPath) directURLPath := filepath.Join(parentDir, "direct_url.json") directURLLocation := 
resolver.RelativeFileByPath(metadataLocation, directURLPath) @@ -160,8 +160,8 @@ func fetchDirectURLData(resolver source.FileResolver, metadataLocation source.Lo } // assembleEggOrWheelMetadata discovers and accumulates python package metadata from multiple file sources and returns a single metadata object as well as a list of files where the metadata was derived from. -func assembleEggOrWheelMetadata(resolver source.FileResolver, metadataLocation source.Location) (*parsedData, []source.Location, error) { - var sources = []source.Location{ +func assembleEggOrWheelMetadata(resolver file.Resolver, metadataLocation file.Location) (*parsedData, []file.Location, error) { + var sources = []file.Location{ metadataLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), } diff --git a/syft/pkg/cataloger/python/parse_wheel_egg_metadata.go b/syft/pkg/cataloger/python/parse_wheel_egg_metadata.go index 55ac924f002..e8d2cafafbf 100644 --- a/syft/pkg/cataloger/python/parse_wheel_egg_metadata.go +++ b/syft/pkg/cataloger/python/parse_wheel_egg_metadata.go @@ -9,15 +9,15 @@ import ( "github.com/mitchellh/mapstructure" - "github.com/anchore/syft/internal/file" + intFile "github.com/anchore/syft/internal/file" "github.com/anchore/syft/internal/log" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) type parsedData struct { Licenses string `mapstructure:"License"` - LicenseLocation source.Location + LicenseLocation file.Location pkg.PythonPackageMetadata `mapstructure:",squash"` } @@ -81,7 +81,7 @@ func parseWheelOrEggMetadata(path string, reader io.Reader) (parsedData, error) pd.SitePackagesRootPath = determineSitePackagesRootPath(path) if pd.Licenses != "" { - pd.LicenseLocation = source.NewLocation(path) + pd.LicenseLocation = file.NewLocation(path) } return pd, nil @@ -91,7 +91,7 @@ func parseWheelOrEggMetadata(path string, reader io.Reader) (parsedData, error) // of egg 
metadata (as opposed to a directory that contains more metadata // files). func isEggRegularFile(path string) bool { - return file.GlobMatch(eggInfoGlob, path) + return intFile.GlobMatch(eggInfoGlob, path) } // determineSitePackagesRootPath returns the path of the site packages root, diff --git a/syft/pkg/cataloger/python/parse_wheel_egg_metadata_test.go b/syft/pkg/cataloger/python/parse_wheel_egg_metadata_test.go index cb776b66937..e9db5446667 100644 --- a/syft/pkg/cataloger/python/parse_wheel_egg_metadata_test.go +++ b/syft/pkg/cataloger/python/parse_wheel_egg_metadata_test.go @@ -6,8 +6,8 @@ import ( "github.com/go-test/deep" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) func TestParseWheelEggMetadata(t *testing.T) { @@ -19,7 +19,7 @@ func TestParseWheelEggMetadata(t *testing.T) { Fixture: "test-fixtures/egg-info/PKG-INFO", ExpectedMetadata: parsedData{ "Apache 2.0", - source.NewLocation("test-fixtures/egg-info/PKG-INFO"), + file.NewLocation("test-fixtures/egg-info/PKG-INFO"), pkg.PythonPackageMetadata{ Name: "requests", Version: "2.22.0", @@ -34,7 +34,7 @@ func TestParseWheelEggMetadata(t *testing.T) { Fixture: "test-fixtures/dist-info/METADATA", ExpectedMetadata: parsedData{ "BSD License", - source.NewLocation("test-fixtures/dist-info/METADATA"), + file.NewLocation("test-fixtures/dist-info/METADATA"), pkg.PythonPackageMetadata{ Name: "Pygments", Version: "2.6.1", @@ -135,7 +135,7 @@ func TestParseWheelEggMetadataInvalid(t *testing.T) { Fixture: "test-fixtures/egg-info/PKG-INFO-INVALID", ExpectedMetadata: parsedData{ "", - source.Location{}, + file.Location{}, pkg.PythonPackageMetadata{ Name: "mxnet", Version: "1.8.0", diff --git a/syft/pkg/cataloger/r/cataloger_test.go b/syft/pkg/cataloger/r/cataloger_test.go index 1581e8dc657..0e2a193d6e0 100644 --- a/syft/pkg/cataloger/r/cataloger_test.go +++ b/syft/pkg/cataloger/r/cataloger_test.go @@ -4,9 +4,9 @@ import ( "testing" 
"github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestRPackageCataloger(t *testing.T) { @@ -15,7 +15,7 @@ func TestRPackageCataloger(t *testing.T) { Name: "base", Version: "4.3.0", FoundBy: "r-package-cataloger", - Locations: source.NewLocationSet(source.NewLocation("base/DESCRIPTION")), + Locations: file.NewLocationSet(file.NewLocation("base/DESCRIPTION")), Licenses: pkg.NewLicenseSet([]pkg.License{pkg.NewLicense("Part of R 4.3.0")}...), Language: pkg.R, Type: pkg.Rpkg, @@ -34,7 +34,7 @@ func TestRPackageCataloger(t *testing.T) { Name: "stringr", Version: "1.5.0.9000", FoundBy: "r-package-cataloger", - Locations: source.NewLocationSet(source.NewLocation("stringr/DESCRIPTION")), + Locations: file.NewLocationSet(file.NewLocation("stringr/DESCRIPTION")), Licenses: pkg.NewLicenseSet([]pkg.License{pkg.NewLicense("MIT")}...), Language: pkg.R, Type: pkg.Rpkg, diff --git a/syft/pkg/cataloger/r/package.go b/syft/pkg/cataloger/r/package.go index b916cc9da69..9fc45d3e26a 100644 --- a/syft/pkg/cataloger/r/package.go +++ b/syft/pkg/cataloger/r/package.go @@ -4,12 +4,12 @@ import ( "strings" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackage(pd parseData, locations ...source.Location) pkg.Package { - locationSet := source.NewLocationSet() +func newPackage(pd parseData, locations ...file.Location) pkg.Package { + locationSet := file.NewLocationSet() for _, loc := range locations { locationSet.Add(loc.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)) } @@ -45,7 +45,7 @@ func packageURL(m parseData) string { // Multiple licences can be specified separated by ‘|’ // (surrounded by spaces) in which case the user can choose any of the above cases. 
// https://cran.rstudio.com/doc/manuals/r-devel/R-exts.html#Licensing -func parseLicenseData(license string, locations ...source.Location) []pkg.License { +func parseLicenseData(license string, locations ...file.Location) []pkg.License { licenses := make([]pkg.License, 0) // check if multiple licenses are separated by | diff --git a/syft/pkg/cataloger/r/parse_description.go b/syft/pkg/cataloger/r/parse_description.go index b062b039559..182cd4bde2f 100644 --- a/syft/pkg/cataloger/r/parse_description.go +++ b/syft/pkg/cataloger/r/parse_description.go @@ -7,9 +7,9 @@ import ( "strings" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) /* some examples of license strings found in DESCRIPTION files: @@ -28,10 +28,10 @@ License: Part of R 4.3.0 License: Unlimited */ -func parseDescriptionFile(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseDescriptionFile(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { values := extractFieldsFromDescriptionFile(reader) m := parseDataFromDescriptionMap(values) - p := newPackage(m, []source.Location{reader.Location}...) + p := newPackage(m, []file.Location{reader.Location}...) 
if p.Name == "" || p.Version == "" { return nil, nil, nil } diff --git a/syft/pkg/cataloger/r/parse_description_test.go b/syft/pkg/cataloger/r/parse_description_test.go index 4263995240d..483c54adbac 100644 --- a/syft/pkg/cataloger/r/parse_description_test.go +++ b/syft/pkg/cataloger/r/parse_description_test.go @@ -8,8 +8,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) func Test_parseDescriptionFile(t *testing.T) { @@ -53,8 +53,8 @@ func Test_parseDescriptionFile(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { f, err := os.Open(tt.fixture) - input := source.LocationReadCloser{ - Location: source.NewLocation(tt.fixture), + input := file.LocationReadCloser{ + Location: file.NewLocation(tt.fixture), ReadCloser: f, } got, _, err := parseDescriptionFile(nil, nil, input) diff --git a/syft/pkg/cataloger/rpm/package.go b/syft/pkg/cataloger/rpm/package.go index 53c0925b906..136af9f5755 100644 --- a/syft/pkg/cataloger/rpm/package.go +++ b/syft/pkg/cataloger/rpm/package.go @@ -8,18 +8,18 @@ import ( rpmdb "github.com/knqyf263/go-rpmdb/pkg" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackage(dbOrRpmLocation source.Location, pd parsedData, distro *linux.Release) pkg.Package { +func newPackage(dbOrRpmLocation file.Location, pd parsedData, distro *linux.Release) pkg.Package { p := pkg.Package{ Name: pd.Name, Version: toELVersion(pd.RpmMetadata), Licenses: pkg.NewLicenseSet(pd.Licenses...), PURL: packageURL(pd.RpmMetadata, distro), - Locations: source.NewLocationSet(dbOrRpmLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations: 
file.NewLocationSet(dbOrRpmLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), Type: pkg.RpmPkg, MetadataType: pkg.RpmMetadataType, Metadata: pd.RpmMetadata, @@ -34,7 +34,7 @@ type parsedData struct { pkg.RpmMetadata } -func newParsedDataFromEntry(licenseLocation source.Location, entry rpmdb.PackageInfo, files []pkg.RpmdbFileRecord) parsedData { +func newParsedDataFromEntry(licenseLocation file.Location, entry rpmdb.PackageInfo, files []pkg.RpmdbFileRecord) parsedData { return parsedData{ Licenses: pkg.NewLicensesFromLocation(licenseLocation, entry.License), RpmMetadata: pkg.RpmMetadata{ diff --git a/syft/pkg/cataloger/rpm/parse_rpm.go b/syft/pkg/cataloger/rpm/parse_rpm.go index 6e866c5cabb..06c5f61451b 100644 --- a/syft/pkg/cataloger/rpm/parse_rpm.go +++ b/syft/pkg/cataloger/rpm/parse_rpm.go @@ -11,11 +11,10 @@ import ( "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // parseRpm parses a single RPM -func parseRpm(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseRpm(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { rpm, err := rpmutils.ReadRpm(reader) if err != nil { return nil, nil, fmt.Errorf("RPM file found but unable to read: %s (%w)", reader.Location.RealPath, err) diff --git a/syft/pkg/cataloger/rpm/parse_rpm_db.go b/syft/pkg/cataloger/rpm/parse_rpm_db.go index ee4d64b4f02..02106f62c35 100644 --- a/syft/pkg/cataloger/rpm/parse_rpm_db.go +++ b/syft/pkg/cataloger/rpm/parse_rpm_db.go @@ -14,11 +14,10 @@ import ( "github.com/anchore/syft/syft/linux" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // parseRpmDb parses an "Packages" RPM DB and returns 
the Packages listed within it. -func parseRpmDB(resolver source.FileResolver, env *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseRpmDB(resolver file.Resolver, env *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { f, err := os.CreateTemp("", internal.ApplicationName+"-rpmdb") if err != nil { return nil, nil, fmt.Errorf("failed to create temp rpmdb file: %w", err) @@ -90,7 +89,7 @@ func toELVersion(metadata pkg.RpmMetadata) string { return fmt.Sprintf("%s-%s", metadata.Version, metadata.Release) } -func extractRpmdbFileRecords(resolver source.FilePathResolver, entry rpmdb.PackageInfo) []pkg.RpmdbFileRecord { +func extractRpmdbFileRecords(resolver file.PathResolver, entry rpmdb.PackageInfo) []pkg.RpmdbFileRecord { var records = make([]pkg.RpmdbFileRecord, 0) files, err := entry.InstalledFiles() diff --git a/syft/pkg/cataloger/rpm/parse_rpm_db_test.go b/syft/pkg/cataloger/rpm/parse_rpm_db_test.go index b58a01744a5..dea087880e5 100644 --- a/syft/pkg/cataloger/rpm/parse_rpm_db_test.go +++ b/syft/pkg/cataloger/rpm/parse_rpm_db_test.go @@ -10,36 +10,35 @@ import ( "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) -var _ source.FileResolver = (*rpmdbTestFileResolverMock)(nil) +var _ file.Resolver = (*rpmdbTestFileResolverMock)(nil) type rpmdbTestFileResolverMock struct { ignorePaths bool } -func (r rpmdbTestFileResolverMock) FilesByExtension(extensions ...string) ([]source.Location, error) { +func (r rpmdbTestFileResolverMock) FilesByExtension(extensions ...string) ([]file.Location, error) { panic("not implemented") } -func (r rpmdbTestFileResolverMock) FilesByBasename(filenames ...string) ([]source.Location, error) { +func (r rpmdbTestFileResolverMock) FilesByBasename(filenames ...string) ([]file.Location, error) { 
panic("not implemented") } -func (r rpmdbTestFileResolverMock) FilesByBasenameGlob(globs ...string) ([]source.Location, error) { +func (r rpmdbTestFileResolverMock) FilesByBasenameGlob(globs ...string) ([]file.Location, error) { panic("not implemented") } -func (r rpmdbTestFileResolverMock) FileContentsByLocation(location source.Location) (io.ReadCloser, error) { +func (r rpmdbTestFileResolverMock) FileContentsByLocation(location file.Location) (io.ReadCloser, error) { panic("not implemented") } -func (r rpmdbTestFileResolverMock) AllLocations() <-chan source.Location { +func (r rpmdbTestFileResolverMock) AllLocations() <-chan file.Location { panic("not implemented") } -func (r rpmdbTestFileResolverMock) FileMetadataByLocation(location source.Location) (source.FileMetadata, error) { +func (r rpmdbTestFileResolverMock) FileMetadataByLocation(location file.Location) (file.Metadata, error) { panic("not implemented") } @@ -53,34 +52,34 @@ func (r rpmdbTestFileResolverMock) HasPath(path string) bool { return !r.ignorePaths } -func (r *rpmdbTestFileResolverMock) FilesByPath(paths ...string) ([]source.Location, error) { +func (r *rpmdbTestFileResolverMock) FilesByPath(paths ...string) ([]file.Location, error) { if r.ignorePaths { // act as if no paths exist return nil, nil } // act as if all files exist - var locations = make([]source.Location, len(paths)) + var locations = make([]file.Location, len(paths)) for i, p := range paths { - locations[i] = source.NewLocation(p) + locations[i] = file.NewLocation(p) } return locations, nil } -func (r *rpmdbTestFileResolverMock) FilesByGlob(...string) ([]source.Location, error) { +func (r *rpmdbTestFileResolverMock) FilesByGlob(...string) ([]file.Location, error) { return nil, fmt.Errorf("not implemented") } -func (r *rpmdbTestFileResolverMock) RelativeFileByPath(source.Location, string) *source.Location { +func (r *rpmdbTestFileResolverMock) RelativeFileByPath(file.Location, string) *file.Location { panic(fmt.Errorf("not 
implemented")) return nil } -func (r *rpmdbTestFileResolverMock) FilesByMIMEType(...string) ([]source.Location, error) { +func (r *rpmdbTestFileResolverMock) FilesByMIMEType(...string) ([]file.Location, error) { return nil, fmt.Errorf("not implemented") } func TestParseRpmDB(t *testing.T) { - packagesLocation := source.NewLocation("test-fixtures/Packages") + packagesLocation := file.NewLocation("test-fixtures/Packages") tests := []struct { fixture string expected []pkg.Package @@ -95,7 +94,7 @@ func TestParseRpmDB(t *testing.T) { Name: "dive", Version: "0.9.2-1", PURL: "pkg:rpm/dive@0.9.2-1?arch=x86_64&upstream=dive-0.9.2-1.src.rpm", - Locations: source.NewLocationSet(packagesLocation), + Locations: file.NewLocationSet(file.NewLocation("test-fixtures/Packages")), Type: pkg.RpmPkg, MetadataType: pkg.RpmMetadataType, Licenses: pkg.NewLicenseSet( @@ -124,7 +123,7 @@ func TestParseRpmDB(t *testing.T) { Name: "dive", Version: "0.9.2-1", PURL: "pkg:rpm/dive@0.9.2-1?arch=x86_64&upstream=dive-0.9.2-1.src.rpm", - Locations: source.NewLocationSet(packagesLocation), + Locations: file.NewLocationSet(packagesLocation), Type: pkg.RpmPkg, MetadataType: pkg.RpmMetadataType, Licenses: pkg.NewLicenseSet( diff --git a/syft/pkg/cataloger/rpm/parse_rpm_manifest.go b/syft/pkg/cataloger/rpm/parse_rpm_manifest.go index ee8de71c6ba..c8110d6dab4 100644 --- a/syft/pkg/cataloger/rpm/parse_rpm_manifest.go +++ b/syft/pkg/cataloger/rpm/parse_rpm_manifest.go @@ -8,13 +8,13 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) // Parses an RPM manifest file, as used in Mariner distroless containers, and returns the Packages listed -func parseRpmManifest(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, 
error) { +func parseRpmManifest(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { r := bufio.NewReader(reader) allPkgs := make([]pkg.Package, 0) @@ -52,7 +52,7 @@ func parseRpmManifest(_ source.FileResolver, _ *generic.Environment, reader sour // Each line is the output of : // rpm --query --all --query-format "%{NAME}\t%{VERSION}-%{RELEASE}\t%{INSTALLTIME}\t%{BUILDTIME}\t%{VENDOR}\t%{EPOCH}\t%{SIZE}\t%{ARCH}\t%{EPOCHNUM}\t%{SOURCERPM}\n" // https://github.com/microsoft/CBL-Mariner/blob/3df18fac373aba13a54bd02466e64969574f13af/toolkit/docs/how_it_works/5_misc.md?plain=1#L150 -func parseRpmManifestEntry(entry string, location source.Location) (*pkg.Package, error) { +func parseRpmManifestEntry(entry string, location file.Location) (*pkg.Package, error) { metadata, err := newMetadataFromManifestLine(entry) if err != nil { return nil, err diff --git a/syft/pkg/cataloger/rpm/parse_rpm_manifest_test.go b/syft/pkg/cataloger/rpm/parse_rpm_manifest_test.go index 64cca390aab..5f2c3e3b7b3 100644 --- a/syft/pkg/cataloger/rpm/parse_rpm_manifest_test.go +++ b/syft/pkg/cataloger/rpm/parse_rpm_manifest_test.go @@ -3,20 +3,20 @@ package rpm import ( "testing" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseRpmManifest(t *testing.T) { fixture := "test-fixtures/container-manifest-2" - location := source.NewLocation(fixture) + location := file.NewLocation(fixture) expected := []pkg.Package{ { Name: "mariner-release", Version: "2.0-12.cm2", PURL: "pkg:rpm/mariner-release@2.0-12.cm2?arch=noarch&upstream=mariner-release-2.0-12.cm2.src.rpm", - Locations: source.NewLocationSet(location), + Locations: file.NewLocationSet(location), Type: pkg.RpmPkg, MetadataType: pkg.RpmMetadataType, Metadata: pkg.RpmMetadata{ @@ -34,7 +34,7 @@ func TestParseRpmManifest(t 
*testing.T) { Name: "filesystem", Version: "1.1-9.cm2", PURL: "pkg:rpm/filesystem@1.1-9.cm2?arch=x86_64&upstream=filesystem-1.1-9.cm2.src.rpm", - Locations: source.NewLocationSet(location), + Locations: file.NewLocationSet(location), Type: pkg.RpmPkg, MetadataType: pkg.RpmMetadataType, Metadata: pkg.RpmMetadata{ @@ -52,7 +52,7 @@ func TestParseRpmManifest(t *testing.T) { Name: "glibc", Version: "2.35-2.cm2", PURL: "pkg:rpm/glibc@2.35-2.cm2?arch=x86_64&upstream=glibc-2.35-2.cm2.src.rpm", - Locations: source.NewLocationSet(location), + Locations: file.NewLocationSet(location), Type: pkg.RpmPkg, MetadataType: pkg.RpmMetadataType, Metadata: pkg.RpmMetadata{ @@ -70,7 +70,7 @@ func TestParseRpmManifest(t *testing.T) { Name: "openssl-libs", Version: "1.1.1k-15.cm2", PURL: "pkg:rpm/openssl-libs@1.1.1k-15.cm2?arch=x86_64&upstream=openssl-1.1.1k-15.cm2.src.rpm", - Locations: source.NewLocationSet(location), + Locations: file.NewLocationSet(location), Type: pkg.RpmPkg, MetadataType: pkg.RpmMetadataType, Metadata: pkg.RpmMetadata{ diff --git a/syft/pkg/cataloger/rpm/parse_rpm_test.go b/syft/pkg/cataloger/rpm/parse_rpm_test.go index 253d99f59dd..83e39528b6f 100644 --- a/syft/pkg/cataloger/rpm/parse_rpm_test.go +++ b/syft/pkg/cataloger/rpm/parse_rpm_test.go @@ -6,12 +6,11 @@ import ( "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseRpmFiles(t *testing.T) { - abcRpmLocation := source.NewLocation("abc-1.01-9.hg20160905.el7.x86_64.rpm") - zorkRpmLocation := source.NewLocation("zork-1.0.3-1.el7.x86_64.rpm") + abcRpmLocation := file.NewLocation("abc-1.01-9.hg20160905.el7.x86_64.rpm") + zorkRpmLocation := file.NewLocation("zork-1.0.3-1.el7.x86_64.rpm") tests := []struct { fixture string expected []pkg.Package @@ -23,7 +22,7 @@ func TestParseRpmFiles(t *testing.T) { Name: "abc", Version: "0:1.01-9.hg20160905.el7", PURL: 
"pkg:rpm/abc@1.01-9.hg20160905.el7?arch=x86_64&epoch=0&upstream=abc-1.01-9.hg20160905.el7.src.rpm", - Locations: source.NewLocationSet(abcRpmLocation), + Locations: file.NewLocationSet(abcRpmLocation), FoundBy: "rpm-file-cataloger", Type: pkg.RpmPkg, MetadataType: pkg.RpmMetadataType, @@ -52,7 +51,7 @@ func TestParseRpmFiles(t *testing.T) { Name: "zork", Version: "0:1.0.3-1.el7", PURL: "pkg:rpm/zork@1.0.3-1.el7?arch=x86_64&epoch=0&upstream=zork-1.0.3-1.el7.src.rpm", - Locations: source.NewLocationSet(zorkRpmLocation), + Locations: file.NewLocationSet(zorkRpmLocation), FoundBy: "rpm-file-cataloger", Type: pkg.RpmPkg, MetadataType: pkg.RpmMetadataType, diff --git a/syft/pkg/cataloger/ruby/package.go b/syft/pkg/cataloger/ruby/package.go index 973d67350fb..86075274204 100644 --- a/syft/pkg/cataloger/ruby/package.go +++ b/syft/pkg/cataloger/ruby/package.go @@ -2,16 +2,16 @@ package ruby import ( "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newGemfileLockPackage(name, version string, locations ...source.Location) pkg.Package { +func newGemfileLockPackage(name, version string, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: name, Version: version, PURL: packageURL(name, version), - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), Language: pkg.Ruby, Type: pkg.GemPkg, } @@ -21,11 +21,11 @@ func newGemfileLockPackage(name, version string, locations ...source.Location) p return p } -func newGemspecPackage(m gemData, gemSpecLocation source.Location) pkg.Package { +func newGemspecPackage(m gemData, gemSpecLocation file.Location) pkg.Package { p := pkg.Package{ Name: m.Name, Version: m.Version, - Locations: source.NewLocationSet(gemSpecLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), + Locations:
file.NewLocationSet(gemSpecLocation.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation)), Licenses: pkg.NewLicenseSet(pkg.NewLicensesFromLocation(gemSpecLocation, m.Licenses...)...), PURL: packageURL(m.Name, m.Version), Language: pkg.Ruby, diff --git a/syft/pkg/cataloger/ruby/parse_gemfile_lock.go b/syft/pkg/cataloger/ruby/parse_gemfile_lock.go index 884f1ea144b..f2bedb4b2a1 100644 --- a/syft/pkg/cataloger/ruby/parse_gemfile_lock.go +++ b/syft/pkg/cataloger/ruby/parse_gemfile_lock.go @@ -6,9 +6,9 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseGemFileLockEntries @@ -16,7 +16,7 @@ var _ generic.Parser = parseGemFileLockEntries var sectionsOfInterest = internal.NewStringSet("GEM", "GIT", "PATH", "PLUGIN SOURCE") // parseGemFileLockEntries is a parser function for Gemfile.lock contents, returning all Gems discovered. 
-func parseGemFileLockEntries(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseGemFileLockEntries(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package scanner := bufio.NewScanner(reader) diff --git a/syft/pkg/cataloger/ruby/parse_gemfile_lock_test.go b/syft/pkg/cataloger/ruby/parse_gemfile_lock_test.go index ef2a0378429..ad94283aae3 100644 --- a/syft/pkg/cataloger/ruby/parse_gemfile_lock_test.go +++ b/syft/pkg/cataloger/ruby/parse_gemfile_lock_test.go @@ -3,14 +3,14 @@ package ruby import ( "testing" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseGemfileLockEntries(t *testing.T) { fixture := "test-fixtures/Gemfile.lock" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) var expectedPkgs = []pkg.Package{ {Name: "actionmailer", Version: "4.1.1", PURL: "pkg:gem/actionmailer@4.1.1", Locations: locations, Language: pkg.Ruby, Type: pkg.GemPkg}, {Name: "actionpack", Version: "4.1.1", PURL: "pkg:gem/actionpack@4.1.1", Locations: locations, Language: pkg.Ruby, Type: pkg.GemPkg}, diff --git a/syft/pkg/cataloger/ruby/parse_gemspec.go b/syft/pkg/cataloger/ruby/parse_gemspec.go index 347caabbd01..97c2876bd81 100644 --- a/syft/pkg/cataloger/ruby/parse_gemspec.go +++ b/syft/pkg/cataloger/ruby/parse_gemspec.go @@ -11,9 +11,9 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseGemFileLockEntries @@ -64,7 +64,7 @@ func 
processList(s string) []string { return results } -func parseGemSpecEntries(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseGemSpecEntries(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package var fields = make(map[string]interface{}) scanner := bufio.NewScanner(reader) diff --git a/syft/pkg/cataloger/ruby/parse_gemspec_test.go b/syft/pkg/cataloger/ruby/parse_gemspec_test.go index 53cb59ecfa1..c320185cccc 100644 --- a/syft/pkg/cataloger/ruby/parse_gemspec_test.go +++ b/syft/pkg/cataloger/ruby/parse_gemspec_test.go @@ -3,15 +3,15 @@ package ruby import ( "testing" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseGemspec(t *testing.T) { fixture := "test-fixtures/bundler.gemspec" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) var expectedPkg = pkg.Package{ Name: "bundler", @@ -20,7 +20,7 @@ func TestParseGemspec(t *testing.T) { Locations: locations, Type: pkg.GemPkg, Licenses: pkg.NewLicenseSet( - pkg.NewLicenseFromLocations("MIT", source.NewLocation(fixture)), + pkg.NewLicenseFromLocations("MIT", file.NewLocation(fixture)), ), Language: pkg.Ruby, MetadataType: pkg.GemMetadataType, diff --git a/syft/pkg/cataloger/rust/cataloger_test.go b/syft/pkg/cataloger/rust/cataloger_test.go index 73b442c817b..303b88fd41d 100644 --- a/syft/pkg/cataloger/rust/cataloger_test.go +++ b/syft/pkg/cataloger/rust/cataloger_test.go @@ -3,9 +3,9 @@ package rust import ( "testing" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func 
TestNewAuditBinaryCataloger(t *testing.T) { @@ -16,7 +16,7 @@ func TestNewAuditBinaryCataloger(t *testing.T) { Version: "0.1.0", PURL: "pkg:cargo/auditable@0.1.0", FoundBy: "cargo-auditable-binary-cataloger", - Locations: source.NewLocationSet(source.NewVirtualLocation("/hello-auditable", "/hello-auditable")), + Locations: file.NewLocationSet(file.NewVirtualLocation("/hello-auditable", "/hello-auditable")), Language: pkg.Rust, Type: pkg.RustPkg, MetadataType: pkg.RustCargoPackageMetadataType, @@ -31,7 +31,7 @@ func TestNewAuditBinaryCataloger(t *testing.T) { Version: "0.1.0", PURL: "pkg:cargo/hello-auditable@0.1.0", FoundBy: "cargo-auditable-binary-cataloger", - Locations: source.NewLocationSet(source.NewVirtualLocation("/hello-auditable", "/hello-auditable")), + Locations: file.NewLocationSet(file.NewVirtualLocation("/hello-auditable", "/hello-auditable")), Language: pkg.Rust, Type: pkg.RustPkg, MetadataType: pkg.RustCargoPackageMetadataType, diff --git a/syft/pkg/cataloger/rust/package.go b/syft/pkg/cataloger/rust/package.go index 1d661bce3b5..8787c5153bf 100644 --- a/syft/pkg/cataloger/rust/package.go +++ b/syft/pkg/cataloger/rust/package.go @@ -4,16 +4,16 @@ import ( "github.com/microsoft/go-rustaudit" "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) // Pkg returns the standard `pkg.Package` representation of the package referenced within the Cargo.lock metadata. 
-func newPackageFromCargoMetadata(m pkg.CargoPackageMetadata, locations ...source.Location) pkg.Package { +func newPackageFromCargoMetadata(m pkg.CargoPackageMetadata, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: m.Name, Version: m.Version, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), PURL: packageURL(m.Name, m.Version), Language: pkg.Rust, Type: pkg.RustPkg, @@ -26,7 +26,7 @@ func newPackageFromCargoMetadata(m pkg.CargoPackageMetadata, locations ...source return p } -func newPackagesFromAudit(location source.Location, versionInfo rustaudit.VersionInfo) []pkg.Package { +func newPackagesFromAudit(location file.Location, versionInfo rustaudit.VersionInfo) []pkg.Package { var pkgs []pkg.Package for _, dep := range versionInfo.Packages { @@ -40,14 +40,14 @@ func newPackagesFromAudit(location source.Location, versionInfo rustaudit.Versio return pkgs } -func newPackageFromAudit(dep *rustaudit.Package, locations ...source.Location) pkg.Package { +func newPackageFromAudit(dep *rustaudit.Package, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: dep.Name, Version: dep.Version, PURL: packageURL(dep.Name, dep.Version), Language: pkg.Rust, Type: pkg.RustPkg, - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), MetadataType: pkg.RustCargoPackageMetadataType, Metadata: pkg.CargoPackageMetadata{ Name: dep.Name, diff --git a/syft/pkg/cataloger/rust/parse_audit_binary.go b/syft/pkg/cataloger/rust/parse_audit_binary.go index 7c7e3ad54ec..de894006b56 100644 --- a/syft/pkg/cataloger/rust/parse_audit_binary.go +++ b/syft/pkg/cataloger/rust/parse_audit_binary.go @@ -7,14 +7,14 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" 
"github.com/anchore/syft/syft/pkg/cataloger/internal/unionreader" - "github.com/anchore/syft/syft/source" ) // Catalog identifies executables then attempts to read Rust dependency information from them -func parseAuditBinary(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseAuditBinary(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { var pkgs []pkg.Package unionReader, err := unionreader.GetUnionReader(reader.ReadCloser) diff --git a/syft/pkg/cataloger/rust/parse_cargo_lock.go b/syft/pkg/cataloger/rust/parse_cargo_lock.go index 0e9d582a7ed..cd001728685 100644 --- a/syft/pkg/cataloger/rust/parse_cargo_lock.go +++ b/syft/pkg/cataloger/rust/parse_cargo_lock.go @@ -6,9 +6,9 @@ import ( "github.com/pelletier/go-toml" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) var _ generic.Parser = parseCargoLock @@ -18,7 +18,7 @@ type cargoLockFile struct { } // parseCargoLock is a parser function for Cargo.lock contents, returning all rust cargo crates discovered. 
-func parseCargoLock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parseCargoLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { tree, err := toml.LoadReader(reader) if err != nil { return nil, nil, fmt.Errorf("unable to load Cargo.lock for parsing: %w", err) diff --git a/syft/pkg/cataloger/rust/parse_cargo_lock_test.go b/syft/pkg/cataloger/rust/parse_cargo_lock_test.go index d05f62d6245..fb4ed7427ab 100644 --- a/syft/pkg/cataloger/rust/parse_cargo_lock_test.go +++ b/syft/pkg/cataloger/rust/parse_cargo_lock_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParseCargoLock(t *testing.T) { fixture := "test-fixtures/Cargo.lock" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { Name: "ansi_term", diff --git a/syft/pkg/cataloger/sbom/cataloger.go b/syft/pkg/cataloger/sbom/cataloger.go index c66c9940ab9..a08c2c2a942 100644 --- a/syft/pkg/cataloger/sbom/cataloger.go +++ b/syft/pkg/cataloger/sbom/cataloger.go @@ -3,10 +3,10 @@ package sbom import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/formats" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - "github.com/anchore/syft/syft/source" ) const catalogerName = "sbom-cataloger" @@ -29,7 +29,7 @@ func NewSBOMCataloger() *generic.Cataloger { ) } -func parseSBOM(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, 
[]artifact.Relationship, error) { +func parseSBOM(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { s, _, err := formats.Decode(reader) if err != nil { return nil, nil, err @@ -47,7 +47,7 @@ func parseSBOM(_ source.FileResolver, _ *generic.Environment, reader source.Loca // Why not keep the original list of locations? Since the "locations" field is meant to capture // where there is evidence of this file, and the catalogers have not run against any file other than, // the SBOM, this is the only location that is relevant for this cataloger. - p.Locations = source.NewLocationSet( + p.Locations = file.NewLocationSet( reader.Location.WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation), ) p.FoundBy = catalogerName diff --git a/syft/pkg/cataloger/sbom/cataloger_test.go b/syft/pkg/cataloger/sbom/cataloger_test.go index a2226f80e7c..46332a2a745 100644 --- a/syft/pkg/cataloger/sbom/cataloger_test.go +++ b/syft/pkg/cataloger/sbom/cataloger_test.go @@ -7,11 +7,11 @@ import ( "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/cpe" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/formats/syftjson" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" "github.com/anchore/syft/syft/sbom" - "github.com/anchore/syft/syft/source" ) func mustCPEs(s ...string) (c []cpe.CPE) { @@ -37,7 +37,7 @@ func Test_parseSBOM(t *testing.T) { Name: "alpine-baselayout", Version: "3.2.0-r23", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet(pkg.NewLicense("GPL-2.0-only")), FoundBy: "sbom-cataloger", PURL: "pkg:apk/alpine/alpine-baselayout@3.2.0-r23?arch=x86_64&upstream=alpine-baselayout&distro=alpine-3.16.3", @@ -54,7 +54,7 @@ func Test_parseSBOM(t *testing.T) { Name: 
"alpine-baselayout-data", Version: "3.2.0-r23", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet(pkg.NewLicense("GPL-2.0-only")), FoundBy: "sbom-cataloger", PURL: "pkg:apk/alpine/alpine-baselayout-data@3.2.0-r23?arch=x86_64&upstream=alpine-baselayout&distro=alpine-3.16.3", @@ -75,7 +75,7 @@ func Test_parseSBOM(t *testing.T) { Name: "alpine-keys", Version: "2.4-r1", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet(pkg.NewLicense("MIT")), FoundBy: "sbom-cataloger", PURL: "pkg:apk/alpine/alpine-keys@2.4-r1?arch=x86_64&upstream=alpine-keys&distro=alpine-3.16.3", @@ -92,7 +92,7 @@ func Test_parseSBOM(t *testing.T) { Name: "apk-tools", Version: "2.12.9-r3", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet(pkg.NewLicense("GPL-2.0-only")), FoundBy: "sbom-cataloger", PURL: "pkg:apk/alpine/apk-tools@2.12.9-r3?arch=x86_64&upstream=apk-tools&distro=alpine-3.16.3", @@ -109,7 +109,7 @@ func Test_parseSBOM(t *testing.T) { Name: "busybox", Version: "1.35.0-r17", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet(pkg.NewLicense("GPL-2.0-only")), FoundBy: "sbom-cataloger", PURL: "pkg:apk/alpine/busybox@1.35.0-r17?arch=x86_64&upstream=busybox&distro=alpine-3.16.3", @@ -121,7 +121,7 @@ func Test_parseSBOM(t *testing.T) { Name: "ca-certificates-bundle", Version: "20220614-r0", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: 
pkg.NewLicenseSet( pkg.NewLicense("MPL-2.0"), pkg.NewLicense("MIT"), @@ -145,7 +145,7 @@ func Test_parseSBOM(t *testing.T) { Name: "libc-utils", Version: "0.7.2-r3", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet( pkg.NewLicense("BSD-2-Clause"), pkg.NewLicense("BSD-3-Clause"), @@ -165,7 +165,7 @@ func Test_parseSBOM(t *testing.T) { Name: "libcrypto1.1", Version: "1.1.1s-r0", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet(pkg.NewLicense("OpenSSL")), // SPDX expression is not set FoundBy: "sbom-cataloger", PURL: "pkg:apk/alpine/libcrypto1.1@1.1.1s-r0?arch=x86_64&upstream=openssl&distro=alpine-3.16.3", @@ -177,7 +177,7 @@ func Test_parseSBOM(t *testing.T) { Name: "libssl1.1", Version: "1.1.1s-r0", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet(pkg.NewLicense("OpenSSL")), // SPDX expression is not set FoundBy: "sbom-cataloger", PURL: "pkg:apk/alpine/libssl1.1@1.1.1s-r0?arch=x86_64&upstream=openssl&distro=alpine-3.16.3", @@ -189,7 +189,7 @@ func Test_parseSBOM(t *testing.T) { Name: "musl", Version: "1.2.3-r1", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet(pkg.NewLicense("MIT")), // SPDX expression is not set FoundBy: "sbom-cataloger", PURL: "pkg:apk/alpine/musl@1.2.3-r1?arch=x86_64&upstream=musl&distro=alpine-3.16.3", @@ -201,7 +201,7 @@ func Test_parseSBOM(t *testing.T) { Name: "musl-utils", Version: "1.2.3-r1", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: 
file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet( pkg.NewLicense("MIT"), pkg.NewLicense("BSD"), @@ -222,7 +222,7 @@ func Test_parseSBOM(t *testing.T) { Name: "scanelf", Version: "1.3.4-r0", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet( pkg.NewLicense("GPL-2.0-only"), ), @@ -236,7 +236,7 @@ func Test_parseSBOM(t *testing.T) { Name: "ssl_client", Version: "1.35.0-r17", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet( pkg.NewLicense("GPL-2.0-only"), ), @@ -255,7 +255,7 @@ func Test_parseSBOM(t *testing.T) { Name: "zlib", Version: "1.2.12-r3", Type: "apk", - Locations: source.NewLocationSet(source.NewLocation("sbom.syft.json")), + Locations: file.NewLocationSet(file.NewLocation("sbom.syft.json")), Licenses: pkg.NewLicenseSet( pkg.NewLicense("Zlib"), ), @@ -267,9 +267,9 @@ func Test_parseSBOM(t *testing.T) { }, } - apkgdbLocation := source.NewLocationSet(source.Location{ - LocationData: source.LocationData{ - Coordinates: source.Coordinates{ + apkgdbLocation := file.NewLocationSet(file.Location{ + LocationData: file.LocationData{ + Coordinates: file.Coordinates{ RealPath: "/lib/apk/db/installed", FileSystemID: "sha256:e5e13b0c77cbb769548077189c3da2f0a764ceca06af49d8d558e759f5c232bd", }, @@ -359,7 +359,7 @@ func Test_parseSBOM(t *testing.T) { }, { From: libSSL, - To: source.Coordinates{ + To: file.Coordinates{ RealPath: "/lib/libssl.so.1.1", FileSystemID: "sha256:e5e13b0c77cbb769548077189c3da2f0a764ceca06af49d8d558e759f5c232bd", }, @@ -372,7 +372,7 @@ func Test_parseSBOM(t *testing.T) { }, { From: baseLayout, - To: source.Coordinates{ + To: file.Coordinates{ RealPath: "/etc/profile.d/color_prompt.sh.disabled", FileSystemID: 
"sha256:e5e13b0c77cbb769548077189c3da2f0a764ceca06af49d8d558e759f5c232bd", }, @@ -380,7 +380,7 @@ func Test_parseSBOM(t *testing.T) { }, { From: baseLayout, - To: source.Coordinates{ + To: file.Coordinates{ RealPath: "/etc/modprobe.d/kms.conf", FileSystemID: "sha256:e5e13b0c77cbb769548077189c3da2f0a764ceca06af49d8d558e759f5c232bd", }, @@ -396,7 +396,7 @@ func Test_parseSBOM(t *testing.T) { for _, p := range expectedPkgs { expectedRelationships = append(expectedRelationships, artifact.Relationship{ From: p, - To: source.Coordinates{ + To: file.Coordinates{ RealPath: "sbom.syft.json", }, Type: artifact.DescribedByRelationship, diff --git a/syft/pkg/cataloger/swift/package.go b/syft/pkg/cataloger/swift/package.go index 0e1c1ce4a0a..ad6416e64ae 100644 --- a/syft/pkg/cataloger/swift/package.go +++ b/syft/pkg/cataloger/swift/package.go @@ -2,16 +2,16 @@ package swift import ( "github.com/anchore/packageurl-go" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" - "github.com/anchore/syft/syft/source" ) -func newPackage(name, version, hash string, locations ...source.Location) pkg.Package { +func newPackage(name, version, hash string, locations ...file.Location) pkg.Package { p := pkg.Package{ Name: name, Version: version, PURL: packageURL(name, version), - Locations: source.NewLocationSet(locations...), + Locations: file.NewLocationSet(locations...), Type: pkg.CocoapodsPkg, Language: pkg.Swift, MetadataType: pkg.CocoapodsMetadataType, diff --git a/syft/pkg/cataloger/swift/parse_podfile_lock.go b/syft/pkg/cataloger/swift/parse_podfile_lock.go index afff41ae9b3..58a58c4643f 100644 --- a/syft/pkg/cataloger/swift/parse_podfile_lock.go +++ b/syft/pkg/cataloger/swift/parse_podfile_lock.go @@ -8,9 +8,9 @@ import ( "gopkg.in/yaml.v3" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/generic" - 
"github.com/anchore/syft/syft/source" ) var _ generic.Parser = parsePodfileLock @@ -25,7 +25,7 @@ type podfileLock struct { } // parsePodfileLock is a parser function for Podfile.lock contents, returning all cocoapods pods discovered. -func parsePodfileLock(_ source.FileResolver, _ *generic.Environment, reader source.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { +func parsePodfileLock(_ file.Resolver, _ *generic.Environment, reader file.LocationReadCloser) ([]pkg.Package, []artifact.Relationship, error) { bytes, err := io.ReadAll(reader) if err != nil { return nil, nil, fmt.Errorf("unable to read file: %w", err) diff --git a/syft/pkg/cataloger/swift/parse_podfile_lock_test.go b/syft/pkg/cataloger/swift/parse_podfile_lock_test.go index ef4c7d2c444..53b6dfd12e4 100644 --- a/syft/pkg/cataloger/swift/parse_podfile_lock_test.go +++ b/syft/pkg/cataloger/swift/parse_podfile_lock_test.go @@ -4,14 +4,14 @@ import ( "testing" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest" - "github.com/anchore/syft/syft/source" ) func TestParsePodfileLock(t *testing.T) { fixture := "test-fixtures/Podfile.lock" - locations := source.NewLocationSet(source.NewLocation(fixture)) + locations := file.NewLocationSet(file.NewLocation(fixture)) expectedPkgs := []pkg.Package{ { Name: "GlossButtonNode", diff --git a/syft/pkg/license.go b/syft/pkg/license.go index 8278ba7bd90..6e681da6348 100644 --- a/syft/pkg/license.go +++ b/syft/pkg/license.go @@ -7,8 +7,8 @@ import ( "github.com/anchore/syft/internal" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/license" - "github.com/anchore/syft/syft/source" ) var _ sort.Interface = (*Licenses)(nil) @@ -27,7 +27,7 @@ type License struct { SPDXExpression string 
`json:"spdxExpression"` Type license.Type `json:"type"` URLs internal.StringSet `hash:"ignore"` - Locations source.LocationSet `hash:"ignore"` + Locations file.LocationSet `hash:"ignore"` } type Licenses []License @@ -70,7 +70,7 @@ func NewLicense(value string) License { SPDXExpression: spdxExpression, Type: license.Declared, URLs: internal.NewStringSet(), - Locations: source.NewLocationSet(), + Locations: file.NewLocationSet(), } } @@ -85,7 +85,7 @@ func NewLicenseFromType(value string, t license.Type) License { SPDXExpression: spdxExpression, Type: t, URLs: internal.NewStringSet(), - Locations: source.NewLocationSet(), + Locations: file.NewLocationSet(), } } @@ -96,7 +96,7 @@ func NewLicensesFromValues(values ...string) (licenses []License) { return } -func NewLicensesFromLocation(location source.Location, values ...string) (licenses []License) { +func NewLicensesFromLocation(location file.Location, values ...string) (licenses []License) { for _, v := range values { if v == "" { continue @@ -106,7 +106,7 @@ func NewLicensesFromLocation(location source.Location, values ...string) (licens return } -func NewLicenseFromLocations(value string, locations ...source.Location) License { +func NewLicenseFromLocations(value string, locations ...file.Location) License { l := NewLicense(value) for _, loc := range locations { l.Locations.Add(loc) diff --git a/syft/pkg/license_set_test.go b/syft/pkg/license_set_test.go index 16abd83a8a1..09c617b6095 100644 --- a/syft/pkg/license_set_test.go +++ b/syft/pkg/license_set_test.go @@ -6,6 +6,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/anchore/syft/internal" + "github.com/anchore/syft/syft/file" "github.com/anchore/syft/syft/license" "github.com/anchore/syft/syft/source" ) @@ -58,15 +59,15 @@ func TestLicenseSet_Add(t *testing.T) { { name: "deduplicate licenses with locations", licenses: []License{ - NewLicenseFromLocations("MIT", source.NewLocationFromCoordinates(source.Coordinates{RealPath: 
"/place", FileSystemID: "1"})), - NewLicenseFromLocations("MIT", source.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "1"})), - NewLicenseFromLocations("MIT", source.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "2"})), + NewLicenseFromLocations("MIT", file.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "1"})), + NewLicenseFromLocations("MIT", file.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "1"})), + NewLicenseFromLocations("MIT", file.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "2"})), }, want: []License{ NewLicenseFromLocations( "MIT", - source.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "1"}), - source.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "2"}), + file.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "1"}), + file.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "2"}), ), }, }, @@ -74,14 +75,14 @@ func TestLicenseSet_Add(t *testing.T) { name: "same licenses with different locations", licenses: []License{ NewLicense("MIT"), - NewLicenseFromLocations("MIT", source.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "2"})), - NewLicenseFromLocations("MIT", source.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "1"})), + NewLicenseFromLocations("MIT", file.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "2"})), + NewLicenseFromLocations("MIT", file.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "1"})), }, want: []License{ NewLicenseFromLocations( "MIT", - source.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "1"}), - source.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: 
"2"}), + file.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "1"}), + file.NewLocationFromCoordinates(source.Coordinates{RealPath: "/place", FileSystemID: "2"}), ), }, }, @@ -89,7 +90,7 @@ func TestLicenseSet_Add(t *testing.T) { name: "same license from different sources", licenses: []License{ NewLicense("MIT"), - NewLicenseFromLocations("MIT", source.NewLocation("/place")), + NewLicenseFromLocations("MIT", file.NewLocation("/place")), NewLicenseFromURLs("MIT", "https://example.com"), }, want: []License{ @@ -98,7 +99,7 @@ func TestLicenseSet_Add(t *testing.T) { SPDXExpression: "MIT", Type: license.Declared, URLs: internal.NewStringSet("https://example.com"), - Locations: source.NewLocationSet(source.NewLocation("/place")), + Locations: file.NewLocationSet(file.NewLocation("/place")), }, }, }, @@ -107,7 +108,7 @@ func TestLicenseSet_Add(t *testing.T) { licenses: []License{ NewLicenseFromType("MIT", license.Concluded), NewLicenseFromType("MIT", license.Declared), - NewLicenseFromLocations("MIT", source.NewLocation("/place")), + NewLicenseFromLocations("MIT", file.NewLocation("/place")), NewLicenseFromURLs("MIT", "https://example.com"), }, want: []License{ @@ -116,14 +117,14 @@ func TestLicenseSet_Add(t *testing.T) { SPDXExpression: "MIT", Type: license.Concluded, URLs: internal.NewStringSet(), - Locations: source.NewLocationSet(), + Locations: file.NewLocationSet(), }, { Value: "MIT", SPDXExpression: "MIT", Type: license.Declared, URLs: internal.NewStringSet("https://example.com"), - Locations: source.NewLocationSet(source.NewLocation("/place")), + Locations: file.NewLocationSet(file.NewLocation("/place")), }, }, }, diff --git a/syft/pkg/license_test.go b/syft/pkg/license_test.go index f3456f5aa21..4e9e16d943f 100644 --- a/syft/pkg/license_test.go +++ b/syft/pkg/license_test.go @@ -8,14 +8,14 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/syft/syft/artifact" - "github.com/anchore/syft/syft/source" + 
"github.com/anchore/syft/syft/file" ) func Test_Hash(t *testing.T) { - loc1 := source.NewLocation("place!") + loc1 := file.NewLocation("place!") loc1.FileSystemID = "fs1" - loc2 := source.NewLocation("place!") + loc2 := file.NewLocation("place!") loc2.FileSystemID = "fs2" // important! there is a different file system ID lic1 := NewLicenseFromLocations("MIT", loc1) @@ -47,44 +47,44 @@ func Test_Sort(t *testing.T) { { name: "single", licenses: []License{ - NewLicenseFromLocations("MIT", source.NewLocation("place!")), + NewLicenseFromLocations("MIT", file.NewLocation("place!")), }, expected: []License{ - NewLicenseFromLocations("MIT", source.NewLocation("place!")), + NewLicenseFromLocations("MIT", file.NewLocation("place!")), }, }, { name: "multiple", licenses: []License{ - NewLicenseFromLocations("MIT", source.NewLocation("place!")), + NewLicenseFromLocations("MIT", file.NewLocation("place!")), NewLicenseFromURLs("MIT", "https://github.com/anchore/syft/blob/main/LICENSE"), - NewLicenseFromLocations("Apache", source.NewLocation("area!")), - NewLicenseFromLocations("gpl2+", source.NewLocation("area!")), + NewLicenseFromLocations("Apache", file.NewLocation("area!")), + NewLicenseFromLocations("gpl2+", file.NewLocation("area!")), }, expected: Licenses{ - NewLicenseFromLocations("Apache", source.NewLocation("area!")), + NewLicenseFromLocations("Apache", file.NewLocation("area!")), NewLicenseFromURLs("MIT", "https://github.com/anchore/syft/blob/main/LICENSE"), - NewLicenseFromLocations("MIT", source.NewLocation("place!")), - NewLicenseFromLocations("gpl2+", source.NewLocation("area!")), + NewLicenseFromLocations("MIT", file.NewLocation("place!")), + NewLicenseFromLocations("gpl2+", file.NewLocation("area!")), }, }, { name: "multiple with location variants", licenses: []License{ - NewLicenseFromLocations("MIT", source.NewLocation("place!")), - NewLicenseFromLocations("MIT", source.NewLocation("park!")), + NewLicenseFromLocations("MIT", file.NewLocation("place!")), 
+ NewLicenseFromLocations("MIT", file.NewLocation("park!")), NewLicense("MIT"), NewLicense("AAL"), NewLicense("Adobe-2006"), - NewLicenseFromLocations("Apache", source.NewLocation("area!")), + NewLicenseFromLocations("Apache", file.NewLocation("area!")), }, expected: Licenses{ NewLicense("AAL"), NewLicense("Adobe-2006"), - NewLicenseFromLocations("Apache", source.NewLocation("area!")), + NewLicenseFromLocations("Apache", file.NewLocation("area!")), NewLicense("MIT"), - NewLicenseFromLocations("MIT", source.NewLocation("park!")), - NewLicenseFromLocations("MIT", source.NewLocation("place!")), + NewLicenseFromLocations("MIT", file.NewLocation("park!")), + NewLicenseFromLocations("MIT", file.NewLocation("place!")), }, }, } diff --git a/syft/pkg/package.go b/syft/pkg/package.go index 6d028f20c68..c72e57d34ae 100644 --- a/syft/pkg/package.go +++ b/syft/pkg/package.go @@ -11,25 +11,24 @@ import ( "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" "github.com/anchore/syft/syft/cpe" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) // Package represents an application or library that has been bundled into a distributable format. // TODO: if we ignore FoundBy for ID generation should we merge the field to show it was found in two places? -// TODO: should cyclonedx tags exist on the struct? Why don't we use the model.Package type? 
type Package struct { - id artifact.ID `hash:"ignore"` - Name string // the package name - Version string // the version of the package - FoundBy string `hash:"ignore" cyclonedx:"foundBy"` // the specific cataloger that discovered this package - Locations source.LocationSet // the locations that lead to the discovery of this package (note: this is not necessarily the locations that make up this package) - Licenses LicenseSet // licenses discovered with the package metadata - Language Language `hash:"ignore" cyclonedx:"language"` // the language ecosystem this package belongs to (e.g. JavaScript, Python, etc) - Type Type `cyclonedx:"type"` // the package type (e.g. Npm, Yarn, Python, Rpm, Deb, etc) - CPEs []cpe.CPE `hash:"ignore"` // all possible Common Platform Enumerators (note: this is NOT included in the definition of the ID since all fields on a CPE are derived from other fields) - PURL string `hash:"ignore"` // the Package URL (see https://github.com/package-url/purl-spec) - MetadataType MetadataType `cyclonedx:"metadataType"` // the shape of the additional data in the "metadata" field - Metadata interface{} // additional data found while parsing the package source + id artifact.ID `hash:"ignore"` + Name string // the package name + Version string // the version of the package + FoundBy string `hash:"ignore" cyclonedx:"foundBy"` // the specific cataloger that discovered this package + Locations file.LocationSet // the locations that lead to the discovery of this package (note: this is not necessarily the locations that make up this package) + Licenses LicenseSet // licenses discovered with the package metadata + Language Language `hash:"ignore" cyclonedx:"language"` // the language ecosystem this package belongs to (e.g. JavaScript, Python, etc) + Type Type `cyclonedx:"type"` // the package type (e.g. 
Npm, Yarn, Python, Rpm, Deb, etc) + CPEs []cpe.CPE `hash:"ignore"` // all possible Common Platform Enumerators (note: this is NOT included in the definition of the ID since all fields on a CPE are derived from other fields) + PURL string `hash:"ignore"` // the Package URL (see https://github.com/package-url/purl-spec) + MetadataType MetadataType `cyclonedx:"metadataType"` // the shape of the additional data in the "metadata" field + Metadata interface{} // additional data found while parsing the package source } func (p *Package) OverrideID(id artifact.ID) { diff --git a/syft/pkg/package_test.go b/syft/pkg/package_test.go index 7c461a680b8..24b8d37fd50 100644 --- a/syft/pkg/package_test.go +++ b/syft/pkg/package_test.go @@ -8,12 +8,12 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/syft/syft/cpe" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) func TestIDUniqueness(t *testing.T) { - originalLocation := source.NewVirtualLocationFromCoordinates( - source.Coordinates{ + originalLocation := file.NewVirtualLocationFromCoordinates( + file.Coordinates{ RealPath: "39.0742° N, 21.8243° E", FileSystemID: "Earth", }, @@ -24,7 +24,7 @@ func TestIDUniqueness(t *testing.T) { Name: "pi", Version: "3.14", FoundBy: "Archimedes", - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( originalLocation, ), Licenses: NewLicenseSet( @@ -101,8 +101,8 @@ func TestIDUniqueness(t *testing.T) { { name: "location is reflected", transform: func(pkg Package) Package { - locations := source.NewLocationSet(pkg.Locations.ToSlice()...) - locations.Add(source.NewLocation("/somewhere/new")) + locations := file.NewLocationSet(pkg.Locations.ToSlice()...) 
+ locations.Add(file.NewLocation("/somewhere/new")) pkg.Locations = locations return pkg }, @@ -122,7 +122,7 @@ func TestIDUniqueness(t *testing.T) { newLocation := originalLocation newLocation.FileSystemID = "Mars" - pkg.Locations = source.NewLocationSet(newLocation) + pkg.Locations = file.NewLocationSet(newLocation) return pkg }, expectedIDComparison: assert.Equal, @@ -133,7 +133,7 @@ func TestIDUniqueness(t *testing.T) { newLocation := originalLocation newLocation.FileSystemID = "Mars" - locations := source.NewLocationSet(pkg.Locations.ToSlice()...) + locations := file.NewLocationSet(pkg.Locations.ToSlice()...) locations.Add(newLocation, originalLocation) pkg.Locations = locations @@ -236,8 +236,8 @@ func TestIDUniqueness(t *testing.T) { } func TestPackage_Merge(t *testing.T) { - originalLocation := source.NewVirtualLocationFromCoordinates( - source.Coordinates{ + originalLocation := file.NewVirtualLocationFromCoordinates( + file.Coordinates{ RealPath: "39.0742° N, 21.8243° E", FileSystemID: "Earth", }, @@ -259,7 +259,7 @@ func TestPackage_Merge(t *testing.T) { Name: "pi", Version: "3.14", FoundBy: "Archimedes", - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( originalLocation, ), Language: "math", @@ -282,7 +282,7 @@ func TestPackage_Merge(t *testing.T) { Name: "pi", Version: "3.14", FoundBy: "Archimedes", - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( similarLocation, // NOTE: difference; we have a different layer but the same path ), Language: "math", @@ -305,7 +305,7 @@ func TestPackage_Merge(t *testing.T) { Name: "pi", Version: "3.14", FoundBy: "Archimedes", - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( originalLocation, similarLocation, // NOTE: merge! 
), @@ -333,7 +333,7 @@ func TestPackage_Merge(t *testing.T) { Name: "pi", Version: "3.14", FoundBy: "Archimedes", - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( originalLocation, ), Language: "math", @@ -356,7 +356,7 @@ func TestPackage_Merge(t *testing.T) { Name: "pi-DIFFERENT", // difference Version: "3.14", FoundBy: "Archimedes", - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( originalLocation, ), Language: "math", @@ -395,7 +395,7 @@ func TestPackage_Merge(t *testing.T) { if diff := cmp.Diff(*tt.expected, tt.subject, cmp.AllowUnexported(Package{}), cmp.Comparer( - func(x, y source.LocationSet) bool { + func(x, y file.LocationSet) bool { xs := x.ToSlice() ys := y.ToSlice() @@ -442,7 +442,7 @@ func licenseComparer(x, y License) bool { return cmp.Equal(x, y, cmp.Comparer(locationComparer)) } -func locationComparer(x, y source.Location) bool { +func locationComparer(x, y file.Location) bool { return cmp.Equal(x.Coordinates, y.Coordinates) && cmp.Equal(x.VirtualPath, y.VirtualPath) } diff --git a/syft/pkg/relationships_by_file_ownership_test.go b/syft/pkg/relationships_by_file_ownership_test.go index fdef2897171..f34cb8be4cb 100644 --- a/syft/pkg/relationships_by_file_ownership_test.go +++ b/syft/pkg/relationships_by_file_ownership_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/anchore/syft/syft/artifact" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) func TestOwnershipByFilesRelationship(t *testing.T) { @@ -19,9 +19,9 @@ func TestOwnershipByFilesRelationship(t *testing.T) { name: "owns-by-real-path", setup: func(t testing.TB) ([]Package, []artifact.Relationship) { parent := Package{ - Locations: source.NewLocationSet( - source.NewVirtualLocation("/a/path", "/another/path"), - source.NewVirtualLocation("/b/path", "/bee/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/a/path", "/another/path"), + 
file.NewVirtualLocation("/b/path", "/bee/path"), ), Type: RpmPkg, MetadataType: RpmMetadataType, @@ -36,9 +36,9 @@ func TestOwnershipByFilesRelationship(t *testing.T) { parent.SetID() child := Package{ - Locations: source.NewLocationSet( - source.NewVirtualLocation("/c/path", "/another/path"), - source.NewVirtualLocation("/d/path", "/another/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/c/path", "/another/path"), + file.NewVirtualLocation("/d/path", "/another/path"), ), Type: NpmPkg, } @@ -62,9 +62,9 @@ func TestOwnershipByFilesRelationship(t *testing.T) { name: "owns-by-virtual-path", setup: func(t testing.TB) ([]Package, []artifact.Relationship) { parent := Package{ - Locations: source.NewLocationSet( - source.NewVirtualLocation("/a/path", "/some/other/path"), - source.NewVirtualLocation("/b/path", "/bee/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/a/path", "/some/other/path"), + file.NewVirtualLocation("/b/path", "/bee/path"), ), Type: RpmPkg, MetadataType: RpmMetadataType, @@ -79,9 +79,9 @@ func TestOwnershipByFilesRelationship(t *testing.T) { parent.SetID() child := Package{ - Locations: source.NewLocationSet( - source.NewVirtualLocation("/c/path", "/another/path"), - source.NewLocation("/d/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/c/path", "/another/path"), + file.NewLocation("/d/path"), ), Type: NpmPkg, } @@ -104,9 +104,9 @@ func TestOwnershipByFilesRelationship(t *testing.T) { name: "ignore-empty-path", setup: func(t testing.TB) ([]Package, []artifact.Relationship) { parent := Package{ - Locations: source.NewLocationSet( - source.NewVirtualLocation("/a/path", "/some/other/path"), - source.NewVirtualLocation("/b/path", "/bee/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/a/path", "/some/other/path"), + file.NewVirtualLocation("/b/path", "/bee/path"), ), Type: RpmPkg, MetadataType: RpmMetadataType, @@ -122,9 +122,9 @@ func TestOwnershipByFilesRelationship(t 
*testing.T) { parent.SetID() child := Package{ - Locations: source.NewLocationSet( - source.NewVirtualLocation("/c/path", "/another/path"), - source.NewLocation("/d/path"), + Locations: file.NewLocationSet( + file.NewVirtualLocation("/c/path", "/another/path"), + file.NewLocation("/d/path"), ), Type: NpmPkg, } diff --git a/syft/pkg/relationships_evident_by_test.go b/syft/pkg/relationships_evident_by_test.go index f0a99a6ba1a..21e7801bfd5 100644 --- a/syft/pkg/relationships_evident_by_test.go +++ b/syft/pkg/relationships_evident_by_test.go @@ -7,45 +7,45 @@ import ( "github.com/stretchr/testify/require" "github.com/anchore/syft/syft/artifact" - "github.com/anchore/syft/syft/source" + "github.com/anchore/syft/syft/file" ) func TestRelationshipsEvidentBy(t *testing.T) { c := NewCollection() - coordA := source.Coordinates{ + coordA := file.Coordinates{ RealPath: "/somewhere/real", FileSystemID: "abc", } - coordC := source.Coordinates{ + coordC := file.Coordinates{ RealPath: "/somewhere/real", FileSystemID: "abc", } - coordD := source.Coordinates{ + coordD := file.Coordinates{ RealPath: "/somewhere/real", FileSystemID: "abc", } pkgA := Package{ - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( // added! - source.NewLocationFromCoordinates(coordA).WithAnnotation(EvidenceAnnotationKey, PrimaryEvidenceAnnotation), + file.NewLocationFromCoordinates(coordA).WithAnnotation(EvidenceAnnotationKey, PrimaryEvidenceAnnotation), // ignored... 
- source.NewLocationFromCoordinates(coordC).WithAnnotation(EvidenceAnnotationKey, SupportingEvidenceAnnotation), - source.NewLocationFromCoordinates(coordD), + file.NewLocationFromCoordinates(coordC).WithAnnotation(EvidenceAnnotationKey, SupportingEvidenceAnnotation), + file.NewLocationFromCoordinates(coordD), ), } pkgA.SetID() c.Add(pkgA) - coordB := source.Coordinates{ + coordB := file.Coordinates{ RealPath: "/somewhere-else/real", FileSystemID: "def", } pkgB := Package{ - Locations: source.NewLocationSet( + Locations: file.NewLocationSet( // added! - source.NewLocationFromCoordinates(coordB).WithAnnotation(EvidenceAnnotationKey, PrimaryEvidenceAnnotation), + file.NewLocationFromCoordinates(coordB).WithAnnotation(EvidenceAnnotationKey, PrimaryEvidenceAnnotation), ), } pkgB.SetID() diff --git a/syft/sbom/sbom.go b/syft/sbom/sbom.go index 7770027182b..0bc8feb0cfa 100644 --- a/syft/sbom/sbom.go +++ b/syft/sbom/sbom.go @@ -21,11 +21,11 @@ type SBOM struct { type Artifacts struct { Packages *pkg.Collection - FileMetadata map[source.Coordinates]source.FileMetadata - FileDigests map[source.Coordinates][]file.Digest - FileContents map[source.Coordinates]string - FileLicenses map[source.Coordinates][]file.License - Secrets map[source.Coordinates][]file.SearchResult + FileMetadata map[file.Coordinates]file.Metadata + FileDigests map[file.Coordinates][]file.Digest + FileContents map[file.Coordinates]string + FileLicenses map[file.Coordinates][]file.License + Secrets map[file.Coordinates][]file.SearchResult LinuxDistribution *linux.Release } @@ -49,8 +49,8 @@ func (s SBOM) RelationshipsSorted() []artifact.Relationship { return relationships } -func (s SBOM) AllCoordinates() []source.Coordinates { - set := source.NewCoordinateSet() +func (s SBOM) AllCoordinates() []file.Coordinates { + set := file.NewCoordinateSet() for coordinates := range s.Artifacts.FileMetadata { set.Add(coordinates) } @@ -89,8 +89,8 @@ func (s SBOM) RelationshipsForPackage(p pkg.Package, rt 
...artifact.Relationship // CoordinatesForPackage returns all coordinates for the provided package for provided relationship types // If no types are provided, all relationship types are considered. -func (s SBOM) CoordinatesForPackage(p pkg.Package, rt ...artifact.RelationshipType) []source.Coordinates { - var coordinates []source.Coordinates +func (s SBOM) CoordinatesForPackage(p pkg.Package, rt ...artifact.RelationshipType) []file.Coordinates { + var coordinates []file.Coordinates for _, relationship := range s.RelationshipsForPackage(p, rt...) { cords := extractCoordinates(relationship) coordinates = append(coordinates, cords...) @@ -98,12 +98,12 @@ func (s SBOM) CoordinatesForPackage(p pkg.Package, rt ...artifact.RelationshipTy return coordinates } -func extractCoordinates(relationship artifact.Relationship) (results []source.Coordinates) { - if coordinates, exists := relationship.From.(source.Coordinates); exists { +func extractCoordinates(relationship artifact.Relationship) (results []file.Coordinates) { + if coordinates, exists := relationship.From.(file.Coordinates); exists { results = append(results, coordinates) } - if coordinates, exists := relationship.To.(source.Coordinates); exists { + if coordinates, exists := relationship.To.(file.Coordinates); exists { results = append(results, coordinates) } diff --git a/syft/source/deferred_resolver.go b/syft/source/deferred_resolver.go deleted file mode 100644 index 7ca9b90eab6..00000000000 --- a/syft/source/deferred_resolver.go +++ /dev/null @@ -1,108 +0,0 @@ -package source - -import ( - "io" - - "github.com/anchore/syft/internal/log" -) - -func NewDeferredResolverFromSource(creator func() (Source, error)) *DeferredResolver { - return NewDeferredResolver(func() (FileResolver, error) { - s, err := creator() - if err != nil { - return nil, err - } - - return s.FileResolver(SquashedScope) - }) -} - -func NewDeferredResolver(creator func() (FileResolver, error)) *DeferredResolver { - return &DeferredResolver{ 
- creator: creator, - } -} - -type DeferredResolver struct { - creator func() (FileResolver, error) - resolver FileResolver -} - -func (d *DeferredResolver) getResolver() (FileResolver, error) { - if d.resolver == nil { - resolver, err := d.creator() - if err != nil { - return nil, err - } - d.resolver = resolver - } - return d.resolver, nil -} - -func (d *DeferredResolver) FileContentsByLocation(location Location) (io.ReadCloser, error) { - r, err := d.getResolver() - if err != nil { - return nil, err - } - return r.FileContentsByLocation(location) -} - -func (d *DeferredResolver) HasPath(s string) bool { - r, err := d.getResolver() - if err != nil { - log.Debug("unable to get resolver: %v", err) - return false - } - return r.HasPath(s) -} - -func (d *DeferredResolver) FilesByPath(paths ...string) ([]Location, error) { - r, err := d.getResolver() - if err != nil { - return nil, err - } - return r.FilesByPath(paths...) -} - -func (d *DeferredResolver) FilesByGlob(patterns ...string) ([]Location, error) { - r, err := d.getResolver() - if err != nil { - return nil, err - } - return r.FilesByGlob(patterns...) -} - -func (d *DeferredResolver) FilesByMIMEType(types ...string) ([]Location, error) { - r, err := d.getResolver() - if err != nil { - return nil, err - } - return r.FilesByMIMEType(types...) 
-} - -func (d *DeferredResolver) RelativeFileByPath(location Location, path string) *Location { - r, err := d.getResolver() - if err != nil { - return nil - } - return r.RelativeFileByPath(location, path) -} - -func (d *DeferredResolver) AllLocations() <-chan Location { - r, err := d.getResolver() - if err != nil { - log.Debug("unable to get resolver: %v", err) - return nil - } - return r.AllLocations() -} - -func (d *DeferredResolver) FileMetadataByLocation(location Location) (FileMetadata, error) { - r, err := d.getResolver() - if err != nil { - return FileMetadata{}, err - } - return r.FileMetadataByLocation(location) -} - -var _ FileResolver = (*DeferredResolver)(nil) diff --git a/syft/source/deprecated.go b/syft/source/deprecated.go new file mode 100644 index 00000000000..4b7e35cf11a --- /dev/null +++ b/syft/source/deprecated.go @@ -0,0 +1,119 @@ +package source + +import ( + "io" + + stereoscopeFile "github.com/anchore/stereoscope/pkg/file" + "github.com/anchore/stereoscope/pkg/image" + "github.com/anchore/syft/syft/file" +) + +// Deprecated: use file.Metadata instead +type FileMetadata = file.Metadata + +type ( + // Deprecated: use file.Coordinates instead + Coordinates = file.Coordinates + + // Deprecated: use file.CoordinateSet instead + CoordinateSet = file.CoordinateSet + + // Deprecated: use file.Resolver instead + FileResolver = file.Resolver + + // Deprecated: use file.ContentResolver instead + FileContentResolver = file.ContentResolver + + // Deprecated: use file.PathResolver instead + FilePathResolver = file.PathResolver + + // Deprecated: use file.LocationResolver instead + FileLocationResolver = file.LocationResolver + + // Deprecated: use file.MetadataResolver instead + FileMetadataResolver = file.MetadataResolver + + // Deprecated: use file.WritableResolver instead + WritableFileResolver = file.WritableResolver + + // Deprecated: use file.MockResolver instead + MockResolver = file.MockResolver + + // Deprecated: use file.Location 
instead + Location = file.Location + + // Deprecated: use file.LocationData instead + LocationData = file.LocationData + + // Deprecated: use file.LocationMetadata instead + LocationMetadata = file.LocationMetadata + + // Deprecated: use file.LocationSet instead + LocationSet = file.LocationSet + + // Deprecated: use file.Locations instead + Locations = file.Locations + + // Deprecated: use file.LocationReadCloser instead + LocationReadCloser = file.LocationReadCloser +) + +// Deprecated: use file.NewCoordinateSet instead +func NewCoordinateSet(coordinates ...file.Coordinates) file.CoordinateSet { + return file.NewCoordinateSet(coordinates...) +} + +// Deprecated: use file.NewLocationSet instead +func NewLocationSet(locations ...file.Location) file.LocationSet { + return file.NewLocationSet(locations...) +} + +// Deprecated: use file.NewLocation instead +func NewLocation(realPath string) file.Location { + return file.NewLocation(realPath) +} + +// Deprecated: use file.NewVirtualLocation instead +func NewVirtualLocation(realPath, virtualPath string) file.Location { + return file.NewVirtualLocation(realPath, virtualPath) +} + +// Deprecated: use file.NewLocationFromCoordinates instead +func NewLocationFromCoordinates(coordinates file.Coordinates) file.Location { + return file.NewLocationFromCoordinates(coordinates) +} + +// Deprecated: use file.NewVirtualLocationFromCoordinates instead +func NewVirtualLocationFromCoordinates(coordinates file.Coordinates, virtualPath string) file.Location { + return file.NewVirtualLocationFromCoordinates(coordinates, virtualPath) +} + +// Deprecated: use file.NewLocationFromImage instead +func NewLocationFromImage(virtualPath string, ref stereoscopeFile.Reference, img *image.Image) file.Location { + return file.NewLocationFromImage(virtualPath, ref, img) +} + +// Deprecated: use file.NewLocationFromDirectory instead +func NewLocationFromDirectory(responsePath string, ref stereoscopeFile.Reference) file.Location { + return 
file.NewLocationFromDirectory(responsePath, ref) +} + +// Deprecated: use file.NewVirtualLocationFromDirectory instead +func NewVirtualLocationFromDirectory(responsePath, virtualResponsePath string, ref stereoscopeFile.Reference) file.Location { + return file.NewVirtualLocationFromDirectory(responsePath, virtualResponsePath, ref) +} + +// Deprecated: use file.NewLocationReadCloser instead +func NewLocationReadCloser(location file.Location, reader io.ReadCloser) file.LocationReadCloser { + return file.NewLocationReadCloser(location, reader) +} + +// Deprecated: use file.NewMockResolverForPaths instead +func NewMockResolverForPaths(paths ...string) *file.MockResolver { + return file.NewMockResolverForPaths(paths...) +} + +// Deprecated: use file.NewMockResolverForPathsWithMetadata instead +func NewMockResolverForPathsWithMetadata(metadata map[file.Coordinates]file.Metadata) *file.MockResolver { + return file.NewMockResolverForPathsWithMetadata(metadata) +} diff --git a/syft/source/empty_resolver.go b/syft/source/empty_resolver.go deleted file mode 100644 index 72c9331dd9d..00000000000 --- a/syft/source/empty_resolver.go +++ /dev/null @@ -1,45 +0,0 @@ -package source - -import ( - "io" -) - -type EmptyResolver struct{} - -func (e EmptyResolver) FileContentsByLocation(_ Location) (io.ReadCloser, error) { - return nil, nil -} - -func (e EmptyResolver) HasPath(_ string) bool { - return false -} - -func (e EmptyResolver) FilesByPath(_ ...string) ([]Location, error) { - return nil, nil -} - -func (e EmptyResolver) FilesByGlob(_ ...string) ([]Location, error) { - return nil, nil -} - -func (e EmptyResolver) FilesByMIMEType(_ ...string) ([]Location, error) { - return nil, nil -} - -func (e EmptyResolver) RelativeFileByPath(_ Location, _ string) *Location { - return nil -} - -func (e EmptyResolver) AllLocations() <-chan Location { - return nil -} - -func (e EmptyResolver) FileMetadataByLocation(_ Location) (FileMetadata, error) { - return FileMetadata{}, nil -} - -func (e 
EmptyResolver) Write(_ Location, _ io.Reader) error { - return nil -} - -var _ WritableFileResolver = (*EmptyResolver)(nil) diff --git a/syft/source/file_details.go b/syft/source/file_details.go deleted file mode 100644 index f034057ba5f..00000000000 --- a/syft/source/file_details.go +++ /dev/null @@ -1,21 +0,0 @@ -//go:build linux || darwin || netbsd -// +build linux darwin netbsd - -package source - -import ( - "os" - "syscall" -) - -// GetXid is the UID GID system info for unix -func GetXid(info os.FileInfo) (uid, gid int) { - uid = -1 - gid = -1 - if stat, ok := info.Sys().(*syscall.Stat_t); ok { - uid = int(stat.Uid) - gid = int(stat.Gid) - } - - return uid, gid -} diff --git a/syft/source/file_details_win.go b/syft/source/file_details_win.go deleted file mode 100644 index 31fd05063e7..00000000000 --- a/syft/source/file_details_win.go +++ /dev/null @@ -1,13 +0,0 @@ -//go:build windows -// +build windows - -package source - -import ( - "os" -) - -// GetXid is a placeholder for windows file information -func GetXid(info os.FileInfo) (uid, gid int) { - return -1, -1 -} diff --git a/syft/source/file_metadata.go b/syft/source/file_metadata.go deleted file mode 100644 index 0763564d0fb..00000000000 --- a/syft/source/file_metadata.go +++ /dev/null @@ -1,17 +0,0 @@ -package source - -import ( - "github.com/anchore/stereoscope/pkg/file" - "github.com/anchore/stereoscope/pkg/image" -) - -type FileMetadata = file.Metadata - -func fileMetadataByLocation(img *image.Image, location Location) (file.Metadata, error) { - entry, err := img.FileCatalog.Get(location.ref) - if err != nil { - return FileMetadata{}, err - } - - return entry.Metadata, nil -} diff --git a/syft/source/source.go b/syft/source/source.go index edbd86203d1..bc59b306d7a 100644 --- a/syft/source/source.go +++ b/syft/source/source.go @@ -22,6 +22,8 @@ import ( "github.com/anchore/stereoscope/pkg/image" "github.com/anchore/syft/internal/log" "github.com/anchore/syft/syft/artifact" + 
"github.com/anchore/syft/syft/file" + "github.com/anchore/syft/syft/internal/fileresolver" ) // Source is an object that captures the data source to be cataloged, configuration, and a specific resolver used @@ -30,7 +32,7 @@ type Source struct { id artifact.ID `hash:"ignore"` Image *image.Image `hash:"ignore"` // the image object to be cataloged (image only) Metadata Metadata - directoryResolver *directoryResolver `hash:"ignore"` + directoryResolver *fileresolver.Directory `hash:"ignore"` path string base string mutex *sync.Mutex @@ -466,7 +468,7 @@ func chain(chainID string, layers []LayerMetadata) string { return chain(chainID, layers[1:]) } -func (s *Source) FileResolver(scope Scope) (FileResolver, error) { +func (s *Source) FileResolver(scope Scope) (file.Resolver, error) { switch s.Metadata.Scheme { case DirectoryScheme, FileScheme: s.mutex.Lock() @@ -476,21 +478,21 @@ func (s *Source) FileResolver(scope Scope) (FileResolver, error) { if err != nil { return nil, err } - resolver, err := newDirectoryResolver(s.path, s.base, exclusionFunctions...) + res, err := fileresolver.NewFromDirectory(s.path, s.base, exclusionFunctions...) 
if err != nil { return nil, fmt.Errorf("unable to create directory resolver: %w", err) } - s.directoryResolver = resolver + s.directoryResolver = res } return s.directoryResolver, nil case ImageScheme: - var resolver FileResolver + var res file.Resolver var err error switch scope { case SquashedScope: - resolver, err = newImageSquashResolver(s.Image) + res, err = fileresolver.NewFromContainerImageSquash(s.Image) case AllLayersScope: - resolver, err = newAllLayersResolver(s.Image) + res, err = fileresolver.NewFromContainerImageAllLayers(s.Image) default: return nil, fmt.Errorf("bad image scope provided: %+v", scope) } @@ -499,9 +501,9 @@ func (s *Source) FileResolver(scope Scope) (FileResolver, error) { } // image tree contains all paths, so we filter out the excluded entries afterwards if len(s.Exclusions) > 0 { - resolver = NewExcludingResolver(resolver, getImageExclusionFunction(s.Exclusions)) + res = fileresolver.NewExcluding(res, getImageExclusionFunction(s.Exclusions)) } - return resolver, nil + return res, nil } return nil, fmt.Errorf("unable to determine FilePathResolver with current scheme=%q", s.Metadata.Scheme) } @@ -543,12 +545,12 @@ func getImageExclusionFunction(exclusions []string) func(string) bool { } } -func getDirectoryExclusionFunctions(root string, exclusions []string) ([]pathIndexVisitor, error) { +func getDirectoryExclusionFunctions(root string, exclusions []string) ([]fileresolver.PathIndexVisitor, error) { if len(exclusions) == 0 { return nil, nil } - // this is what directoryResolver.indexTree is doing to get the absolute path: + // this is what Directory.indexTree is doing to get the absolute path: root, err := filepath.Abs(root) if err != nil { return nil, err @@ -576,7 +578,7 @@ func getDirectoryExclusionFunctions(root string, exclusions []string) ([]pathInd return nil, fmt.Errorf("invalid exclusion pattern(s): '%s' (must start with one of: './', '*/', or '**/')", strings.Join(errors, "', '")) } - return []pathIndexVisitor{ + return 
[]fileresolver.PathIndexVisitor{ func(path string, info os.FileInfo, _ error) error { for _, exclusion := range exclusions { // this is required to handle Windows filepaths @@ -589,7 +591,7 @@ func getDirectoryExclusionFunctions(root string, exclusions []string) ([]pathInd if info != nil && info.IsDir() { return filepath.SkipDir } - return errSkipPath + return fileresolver.ErrSkipPath } } return nil diff --git a/syft/source/source_test.go b/syft/source/source_test.go index cfc6b8f2923..32af0c05202 100644 --- a/syft/source/source_test.go +++ b/syft/source/source_test.go @@ -6,7 +6,6 @@ package source import ( "io" "io/fs" - "io/ioutil" "os" "os/exec" "path" @@ -24,6 +23,7 @@ import ( "github.com/anchore/stereoscope/pkg/image" "github.com/anchore/stereoscope/pkg/imagetest" "github.com/anchore/syft/syft/artifact" + "github.com/anchore/syft/syft/internal/fileresolver" ) func TestParseInput(t *testing.T) { @@ -191,7 +191,7 @@ func TestNewFromDirectory(t *testing.T) { require.NoError(t, err) assert.Equal(t, test.input, src.Metadata.Path) - resolver, err := src.FileResolver(SquashedScope) + res, err := src.FileResolver(SquashedScope) if test.expectedErr { if err == nil { t.Fatal("expected an error when making the resolver but got none") @@ -201,7 +201,7 @@ func TestNewFromDirectory(t *testing.T) { require.NoError(t, err) } - refs, err := resolver.FilesByPath(test.inputPaths...) + refs, err := res.FilesByPath(test.inputPaths...) if err != nil { t.Errorf("FilesByPath call produced an error: %+v", err) } @@ -239,10 +239,10 @@ func TestNewFromFile(t *testing.T) { assert.Equal(t, test.input, src.Metadata.Path) assert.Equal(t, src.Metadata.Path, src.path) - resolver, err := src.FileResolver(SquashedScope) + res, err := src.FileResolver(SquashedScope) require.NoError(t, err) - refs, err := resolver.FilesByPath(test.inputPaths...) + refs, err := res.FilesByPath(test.inputPaths...) 
require.NoError(t, err) assert.Len(t, refs, test.expRefs) @@ -287,15 +287,15 @@ func TestNewFromFile_WithArchive(t *testing.T) { assert.Equal(t, archivePath, src.Metadata.Path) assert.NotEqual(t, src.Metadata.Path, src.path) - resolver, err := src.FileResolver(SquashedScope) + res, err := src.FileResolver(SquashedScope) require.NoError(t, err) - refs, err := resolver.FilesByPath(test.inputPaths...) + refs, err := res.FilesByPath(test.inputPaths...) require.NoError(t, err) assert.Len(t, refs, test.expRefs) if test.contents != "" { - reader, err := resolver.FileContentsByLocation(refs[0]) + reader, err := res.FileContentsByLocation(refs[0]) require.NoError(t, err) data, err := io.ReadAll(reader) @@ -354,10 +354,10 @@ func TestNewFromDirectoryShared(t *testing.T) { assert.NoError(t, err) src.Metadata.Path = test.notExist - resolver2, err := src.FileResolver(SquashedScope) + resolver, err := src.FileResolver(SquashedScope) assert.NoError(t, err) - refs, err := resolver2.FilesByPath(test.inputPaths...) + refs, err := resolver.FilesByPath(test.inputPaths...) 
if err != nil { t.Errorf("FilesByPath call produced an error: %+v", err) } @@ -389,11 +389,11 @@ func TestFilesByPathDoesNotExist(t *testing.T) { if err != nil { t.Errorf("could not create NewDirScope: %+v", err) } - resolver, err := src.FileResolver(SquashedScope) + res, err := src.FileResolver(SquashedScope) if err != nil { t.Errorf("could not get resolver error: %+v", err) } - refs, err := resolver.FilesByPath(test.path) + refs, err := res.FilesByPath(test.path) if err != nil { t.Errorf("could not get file references from path: %s, %v", test.path, err) } @@ -438,11 +438,11 @@ func TestFilesByGlob(t *testing.T) { if err != nil { t.Errorf("could not create NewDirScope: %+v", err) } - resolver, err := src.FileResolver(SquashedScope) + res, err := src.FileResolver(SquashedScope) if err != nil { t.Errorf("could not get resolver error: %+v", err) } - contents, err := resolver.FilesByGlob(test.glob) + contents, err := res.FilesByGlob(test.glob) if err != nil { t.Errorf("could not get files by glob: %s+v", err) } @@ -612,11 +612,11 @@ func TestDirectoryExclusions(t *testing.T) { if err != nil { t.Errorf("could not create NewDirScope: %+v", err) } - resolver, err := src.FileResolver(SquashedScope) + res, err := src.FileResolver(SquashedScope) if err != nil { t.Errorf("could not get resolver error: %+v", err) } - locations, err := resolver.FilesByGlob(test.glob) + locations, err := res.FilesByGlob(test.glob) if err != nil { t.Errorf("could not get files by glob: %s+v", err) } @@ -704,11 +704,11 @@ func TestImageExclusions(t *testing.T) { if err != nil { t.Errorf("could not create NewDirScope: %+v", err) } - resolver, err := src.FileResolver(SquashedScope) + res, err := src.FileResolver(SquashedScope) if err != nil { t.Errorf("could not get resolver error: %+v", err) } - contents, err := resolver.FilesByGlob(test.glob) + contents, err := res.FilesByGlob(test.glob) if err != nil { t.Errorf("could not get files by glob: %s+v", err) } @@ -774,7 +774,7 @@ func 
Test_crossPlatformExclusions(t *testing.T) { root: "/", path: "/usr/var/lib", exclude: "**/var/lib", - walkHint: errSkipPath, + walkHint: fileresolver.ErrSkipPath, }, // linux specific tests... { @@ -783,7 +783,7 @@ func Test_crossPlatformExclusions(t *testing.T) { path: "/usr/var/lib/etc.txt", exclude: "**/*.txt", finfo: dummyInfo{isDir: false}, - walkHint: errSkipPath, + walkHint: fileresolver.ErrSkipPath, }, { desc: "linux relative", @@ -792,7 +792,7 @@ func Test_crossPlatformExclusions(t *testing.T) { exclude: "./*.txt", finfo: dummyInfo{isDir: false}, - walkHint: errSkipPath, + walkHint: fileresolver.ErrSkipPath, }, { desc: "linux one level", @@ -814,7 +814,7 @@ func Test_crossPlatformExclusions(t *testing.T) { path: "/C:/User/stuff/thing.txt", exclude: "**/*.txt", finfo: dummyInfo{isDir: false}, - walkHint: errSkipPath, + walkHint: fileresolver.ErrSkipPath, }, { desc: "windows relative", @@ -822,7 +822,7 @@ func Test_crossPlatformExclusions(t *testing.T) { path: "/C:/User/stuff/thing.txt", exclude: "./*.txt", finfo: dummyInfo{isDir: false}, - walkHint: errSkipPath, + walkHint: fileresolver.ErrSkipPath, }, { desc: "windows one level", @@ -898,7 +898,7 @@ func createArchive(t testing.TB, sourceDirPath, destinationArchivePath string, l func setupArchiveTest(t testing.TB, sourceDirPath string, layer2 bool) string { t.Helper() - archivePrefix, err := ioutil.TempFile("", "syft-archive-TEST-") + archivePrefix, err := os.CreateTemp("", "syft-archive-TEST-") require.NoError(t, err) t.Cleanup( diff --git a/syft/source/test-fixtures/system_paths/target/link/a-symlink/place b/syft/source/test-fixtures/system_paths/target/link/a-symlink/place new file mode 100644 index 00000000000..476e93d5714 --- /dev/null +++ b/syft/source/test-fixtures/system_paths/target/link/a-symlink/place @@ -0,0 +1 @@ +good \ No newline at end of file