build,commit: add --sbom to scan and produce SBOMs when committing
Add a --sbom flag to `buildah build` and `buildah commit` which will scan the rootfs and specified context directories to build SPDX or CycloneDX SBOMs and lists of package URLs.

Signed-off-by: Nalin Dahyabhai <nalin@redhat.com>
Makefile | 2
@ -39,7 +39,7 @@ LIBSECCOMP_COMMIT := release-2.3

EXTRA_LDFLAGS ?=
BUILDAH_LDFLAGS := $(GO_LDFLAGS) '-X main.GitCommit=$(GIT_COMMIT) -X main.buildInfo=$(SOURCE_DATE_EPOCH) -X main.cniVersion=$(CNI_COMMIT) $(EXTRA_LDFLAGS)'
SOURCES=*.go imagebuildah/*.go bind/*.go chroot/*.go copier/*.go define/*.go docker/*.go internal/config/*.go internal/mkcw/*.go internal/mkcw/types/*.go internal/parse/*.go internal/source/*.go internal/util/*.go manifests/*.go pkg/chrootuser/*.go pkg/cli/*.go pkg/completion/*.go pkg/formats/*.go pkg/overlay/*.go pkg/parse/*.go pkg/rusage/*.go pkg/sshagent/*.go pkg/umask/*.go pkg/util/*.go util/*.go
SOURCES=*.go imagebuildah/*.go bind/*.go chroot/*.go copier/*.go define/*.go docker/*.go internal/config/*.go internal/mkcw/*.go internal/mkcw/types/*.go internal/parse/*.go internal/sbom/*.go internal/source/*.go internal/tmpdir/*.go internal/util/*.go internal/volumes/*.go manifests/*.go pkg/chrootuser/*.go pkg/cli/*.go pkg/completion/*.go pkg/formats/*.go pkg/overlay/*.go pkg/parse/*.go pkg/rusage/*.go pkg/sshagent/*.go pkg/umask/*.go pkg/util/*.go util/*.go

LINTFLAGS ?=
@ -391,6 +391,10 @@ type ImportFromImageOptions struct {
// instead of the usual rootfs contents.
type ConfidentialWorkloadOptions = define.ConfidentialWorkloadOptions

// SBOMScanOptions encapsulates options which control whether or not we run a
// scanner on the rootfs that we're about to commit, and how.
type SBOMScanOptions = define.SBOMScanOptions

// NewBuilder creates a new build container.
func NewBuilder(ctx context.Context, store storage.Store, options BuilderOptions) (*Builder, error) {
if options.CommonBuildOpts == nil {
@ -42,6 +42,17 @@ type commitInputOptions struct {
|
||||
quiet bool
|
||||
referenceTime string
|
||||
rm bool
|
||||
pull string
|
||||
pullAlways bool
|
||||
pullNever bool
|
||||
sbomImgOutput string
|
||||
sbomImgPurlOutput string
|
||||
sbomMergeStrategy string
|
||||
sbomOutput string
|
||||
sbomPreset string
|
||||
sbomPurlOutput string
|
||||
sbomScannerCommand []string
|
||||
sbomScannerImage string
|
||||
signaturePolicy string
|
||||
signBy string
|
||||
squash bool
|
||||
@ -112,6 +123,36 @@ func commitListFlagSet(cmd *cobra.Command, opts *commitInputOptions) {
|
||||
flags.BoolVarP(&opts.quiet, "quiet", "q", false, "don't output progress information when writing images")
|
||||
flags.StringVar(&opts.referenceTime, "reference-time", "", "set the timestamp on the image to match the named `file`")
|
||||
_ = cmd.RegisterFlagCompletionFunc("reference-time", completion.AutocompleteNone)
|
||||
|
||||
flags.StringVar(&opts.pull, "pull", "true", "pull SBOM scanner images from the registry if newer or not present in store, if false, only pull SBOM scanner images if not present, if always, pull SBOM scanner images even if the named images are present in store, if never, only use images present in store if available")
|
||||
flags.Lookup("pull").NoOptDefVal = "true" //allow `--pull ` to be set to `true` as expected.
|
||||
|
||||
flags.BoolVar(&opts.pullAlways, "pull-always", false, "pull the image even if the named image is present in store")
|
||||
if err := flags.MarkHidden("pull-always"); err != nil {
|
||||
panic(fmt.Sprintf("error marking the pull-always flag as hidden: %v", err))
|
||||
}
|
||||
flags.BoolVar(&opts.pullNever, "pull-never", false, "do not pull the image, use the image present in store if available")
|
||||
if err := flags.MarkHidden("pull-never"); err != nil {
|
||||
panic(fmt.Sprintf("error marking the pull-never flag as hidden: %v", err))
|
||||
}
|
||||
|
||||
flags.StringVar(&opts.sbomPreset, "sbom", "", "scan working container using `preset` configuration")
|
||||
_ = cmd.RegisterFlagCompletionFunc("sbom", completion.AutocompleteNone)
|
||||
flags.StringVar(&opts.sbomScannerImage, "sbom-scanner-image", "", "scan working container using scanner command from `image`")
|
||||
_ = cmd.RegisterFlagCompletionFunc("sbom-scanner-image", completion.AutocompleteNone)
|
||||
flags.StringArrayVar(&opts.sbomScannerCommand, "sbom-scanner-command", nil, "scan working container using `command` in scanner image")
|
||||
_ = cmd.RegisterFlagCompletionFunc("sbom-scanner-command", completion.AutocompleteNone)
|
||||
flags.StringVar(&opts.sbomMergeStrategy, "sbom-merge-strategy", "", "merge scan results using `strategy`")
|
||||
_ = cmd.RegisterFlagCompletionFunc("sbom-merge-strategy", completion.AutocompleteNone)
|
||||
flags.StringVar(&opts.sbomOutput, "sbom-output", "", "save scan results to `file`")
|
||||
_ = cmd.RegisterFlagCompletionFunc("sbom-output", completion.AutocompleteDefault)
|
||||
flags.StringVar(&opts.sbomImgOutput, "sbom-image-output", "", "add scan results to image as `path`")
|
||||
_ = cmd.RegisterFlagCompletionFunc("sbom-image-output", completion.AutocompleteNone)
|
||||
flags.StringVar(&opts.sbomPurlOutput, "sbom-purl-output", "", "save scan results to `file``")
|
||||
_ = cmd.RegisterFlagCompletionFunc("sbom-purl-output", completion.AutocompleteDefault)
|
||||
flags.StringVar(&opts.sbomImgPurlOutput, "sbom-image-purl-output", "", "add scan results to image as `path`")
|
||||
_ = cmd.RegisterFlagCompletionFunc("sbom-image-purl-output", completion.AutocompleteNone)
|
||||
|
||||
flags.StringVar(&opts.signBy, "sign-by", "", "sign the image using a GPG key with the specified `FINGERPRINT`")
|
||||
_ = cmd.RegisterFlagCompletionFunc("sign-by", completion.AutocompleteNone)
|
||||
if err := flags.MarkHidden("omit-timestamp"); err != nil {
|
||||
@ -296,6 +337,22 @@ func commitCmd(c *cobra.Command, args []string, iopts commitInputOptions) error
|
||||
options.ConfidentialWorkloadOptions = confidentialWorkloadOptions
|
||||
}
|
||||
|
||||
pullPolicy, err := parse.PullPolicyFromOptions(c)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if c.Flag("sbom").Changed || c.Flag("sbom-scanner-command").Changed || c.Flag("sbom-scanner-image").Changed || c.Flag("sbom-image-output").Changed || c.Flag("sbom-merge-strategy").Changed || c.Flag("sbom-output").Changed || c.Flag("sbom-image-output").Changed || c.Flag("sbom-purl-output").Changed || c.Flag("sbom-image-purl-output").Changed {
|
||||
var sbomOptions []define.SBOMScanOptions
|
||||
sbomOption, err := parse.SBOMScanOptions(c)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
sbomOption.PullPolicy = pullPolicy
|
||||
sbomOptions = append(sbomOptions, *sbomOption)
|
||||
options.SBOMScanOptions = sbomOptions
|
||||
}
|
||||
|
||||
if exclusiveFlags > 1 {
|
||||
return errors.New("can not use more then one timestamp option at at time")
|
||||
}
|
||||
|
@ -70,7 +70,7 @@ func init() {
|
||||
flags.StringVar(&opts.creds, "creds", "", "use `[username[:password]]` for accessing the registry")
|
||||
flags.StringVarP(&opts.format, "format", "f", defaultFormat(), "`format` of the image manifest and metadata")
|
||||
flags.StringVar(&opts.name, "name", "", "`name` for the working container")
|
||||
flags.StringVar(&opts.pull, "pull", "true", "pull the image from the registry if newer or not present in store, if false, only pull the image if not present, if always, pull the image even if the named image is present in store, if never, only use the image present in store if available")
|
||||
flags.StringVar(&opts.pull, "pull", "true", "pull images from the registry if newer or not present in store, if false, only pull images if not present, if always, pull images even if the named images are present in store, if never, only use images present in store if available")
|
||||
flags.Lookup("pull").NoOptDefVal = "true" //allow `--pull ` to be set to `true` as expected.
|
||||
|
||||
flags.BoolVar(&opts.pullAlways, "pull-always", false, "pull the image even if the named image is present in store")
|
||||
@ -212,35 +212,10 @@ func fromCmd(c *cobra.Command, args []string, iopts fromReply) error {
|
||||
logrus.Warnf("ignoring platforms other than %+v: %+v", platforms[0], platforms[1:])
|
||||
}
|
||||
|
||||
pullFlagsCount := 0
|
||||
if c.Flag("pull").Changed {
|
||||
pullFlagsCount++
|
||||
pullPolicy, err := parse.PullPolicyFromOptions(c)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if c.Flag("pull-always").Changed {
|
||||
pullFlagsCount++
|
||||
}
|
||||
if c.Flag("pull-never").Changed {
|
||||
pullFlagsCount++
|
||||
}
|
||||
|
||||
if pullFlagsCount > 1 {
|
||||
return errors.New("can only set one of 'pull' or 'pull-always' or 'pull-never'")
|
||||
}
|
||||
|
||||
// Allow for --pull, --pull=true, --pull=false, --pull=never, --pull=always
|
||||
// --pull-always and --pull-never. The --pull-never and --pull-always options
|
||||
// will not be documented.
|
||||
pullPolicy := define.PullIfMissing
|
||||
if strings.EqualFold(strings.TrimSpace(iopts.pull), "true") {
|
||||
pullPolicy = define.PullIfNewer
|
||||
}
|
||||
if iopts.pullAlways || strings.EqualFold(strings.TrimSpace(iopts.pull), "always") {
|
||||
pullPolicy = define.PullAlways
|
||||
}
|
||||
if iopts.pullNever || strings.EqualFold(strings.TrimSpace(iopts.pull), "never") {
|
||||
pullPolicy = define.PullNever
|
||||
}
|
||||
logrus.Debugf("Pull Policy for pull [%v]", pullPolicy)
|
||||
|
||||
signaturePolicy := iopts.signaturePolicy
|
||||
|
||||
|
commit.go | 49
@ -124,6 +124,9 @@ type CommitOptions struct {
|
||||
// contents will be used in their place. The contents will be owned by
|
||||
// 0:0 and have mode 0644. Currently only accepts regular files.
|
||||
ExtraImageContent map[string]string
|
||||
// SBOMScanOptions encapsulates options which control whether or not we
|
||||
// run scanners on the rootfs that we're about to commit, and how.
|
||||
SBOMScanOptions []SBOMScanOptions
|
||||
}
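The new field is also reachable from the Go API, not only from the CLI flags above. Below is a minimal, hedged sketch (not part of this change) of a caller that asks Commit to run a scan; it assumes the usual context, github.com/containers/buildah, github.com/containers/buildah/define, and github.com/containers/image/v5/types imports, and the scanner image, command, and output file name are illustrative only.

// sbomCommit is an editorial example: commit an existing working container
// while requesting a trivy-style SPDX scan of its rootfs.
func sbomCommit(ctx context.Context, builder *buildah.Builder, dest types.ImageReference) (string, error) {
	scan := define.SBOMScanOptions{
		Image:      "ghcr.io/aquasecurity/trivy",
		PullPolicy: define.PullIfMissing,
		Commands:   []string{"trivy filesystem -q {ROOTFS} --format spdx-json --output {OUTPUT}"},
		SBOMOutput: "sbom.spdx.json", // hypothetical local file for the merged SBOM
	}
	options := buildah.CommitOptions{
		SBOMScanOptions: []define.SBOMScanOptions{scan},
	}
	imgID, _, _, err := builder.Commit(ctx, dest, options)
	return imgID, err
}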
|
||||
|
||||
var (
|
||||
@ -321,6 +324,28 @@ func (b *Builder) Commit(ctx context.Context, dest types.ImageReference, options
|
||||
}
|
||||
logrus.Debugf("committing image with reference %q is allowed by policy", transports.ImageName(dest))
|
||||
|
||||
// If we need to scan the rootfs, do it now.
|
||||
options.ExtraImageContent = copyStringStringMap(options.ExtraImageContent)
|
||||
var extraImageContent, extraLocalContent map[string]string
|
||||
if len(options.SBOMScanOptions) != 0 {
|
||||
var scansDirectory string
|
||||
if extraImageContent, extraLocalContent, scansDirectory, err = b.sbomScan(ctx, options); err != nil {
|
||||
return imgID, nil, "", fmt.Errorf("scanning rootfs to generate SBOM for container %q: %w", b.ContainerID, err)
|
||||
}
|
||||
if scansDirectory != "" {
|
||||
defer func() {
|
||||
if err := os.RemoveAll(scansDirectory); err != nil {
|
||||
logrus.Warnf("removing temporary directory %q: %v", scansDirectory, err)
|
||||
}
|
||||
}()
|
||||
}
|
||||
for k, v := range extraImageContent {
|
||||
if _, set := options.ExtraImageContent[k]; !set {
|
||||
options.ExtraImageContent[k] = v
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Build an image reference from which we can copy the finished image.
|
||||
src, err = b.makeContainerImageRef(options)
|
||||
if err != nil {
|
||||
@ -408,7 +433,31 @@ func (b *Builder) Commit(ctx context.Context, dest types.ImageReference, options
|
||||
}
|
||||
}
|
||||
}
|
||||
// If we're supposed to store SBOM or PURL information in local files, write them now.
|
||||
for filename, content := range extraLocalContent {
|
||||
err := func() error {
|
||||
output, err := os.OpenFile(filename, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o644)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer output.Close()
|
||||
input, err := os.Open(content)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer input.Close()
|
||||
if _, err := io.Copy(output, input); err != nil {
|
||||
return fmt.Errorf("copying from %q to %q: %w", content, filename, err)
|
||||
}
|
||||
return nil
|
||||
}()
|
||||
if err != nil {
|
||||
return imgID, nil, "", err
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate the as-written digest of the image's manifest and build the digested
|
||||
// reference for the image.
|
||||
manifestDigest, err := manifest.Digest(manifestBytes)
|
||||
if err != nil {
|
||||
return imgID, nil, "", fmt.Errorf("computing digest of manifest of new image %q: %w", transports.ImageName(dest), err)
|
||||
|
@ -333,4 +333,7 @@ type BuildOptions struct {
|
||||
// value set in a base image will be preserved, so this does not
|
||||
// frequently need to be set.
|
||||
OSVersion string
|
||||
// SBOMScanOptions encapsulates options which control whether or not we
|
||||
// run scanners on the rootfs that we're about to commit, and how.
|
||||
SBOMScanOptions []SBOMScanOptions
|
||||
}
|
||||
|
@ -130,6 +130,42 @@ type ConfidentialWorkloadOptions struct {
|
||||
FirmwareLibrary string
|
||||
}
|
||||
|
||||
// SBOMMergeStrategy tells us how to merge multiple SBOM documents into one.
|
||||
type SBOMMergeStrategy string
|
||||
|
||||
const (
|
||||
// SBOMMergeStrategyCat literally concatenates the documents.
|
||||
SBOMMergeStrategyCat SBOMMergeStrategy = "cat"
|
||||
// SBOMMergeStrategyCycloneDXByComponentNameAndVersion adds components
|
||||
// from the second document to the first, so long as they have a
|
||||
// name+version combination which is not already present in the
|
||||
// components array.
|
||||
SBOMMergeStrategyCycloneDXByComponentNameAndVersion SBOMMergeStrategy = "merge-cyclonedx-by-component-name-and-version"
|
||||
// SBOMMergeStrategySPDXByPackageNameAndVersionInfo adds packages from
|
||||
// the second document to the first, so long as they have a
|
||||
// name+versionInfo combination which is not already present in the
|
||||
// first document's packages array, and adds hasExtractedLicensingInfos
|
||||
// items from the second document to the first, so long as they include
|
||||
// a licenseId value which is not already present in the first
|
||||
// document's hasExtractedLicensingInfos array.
|
||||
SBOMMergeStrategySPDXByPackageNameAndVersionInfo SBOMMergeStrategy = "merge-spdx-by-package-name-and-versioninfo"
|
||||
)
|
||||
|
||||
// SBOMScanOptions encapsulates options which control whether or not we run a
|
||||
// scanner on the rootfs that we're about to commit, and how.
|
||||
type SBOMScanOptions struct {
|
||||
Type []string // a shorthand name for a defined group of these options
|
||||
Image string // the scanner image to use
|
||||
PullPolicy PullPolicy // how to get the scanner image
|
||||
Commands []string // one or more commands to invoke for the image rootfs or ContextDir locations
|
||||
ContextDir []string // one or more "source" directory locations
|
||||
SBOMOutput string // where to save SBOM scanner output outside of the image (i.e., the local filesystem)
|
||||
PURLOutput string // where to save PURL list outside of the image (i.e., the local filesystem)
|
||||
ImageSBOMOutput string // where to save SBOM scanner output in the image
|
||||
ImagePURLOutput string // where to save PURL list in the image
|
||||
MergeStrategy SBOMMergeStrategy // how to merge the outputs of multiple scans
|
||||
}
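For a concrete picture of how these fields are used, the "syft" preset added later in this change (internal/sbom/presets.go) corresponds to a value along the following lines; the output locations are left empty here because, presumably, they are filled in from the --sbom-*-output flags or by API callers.

// Editorial illustration of the built-in "syft" preset.
syftPreset := SBOMScanOptions{
	Type:  []string{"", "syft", "syft-cyclonedx"},
	Image: "ghcr.io/anchore/syft",
	Commands: []string{
		"/syft scan -q dir:{ROOTFS} --output cyclonedx-json={OUTPUT}",
		"/syft scan -q dir:{CONTEXT} --output cyclonedx-json={OUTPUT}",
	},
	MergeStrategy: SBOMMergeStrategyCycloneDXByComponentNameAndVersion,
}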
|
||||
|
||||
// TempDirForURL checks if the passed-in string looks like a URL or -. If it is,
|
||||
// TempDirForURL creates a temporary directory, arranges for its contents to be
|
||||
// the contents of that URL, and returns the temporary directory's path, along
|
||||
|
@ -706,7 +706,7 @@ Valid _type_ values are:
|
||||
- **tar**: write the resulting files as a single tarball (.tar).
|
||||
|
||||
If no type is specified, the value defaults to **local**.
|
||||
Alternatively, instead of a comma-separated sequence, the value of **--output** can be just a destination (in the `**dest** format) (e.g. `--output some-path`, `--output -`) where `--output some-path` is treated as if **type=local** and `--output -` is treated as if **type=tar**.
|
||||
Alternatively, instead of a comma-separated sequence, the value of **--output** can be just a destination (in the `**dest**` format) (e.g. `--output some-path`, `--output -`) where `--output some-path` is treated as if **type=local** and `--output -` is treated as if **type=tar**.
|
||||
|
||||
Note: The **--tag** option can also be used to change the file image format to supported `containers-transports(5)`.
|
||||
|
||||
@ -742,26 +742,29 @@ The `buildah build` command allows building images for all Linux architectures,
|
||||
|
||||
**--pull**
|
||||
|
||||
When the flag is enabled or set explicitly to `true` (with *--pull=true*), attempt to pull the latest image from the registries
|
||||
listed in registries.conf if a local image does not exist or the image is newer
|
||||
than the one in storage. Raise an error if the image is not in any listed
|
||||
registry and is not present locally.
|
||||
When the *--pull* flag is enabled or set explicitly to `true` (with
|
||||
*--pull=true*), attempt to pull the latest versions of base and SBOM scanner
|
||||
images from the registries listed in registries.conf if a local base or SBOM
|
||||
scanner image does not exist or the image in the registry is newer than the one
|
||||
in local storage. Raise an error if the base or SBOM scanner image is not in
|
||||
any listed registry and is not present locally.
|
||||
|
||||
If the flag is disabled (with *--pull=false*), do not pull the image from the
|
||||
registry, use only the local version. Raise an error if the image is not
|
||||
present locally.
|
||||
If the flag is disabled (with *--pull=false*), do not pull base and SBOM
|
||||
scanner images from registries, use only local versions. Raise an error if a
|
||||
base or SBOM scanner image is not present locally.
|
||||
|
||||
If the pull flag is set to `always` (with *--pull=always*),
|
||||
pull the image from the first registry it is found in as listed in registries.conf.
|
||||
Raise an error if not found in the registries, even if the image is present locally.
|
||||
If the pull flag is set to `always` (with *--pull=always*), pull base and SBOM
|
||||
scanner images from the registries listed in registries.conf. Raise an error
|
||||
if a base or SBOM scanner image is not found in the registries, even if an
|
||||
image with the same name is present locally.
|
||||
|
||||
If the pull flag is set to `missing` (with *--pull=missing*),
|
||||
pull the image only if it could not be found in the local containers storage.
|
||||
Raise an error if no image could be found and the pull fails.
|
||||
If the pull flag is set to `missing` (with *--pull=missing*), pull base and
|
||||
SBOM scanner images only if they could not be found in the local containers
|
||||
storage. Raise an error if no image could be found and the pull fails.
|
||||
|
||||
If the pull flag is set to `never` (with *--pull=never*),
|
||||
Do not pull the image from the registry, use only the local version.
|
||||
Raise an error if the image is not present locally.
|
||||
If the pull flag is set to `never` (with *--pull=never*), do not pull base and
|
||||
SBOM scanner images from registries, use only the local versions. Raise an
|
||||
error if the image is not present locally.
|
||||
|
||||
Defaults to *true*.
|
||||
|
||||
@ -803,6 +806,97 @@ consult the manpages of the selected container runtime.
|
||||
Note: Do not pass the leading `--` to the flag. To pass the runc flag `--log-format json`
|
||||
to buildah build, the option given would be `--runtime-flag log-format=json`.
|
||||
|
||||
**--sbom** *preset*
|
||||
|
||||
Generate SBOMs (Software Bills Of Materials) for the output image by scanning
|
||||
the working container and build contexts using the named combination of scanner
|
||||
image, scanner commands, and merge strategy. Must be specified with one or
|
||||
more of **--sbom-image-output**, **--sbom-image-purl-output**, **--sbom-output**,
|
||||
and **--sbom-purl-output**. Recognized presets, and the set of options which
|
||||
they equate to:
|
||||
|
||||
- "syft", "syft-cyclonedx":
|
||||
--sbom-scanner-image=ghcr.io/anchore/syft
|
||||
--sbom-scanner-command="/syft scan -q dir:{ROOTFS} --output cyclonedx-json={OUTPUT}"
|
||||
--sbom-scanner-command="/syft scan -q dir:{CONTEXT} --output cyclonedx-json={OUTPUT}"
|
||||
--sbom-merge-strategy=merge-cyclonedx-by-component-name-and-version
|
||||
- "syft-spdx":
|
||||
--sbom-scanner-image=ghcr.io/anchore/syft
|
||||
--sbom-scanner-command="/syft scan -q dir:{ROOTFS} --output spdx-json={OUTPUT}"
|
||||
--sbom-scanner-command="/syft scan -q dir:{CONTEXT} --output spdx-json={OUTPUT}"
|
||||
--sbom-merge-strategy=merge-spdx-by-package-name-and-versioninfo
|
||||
- "trivy", "trivy-cyclonedx":
|
||||
--sbom-scanner-image=ghcr.io/aquasecurity/trivy
|
||||
--sbom-scanner-command="trivy filesystem -q {ROOTFS} --format cyclonedx --output {OUTPUT}"
|
||||
--sbom-scanner-command="trivy filesystem -q {CONTEXT} --format cyclonedx --output {OUTPUT}"
|
||||
--sbom-merge-strategy=merge-cyclonedx-by-component-name-and-version
|
||||
- "trivy-spdx":
|
||||
--sbom-scanner-image=ghcr.io/aquasecurity/trivy
|
||||
--sbom-scanner-command="trivy filesystem -q {ROOTFS} --format spdx-json --output {OUTPUT}"
|
||||
--sbom-scanner-command="trivy filesystem -q {CONTEXT} --format spdx-json --output {OUTPUT}"
|
||||
--sbom-merge-strategy=merge-spdx-by-package-name-and-versioninfo
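For example, an invocation along the lines of `buildah build --sbom=syft --sbom-output=sbom.cdx.json --sbom-purl-output=purls.json .` (the file names here are only illustrative) scans the rootfs and build context with the syft preset and writes the merged SBOM and the PURL list to the named local files.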
|
||||
|
||||
**--sbom-image-output** *path*
|
||||
|
||||
When generating SBOMs, store the generated SBOM in the specified path in the
|
||||
output image. There is no default.
|
||||
|
||||
**--sbom-image-purl-output** *path*
|
||||
|
||||
When generating SBOMs, scan them for PURL ([package
|
||||
URL](https://github.com/package-url/purl-spec/blob/master/PURL-SPECIFICATION.rst))
|
||||
information, and save a list of found PURLs to the specified path in the output
|
||||
image. There is no default.
|
||||
|
||||
**--sbom-merge-strategy** *method*
|
||||
|
||||
If more than one **--sbom-scanner-command** value is being used, use the
|
||||
specified method to merge the output from later commands with output from
|
||||
earlier commands. Recognized values include:
|
||||
|
||||
- cat
|
||||
Concatenate the files.
|
||||
- merge-cyclonedx-by-component-name-and-version
|
||||
Merge the "component" fields of JSON documents, ignoring values from
|
||||
documents when the combination of their "name" and "version" values is
|
||||
already present. Documents are processed in the order in which they are
|
||||
generated, which is the order in which the commands that generate them
|
||||
were specified.
|
||||
- merge-spdx-by-package-name-and-versioninfo
|
||||
Merge the "package" fields of JSON documents, ignoring values from
|
||||
documents when the combination of their "name" and "versionInfo" values is
|
||||
already present. Documents are processed in the order in which they are
|
||||
generated, which is the order in which the commands that generate them
|
||||
were specified.
|
||||
|
||||
**--sbom-output** *file*
|
||||
|
||||
When generating SBOMs, store the generated SBOM in the named file on the local
|
||||
filesystem. There is no default.
|
||||
|
||||
**--sbom-purl-output** *file*
|
||||
|
||||
When generating SBOMs, scan them for PURL ([package
|
||||
URL](https://github.com/package-url/purl-spec/blob/master/PURL-SPECIFICATION.rst))
|
||||
information, and save a list of found PURLs to the named file in the local
|
||||
filesystem. There is no default.
|
||||
|
||||
**--sbom-scanner-command** *command*
|
||||
|
||||
Generate SBOMs by running the specified command from the scanner image. If
|
||||
multiple commands are specified, they are run in the order in which they are
|
||||
specified. These text substitutions are performed:
|
||||
- {ROOTFS}
|
||||
The root of the built image's filesystem, bind mounted.
|
||||
- {CONTEXT}
|
||||
The build context and additional build contexts, bind mounted.
|
||||
- {OUTPUT}
|
||||
The name of a temporary output file, to be read and merged with others or copied elsewhere.
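For example, with the syft presets above each scan step runs `/syft scan -q dir:{ROOTFS} --output cyclonedx-json={OUTPUT}` or the same command with `dir:{CONTEXT}`, and every invocation writes to its own temporary {OUTPUT} file before the results are merged.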
|
||||
|
||||
**--sbom-scanner-image** *image*
|
||||
|
||||
Generate SBOMs using the specified scanner image.
|
||||
|
||||
**--secret**=**id=id,src=path**
|
||||
|
||||
Pass secret information to be used in the Containerfile for building images
|
||||
@ -1275,7 +1369,7 @@ Users can specify a series of Unix shell globals in a
|
||||
.containerignore/.dockerignore file to identify files/directories to exclude.
|
||||
|
||||
Buildah supports a special wildcard string `**` which matches any number of
|
||||
directories (including zero). For example, **/*.go will exclude all files that
|
||||
directories (including zero). For example, `**/*.go` will exclude all files that
|
||||
end with .go that are found in all directories.
|
||||
|
||||
Example .containerignore file:
|
||||
|
@ -166,6 +166,32 @@ want to set `--omit-history` to omit the optional `History` from
|
||||
built images or when working with images built using build tools that
|
||||
do not include `History` information in their images.
|
||||
|
||||
**--pull**
|
||||
|
||||
When the *--pull* flag is enabled or set explicitly to `true` (with
|
||||
*--pull=true*), attempt to pull the latest versions of SBOM scanner images from
|
||||
the registries listed in registries.conf if a local SBOM scanner image does not
|
||||
exist or the image in the registry is newer than the one in local storage.
|
||||
Raise an error if the SBOM scanner image is not in any listed registry and is
|
||||
not present locally.
|
||||
|
||||
If the flag is disabled (with *--pull=false*), do not pull SBOM scanner images
|
||||
from registries, use only local versions. Raise an error if an SBOM scanner
|
||||
image is not present locally.
|
||||
|
||||
If the pull flag is set to `always` (with *--pull=always*), pull SBOM scanner
|
||||
images from the registries listed in registries.conf. Raise an error if an SBOM
|
||||
scanner image is not found in the registries, even if an image with the same
|
||||
name is present locally.
|
||||
|
||||
If the pull flag is set to `missing` (with *--pull=missing*), pull SBOM scanner
|
||||
images only if they could not be found in the local containers storage. Raise
|
||||
an error if no image could be found and the pull fails.
|
||||
|
||||
If the pull flag is set to `never` (with *--pull=never*), do not pull SBOM
|
||||
scanner images from registries, use only the local versions. Raise an error if
|
||||
the image is not present locally.
|
||||
|
||||
**--quiet**, **-q**
|
||||
|
||||
When writing the output image, suppress progress output.
|
||||
@ -174,6 +200,97 @@ When writing the output image, suppress progress output.
|
||||
Remove the working container and its contents after creating the image.
|
||||
Default leaves the container and its content in place.
|
||||
|
||||
**--sbom** *preset*
|
||||
|
||||
Generate SBOMs (Software Bills Of Materials) for the output image by scanning
|
||||
the working container and build contexts using the named combination of scanner
|
||||
image, scanner commands, and merge strategy. Must be specified with one or
|
||||
more of **--sbom-image-output**, **--sbom-image-purl-output**, **--sbom-output**,
|
||||
and **--sbom-purl-output**. Recognized presets, and the set of options which
|
||||
they equate to:
|
||||
|
||||
- "syft", "syft-cyclonedx":
|
||||
--sbom-scanner-image=ghcr.io/anchore/syft
|
||||
--sbom-scanner-command="/syft scan -q dir:{ROOTFS} --output cyclonedx-json={OUTPUT}"
|
||||
--sbom-scanner-command="/syft scan -q dir:{CONTEXT} --output cyclonedx-json={OUTPUT}"
|
||||
--sbom-merge-strategy=merge-cyclonedx-by-component-name-and-version
|
||||
- "syft-spdx":
|
||||
--sbom-scanner-image=ghcr.io/anchore/syft
|
||||
--sbom-scanner-command="/syft scan -q dir:{ROOTFS} --output spdx-json={OUTPUT}"
|
||||
--sbom-scanner-command="/syft scan -q dir:{CONTEXT} --output spdx-json={OUTPUT}"
|
||||
--sbom-merge-strategy=merge-spdx-by-package-name-and-versioninfo
|
||||
- "trivy", "trivy-cyclonedx":
|
||||
--sbom-scanner-image=ghcr.io/aquasecurity/trivy
|
||||
--sbom-scanner-command="trivy filesystem -q {ROOTFS} --format cyclonedx --output {OUTPUT}"
|
||||
--sbom-scanner-command="trivy filesystem -q {CONTEXT} --format cyclonedx --output {OUTPUT}"
|
||||
--sbom-merge-strategy=merge-cyclonedx-by-component-name-and-version
|
||||
- "trivy-spdx":
|
||||
--sbom-scanner-image=ghcr.io/aquasecurity/trivy
|
||||
--sbom-scanner-command="trivy filesystem -q {ROOTFS} --format spdx-json --output {OUTPUT}"
|
||||
--sbom-scanner-command="trivy filesystem -q {CONTEXT} --format spdx-json --output {OUTPUT}"
|
||||
--sbom-merge-strategy=merge-spdx-by-package-name-and-versioninfo
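For example, `buildah commit --sbom=trivy --sbom-output=sbom.cdx.json containerID newImageName` (the file name, container name, and image name are illustrative) scans the working container with the trivy preset and writes the merged SBOM to the named local file.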
|
||||
|
||||
**--sbom-image-output** *path*
|
||||
|
||||
When generating SBOMs, store the generated SBOM in the specified path in the
|
||||
output image. There is no default.
|
||||
|
||||
**--sbom-image-purl-output** *path*
|
||||
|
||||
When generating SBOMs, scan them for PURL ([package
|
||||
URL](https://github.com/package-url/purl-spec/blob/master/PURL-SPECIFICATION.rst))
|
||||
information, and save a list of found PURLs to the named file in the local
|
||||
filesystem. There is no default.
|
||||
|
||||
**--sbom-merge-strategy** *method*
|
||||
|
||||
If more than one **--sbom-scanner-command** value is being used, use the
|
||||
specified method to merge the output from later commands with output from
|
||||
earlier commands. Recognized values include:
|
||||
|
||||
- cat
|
||||
Concatenate the files.
|
||||
- merge-cyclonedx-by-component-name-and-version
|
||||
Merge the "component" fields of JSON documents, ignoring values from
|
||||
documents when the combination of their "name" and "version" values is
|
||||
already present. Documents are processed in the order in which they are
|
||||
generated, which is the order in which the commands that generate them
|
||||
were specified.
|
||||
- merge-spdx-by-package-name-and-versioninfo
|
||||
Merge the "package" fields of JSON documents, ignoring values from
|
||||
documents when the combination of their "name" and "versionInfo" values is
|
||||
already present. Documents are processed in the order in which they are
|
||||
generated, which is the order in which the commands that generate them
|
||||
were specified.
|
||||
|
||||
**--sbom-output** *file*
|
||||
|
||||
When generating SBOMs, store the generated SBOM in the named file on the local
|
||||
filesystem. There is no default.
|
||||
|
||||
**--sbom-purl-output** *file*
|
||||
|
||||
When generating SBOMs, scan them for PURL ([package
|
||||
URL](https://github.com/package-url/purl-spec/blob/master/PURL-SPECIFICATION.rst))
|
||||
information, and save a list of found PURLs to the named file in the local
|
||||
filesystem. There is no default.
|
||||
|
||||
**--sbom-scanner-command** *command*
|
||||
|
||||
Generate SBOMs by running the specified command from the scanner image. If
|
||||
multiple commands are specified, they are run in the order in which they are
|
||||
specified. These text substitutions are performed:
|
||||
- {ROOTFS}
|
||||
The root of the built image's filesystem, bind mounted.
|
||||
- {CONTEXT}
|
||||
The build context and additional build contexts, bind mounted.
|
||||
- {OUTPUT}
|
||||
The name of a temporary output file, to be read and merged with others or copied elsewhere.
|
||||
|
||||
**--sbom-scanner-image** *image*
|
||||
|
||||
Generate SBOMs using the specified scanner image.
|
||||
|
||||
**--sign-by** *fingerprint*
|
||||
|
||||
Sign the new image using the GPG key that matches the specified fingerprint.
|
||||
|
image.go | 2
@ -273,7 +273,7 @@ func (i *containerImageRef) extractRootfs(opts ExtractRootfsOptions) (io.ReadClo
|
||||
StripSetgidBit: opts.StripSetgidBit,
|
||||
StripXattrs: opts.StripXattrs,
|
||||
}
|
||||
err = copier.Get(mountPoint, mountPoint, copierOptions, []string{"."}, pipeWriter)
|
||||
err := copier.Get(mountPoint, mountPoint, copierOptions, []string{"."}, pipeWriter)
|
||||
errChan <- err
|
||||
pipeWriter.Close()
|
||||
|
||||
|
@ -152,6 +152,7 @@ type Executor struct {
|
||||
osFeatures []string
|
||||
envs []string
|
||||
confidentialWorkload define.ConfidentialWorkloadOptions
|
||||
sbomScanOptions []define.SBOMScanOptions
|
||||
}
|
||||
|
||||
type imageTypeAndHistoryAndDiffIDs struct {
|
||||
@ -310,6 +311,7 @@ func newExecutor(logger *logrus.Logger, logPrefix string, store storage.Store, o
|
||||
osFeatures: append([]string{}, options.OSFeatures...),
|
||||
envs: append([]string{}, options.Envs...),
|
||||
confidentialWorkload: options.ConfidentialWorkload,
|
||||
sbomScanOptions: options.SBOMScanOptions,
|
||||
}
|
||||
if exec.err == nil {
|
||||
exec.err = os.Stderr
|
||||
|
@ -1159,7 +1159,7 @@ func (s *StageExecutor) Execute(ctx context.Context, base string) (imgID string,
|
||||
|
||||
if len(children) == 0 {
|
||||
// There are no steps.
|
||||
if s.builder.FromImageID == "" || s.executor.squash || s.executor.confidentialWorkload.Convert || len(s.executor.labels) > 0 || len(s.executor.annotations) > 0 || len(s.executor.unsetEnvs) > 0 || len(s.executor.unsetLabels) > 0 {
|
||||
if s.builder.FromImageID == "" || s.executor.squash || s.executor.confidentialWorkload.Convert || len(s.executor.labels) > 0 || len(s.executor.annotations) > 0 || len(s.executor.unsetEnvs) > 0 || len(s.executor.unsetLabels) > 0 || len(s.executor.sbomScanOptions) > 0 {
|
||||
// We either don't have a base image, or we need to
|
||||
// transform the contents of the base image, or we need
|
||||
// to make some changes to just the config blob. Whichever
|
||||
@ -1168,7 +1168,7 @@ func (s *StageExecutor) Execute(ctx context.Context, base string) (imgID string,
|
||||
// No base image means there's nothing to put in a
|
||||
// layer, so don't create one.
|
||||
emptyLayer := (s.builder.FromImageID == "")
|
||||
if imgID, ref, err = s.commit(ctx, s.getCreatedBy(nil, ""), emptyLayer, s.output, s.executor.squash, lastStage); err != nil {
|
||||
if imgID, ref, err = s.commit(ctx, s.getCreatedBy(nil, ""), emptyLayer, s.output, s.executor.squash || s.executor.confidentialWorkload.Convert, lastStage); err != nil {
|
||||
return "", nil, false, fmt.Errorf("committing base container: %w", err)
|
||||
}
|
||||
} else {
|
||||
@ -1566,11 +1566,13 @@ func (s *StageExecutor) Execute(ctx context.Context, base string) (imgID string,
|
||||
}
|
||||
|
||||
if lastInstruction && lastStage {
|
||||
if s.executor.squash || s.executor.confidentialWorkload.Convert {
|
||||
// Create a squashed version of this image
|
||||
// if we're supposed to create one and this
|
||||
// is the last instruction of the last stage.
|
||||
imgID, ref, err = s.commit(ctx, s.getCreatedBy(node, addedContentSummary), !s.stepRequiresLayer(step), commitName, true, lastStage && lastInstruction)
|
||||
if s.executor.squash || s.executor.confidentialWorkload.Convert || len(s.executor.sbomScanOptions) != 0 {
|
||||
// If this is the last instruction of the last stage,
|
||||
// create a squashed or confidential workload
|
||||
// version of the image if that's what we're after,
|
||||
// or a normal one if we need to scan the image while
|
||||
// committing it.
|
||||
imgID, ref, err = s.commit(ctx, s.getCreatedBy(node, addedContentSummary), !s.stepRequiresLayer(step), commitName, s.executor.squash || s.executor.confidentialWorkload.Convert, lastStage && lastInstruction)
|
||||
if err != nil {
|
||||
return "", nil, false, fmt.Errorf("committing final squash step %+v: %w", *step, err)
|
||||
}
|
||||
@ -2190,6 +2192,7 @@ func (s *StageExecutor) commit(ctx context.Context, createdBy string, emptyLayer
|
||||
}
|
||||
if finalInstruction {
|
||||
options.ConfidentialWorkloadOptions = s.executor.confidentialWorkload
|
||||
options.SBOMScanOptions = s.executor.sbomScanOptions
|
||||
}
|
||||
imgID, _, manifestDigest, err := s.builder.Commit(ctx, imageRef, options)
|
||||
if err != nil {
|
||||
|
internal/sbom/merge.go | 296 (new file)
@ -0,0 +1,296 @@
|
||||
package sbom
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"sort"
|
||||
|
||||
"github.com/containers/buildah/define"
|
||||
)
|
||||
|
||||
// getComponentNameVersionPurl extracts the "name", "version", and "purl"
|
||||
// fields of a CycloneDX component record
|
||||
func getComponentNameVersionPurl(anyComponent any) (string, string, error) {
|
||||
if component, ok := anyComponent.(map[string]any); ok {
|
||||
// read the "name" field
|
||||
anyName, ok := component["name"]
|
||||
if !ok {
|
||||
return "", "", fmt.Errorf("no name in component %v", anyComponent)
|
||||
}
|
||||
name, ok := anyName.(string)
|
||||
if !ok {
|
||||
return "", "", fmt.Errorf("name %v is not a string", anyName)
|
||||
}
|
||||
// read the optional "version" field
|
||||
var version string
|
||||
anyVersion, ok := component["version"]
|
||||
if ok {
|
||||
if version, ok = anyVersion.(string); !ok {
|
||||
return "", "", fmt.Errorf("version %v is not a string", anyVersion)
|
||||
}
|
||||
}
|
||||
// combine them
|
||||
nameWithVersion := name
|
||||
if version != "" {
|
||||
nameWithVersion += ("@" + version)
|
||||
}
|
||||
// read the optional "purl" field
|
||||
var purl string
|
||||
anyPurl, ok := component["purl"]
|
||||
if ok {
|
||||
if purl, ok = anyPurl.(string); !ok {
|
||||
return "", "", fmt.Errorf("purl %v is not a string", anyPurl)
|
||||
}
|
||||
}
|
||||
return nameWithVersion, purl, nil
|
||||
}
|
||||
return "", "", fmt.Errorf("component %v is not an object", anyComponent)
|
||||
}
|
||||
|
||||
// getPackageNameVersionInfoPurl extracts the "name", "versionInfo", and "purl"
|
||||
// fields of an SPDX package record
|
||||
func getPackageNameVersionInfoPurl(anyPackage any) (string, string, error) {
|
||||
if pkg, ok := anyPackage.(map[string]any); ok {
|
||||
// read the "name" field
|
||||
anyName, ok := pkg["name"]
|
||||
if !ok {
|
||||
return "", "", fmt.Errorf("no name in package %v", anyPackage)
|
||||
}
|
||||
name, ok := anyName.(string)
|
||||
if !ok {
|
||||
return "", "", fmt.Errorf("name %v is not a string", anyName)
|
||||
}
|
||||
// read the optional "versionInfo" field
|
||||
var versionInfo string
|
||||
if anyVersionInfo, ok := pkg["versionInfo"]; ok {
|
||||
if versionInfo, ok = anyVersionInfo.(string); !ok {
|
||||
return "", "", fmt.Errorf("versionInfo %v is not a string", anyVersionInfo)
|
||||
}
|
||||
}
|
||||
// combine them
|
||||
nameWithVersionInfo := name
|
||||
if versionInfo != "" {
|
||||
nameWithVersionInfo += ("@" + versionInfo)
|
||||
}
|
||||
// now look for optional externalRefs[].purl if "referenceCategory"
|
||||
// is "PACKAGE-MANAGER" and "referenceType" is "purl"
|
||||
var purl string
|
||||
if anyExternalRefs, ok := pkg["externalRefs"]; ok {
|
||||
if externalRefs, ok := anyExternalRefs.([]any); ok {
|
||||
for _, anyExternalRef := range externalRefs {
|
||||
if externalRef, ok := anyExternalRef.(map[string]any); ok {
|
||||
anyReferenceCategory, ok := externalRef["referenceCategory"]
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
if referenceCategory, ok := anyReferenceCategory.(string); !ok || referenceCategory != "PACKAGE-MANAGER" {
|
||||
continue
|
||||
}
|
||||
anyReferenceType, ok := externalRef["referenceType"]
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
if referenceType, ok := anyReferenceType.(string); !ok || referenceType != "purl" {
|
||||
continue
|
||||
}
|
||||
if anyReferenceLocator, ok := externalRef["referenceLocator"]; ok {
|
||||
if purl, ok = anyReferenceLocator.(string); !ok {
|
||||
return "", "", fmt.Errorf("purl %v is not a string", anyReferenceLocator)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nameWithVersionInfo, purl, nil
|
||||
}
|
||||
return "", "", fmt.Errorf("package %v is not an object", anyPackage)
|
||||
}
|
||||
|
||||
// getLicenseID extracts the "licenseId" field of an SPDX license record
|
||||
func getLicenseID(anyLicense any) (string, error) {
|
||||
var licenseID string
|
||||
if lic, ok := anyLicense.(map[string]any); ok {
|
||||
anyID, ok := lic["licenseId"]
|
||||
if !ok {
|
||||
return "", fmt.Errorf("no licenseId in license %v", anyID)
|
||||
}
|
||||
id, ok := anyID.(string)
|
||||
if !ok {
|
||||
return "", fmt.Errorf("licenseId %v is not a string", anyID)
|
||||
}
|
||||
licenseID = id
|
||||
}
|
||||
return licenseID, nil
|
||||
}
|
||||
|
||||
// mergeSlicesWithoutDuplicates merges a named slice in "base" with items from
|
||||
// the same slice in "merge", so long as getKey() returns values for them that
|
||||
// it didn't for items from the "base" slice
|
||||
func mergeSlicesWithoutDuplicates(base, merge map[string]any, sliceField string, getKey func(record any) (string, error)) error {
|
||||
uniqueKeys := make(map[string]struct{})
|
||||
// go through all of the values in the base slice, grab their
|
||||
// keys, and note them
|
||||
baseRecords := base[sliceField]
|
||||
baseRecordsSlice, ok := baseRecords.([]any)
|
||||
if !ok {
|
||||
baseRecordsSlice = []any{}
|
||||
}
|
||||
for _, anyRecord := range baseRecordsSlice {
|
||||
key, err := getKey(anyRecord)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
uniqueKeys[key] = struct{}{}
|
||||
}
|
||||
// go through all of the record values in the merge doc, grab their
|
||||
// associated keys, and append them to the base records slice if we
|
||||
// haven't seen the key yet
|
||||
mergeRecords := merge[sliceField]
|
||||
mergeRecordsSlice, ok := mergeRecords.([]any)
|
||||
if !ok {
|
||||
mergeRecordsSlice = []any{}
|
||||
}
|
||||
for _, anyRecord := range mergeRecordsSlice {
|
||||
key, err := getKey(anyRecord)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if _, present := uniqueKeys[key]; !present {
|
||||
baseRecordsSlice = append(baseRecordsSlice, anyRecord)
|
||||
uniqueKeys[key] = struct{}{}
|
||||
}
|
||||
}
|
||||
if len(baseRecordsSlice) > 0 {
|
||||
base[sliceField] = baseRecordsSlice
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// decodeJSON decodes a file into a map
|
||||
func decodeJSON(inputFile string, document *map[string]any) error {
|
||||
src, err := os.Open(inputFile)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer src.Close()
|
||||
if err = json.NewDecoder(src).Decode(document); err != nil {
|
||||
return fmt.Errorf("decoding JSON document from %q: %w", inputFile, err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// encodeJSON encodes a map and saves it to a file
|
||||
func encodeJSON(outputFile string, document any) error {
|
||||
dst, err := os.Create(outputFile)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer dst.Close()
|
||||
if err = json.NewEncoder(dst).Encode(document); err != nil {
|
||||
return fmt.Errorf("writing JSON document to %q: %w", outputFile, err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Merge adds the contents of inputSBOM to inputOutputSBOM using one of a
|
||||
// handful of named strategies.
|
||||
func Merge(mergeStrategy define.SBOMMergeStrategy, inputOutputSBOM, inputSBOM, outputPURL string) (err error) {
|
||||
type purlImageContents struct {
|
||||
Dependencies []string `json:"dependencies,omitempty"`
|
||||
}
|
||||
type purlDocument struct {
|
||||
ImageContents purlImageContents `json:"image_contents,omitempty"`
|
||||
}
|
||||
purls := []string{}
|
||||
seenPurls := make(map[string]struct{})
|
||||
|
||||
switch mergeStrategy {
|
||||
case define.SBOMMergeStrategyCycloneDXByComponentNameAndVersion:
|
||||
var base, merge map[string]any
|
||||
if err = decodeJSON(inputOutputSBOM, &base); err != nil {
|
||||
return fmt.Errorf("reading first SBOM to be merged from %q: %w", inputOutputSBOM, err)
|
||||
}
|
||||
if err = decodeJSON(inputSBOM, &merge); err != nil {
|
||||
return fmt.Errorf("reading second SBOM to be merged from %q: %w", inputSBOM, err)
|
||||
}
|
||||
|
||||
// merge the "components" lists based on unique combinations of
|
||||
// "name" and "version" fields, and save unique package URL
|
||||
// values
|
||||
err = mergeSlicesWithoutDuplicates(base, merge, "components", func(anyPackage any) (string, error) {
|
||||
nameWithVersion, purl, err := getComponentNameVersionPurl(anyPackage)
|
||||
if purl != "" {
|
||||
if _, seen := seenPurls[purl]; !seen {
|
||||
purls = append(purls, purl)
|
||||
seenPurls[purl] = struct{}{}
|
||||
}
|
||||
}
|
||||
return nameWithVersion, err
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("merging the %q field of CycloneDX SBOMs: %w", "components", err)
|
||||
}
|
||||
|
||||
// save the updated doc
|
||||
err = encodeJSON(inputOutputSBOM, base)
|
||||
|
||||
case define.SBOMMergeStrategySPDXByPackageNameAndVersionInfo:
|
||||
var base, merge map[string]any
|
||||
if err = decodeJSON(inputOutputSBOM, &base); err != nil {
|
||||
return fmt.Errorf("reading first SBOM to be merged from %q: %w", inputOutputSBOM, err)
|
||||
}
|
||||
if err = decodeJSON(inputSBOM, &merge); err != nil {
|
||||
return fmt.Errorf("reading second SBOM to be merged from %q: %w", inputSBOM, err)
|
||||
}
|
||||
|
||||
// merge the "packages" lists based on unique combinations of
|
||||
// "name" and "versionInfo" fields, and save unique package URL
|
||||
// values
|
||||
err = mergeSlicesWithoutDuplicates(base, merge, "packages", func(anyPackage any) (string, error) {
|
||||
nameWithVersionInfo, purl, err := getPackageNameVersionInfoPurl(anyPackage)
|
||||
if purl != "" {
|
||||
if _, seen := seenPurls[purl]; !seen {
|
||||
purls = append(purls, purl)
|
||||
seenPurls[purl] = struct{}{}
|
||||
}
|
||||
}
|
||||
return nameWithVersionInfo, err
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("merging the %q field of SPDX SBOMs: %w", "packages", err)
|
||||
}
|
||||
|
||||
// merge the "hasExtractedLicensingInfos" lists based on unique
|
||||
// "licenseId" values
|
||||
err = mergeSlicesWithoutDuplicates(base, merge, "hasExtractedLicensingInfos", getLicenseID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("merging the %q field of SPDX SBOMs: %w", "hasExtractedLicensingInfos", err)
|
||||
}
|
||||
|
||||
// save the updated doc
|
||||
err = encodeJSON(inputOutputSBOM, base)
|
||||
|
||||
case define.SBOMMergeStrategyCat:
|
||||
dst, err := os.OpenFile(inputOutputSBOM, os.O_RDWR|os.O_APPEND, 0o644)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer dst.Close()
|
||||
src, err := os.Open(inputSBOM)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer src.Close()
|
||||
if _, err = io.Copy(dst, src); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if err == nil {
|
||||
sort.Strings(purls)
|
||||
err = encodeJSON(outputPURL, &purlDocument{purlImageContents{Dependencies: purls}})
|
||||
}
|
||||
return err
|
||||
}
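The tests below exercise the JSON helpers but not Merge itself. As a hedged usage sketch (the file names are illustrative), a caller such as buildah's scan step might fold a second SPDX document into the first and emit the PURL list like this; it assumes the github.com/containers/buildah/define and internal/sbom imports.

// Editorial example, not part of this file: Merge rewrites base.spdx.json in
// place and writes the collected package URLs to purls.json.
if err := sbom.Merge(define.SBOMMergeStrategySPDXByPackageNameAndVersionInfo,
	"base.spdx.json", "next.spdx.json", "purls.json"); err != nil {
	return fmt.Errorf("merging SBOM scan results: %w", err)
}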
|
internal/sbom/merge_test.go | 297 (new file)
@ -0,0 +1,297 @@
|
||||
package sbom
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestEncodeJSON(t *testing.T) {
|
||||
tmp := t.TempDir()
|
||||
map1 := map[string]any{
|
||||
"string": "yeah",
|
||||
"number": 1,
|
||||
"struct": map[string]any{
|
||||
"string": "yep",
|
||||
"number": 2,
|
||||
},
|
||||
}
|
||||
err := encodeJSON(filepath.Join(tmp, "1.json"), map1)
|
||||
require.NoError(t, err)
|
||||
st1, err := os.Stat(filepath.Join(tmp, "1.json"))
|
||||
require.NoError(t, err)
|
||||
assert.NotZero(t, st1.Size())
|
||||
|
||||
map2 := struct {
|
||||
String string `json:"string"`
|
||||
Number int `json:"number"`
|
||||
Struct struct {
|
||||
String string `json:"string"`
|
||||
Number int `json:"number"`
|
||||
} `json:"struct"`
|
||||
}{
|
||||
String: "yeah",
|
||||
Number: 1,
|
||||
Struct: struct {
|
||||
String string `json:"string"`
|
||||
Number int `json:"number"`
|
||||
}{
|
||||
String: "yep",
|
||||
Number: 2,
|
||||
},
|
||||
}
|
||||
err = encodeJSON(filepath.Join(tmp, "2.json"), map2)
|
||||
require.NoError(t, err)
|
||||
st2, err := os.Stat(filepath.Join(tmp, "2.json"))
|
||||
require.NoError(t, err)
|
||||
assert.NotZero(t, st2.Size())
|
||||
c1, err := os.ReadFile(filepath.Join(tmp, "1.json"))
|
||||
require.NoError(t, err)
|
||||
c2, err := os.ReadFile(filepath.Join(tmp, "2.json"))
|
||||
require.NoError(t, err)
|
||||
assert.Equalf(t, len(c2), len(c1), "length of %q is not the same as length of %q", string(c1), string(c2))
|
||||
}
|
||||
|
||||
func TestDecodeJSON(t *testing.T) {
|
||||
tmp := t.TempDir()
|
||||
var map1, map2, map3 map[string]any
|
||||
err := os.WriteFile(filepath.Join(tmp, "1.json"), []byte(`
|
||||
{
|
||||
"string":"yeah",
|
||||
"number":1,
|
||||
"struct":{"string":"yep",
|
||||
"number":2
|
||||
}}
|
||||
`), 0o666)
|
||||
require.NoError(t, err)
|
||||
|
||||
err = decodeJSON(filepath.Join(tmp, "1.json"), &map1)
|
||||
require.NoError(t, err)
|
||||
|
||||
err = os.WriteFile(filepath.Join(tmp, "2.json"), []byte(`
|
||||
{"string":"yeah",
|
||||
"number":1,
|
||||
"struct":{"string":"yep", "number":2}
|
||||
}
|
||||
`), 0o666)
|
||||
require.NoError(t, err)
|
||||
|
||||
err = decodeJSON(filepath.Join(tmp, "2.json"), &map2)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, map2, map1)
|
||||
|
||||
err = os.WriteFile(filepath.Join(tmp, "3.txt"), []byte(`
|
||||
what a lovely, lovely day
|
||||
`), 0o666)
|
||||
require.NoError(t, err)
|
||||
|
||||
err = decodeJSON(filepath.Join(tmp, "3.txt"), &map3)
|
||||
require.Error(t, err)
|
||||
}
|
||||
|
||||
func TestGetComponentNameVersionPurl(t *testing.T) {
|
||||
input := map[string]any{
|
||||
"name": "alice",
|
||||
"version": "1.0",
|
||||
"purl": "purl://...",
|
||||
}
|
||||
s, purl, err := getComponentNameVersionPurl(input)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "alice@1.0", s)
|
||||
assert.Equal(t, "purl://...", purl)
|
||||
|
||||
input = map[string]any{
|
||||
"name": "alice",
|
||||
"purl": "pkg:/...",
|
||||
}
|
||||
s, purl, err = getComponentNameVersionPurl(input)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "alice", s)
|
||||
assert.Equal(t, "pkg:/...", purl)
|
||||
|
||||
input = map[string]any{
|
||||
"name": "alice",
|
||||
"version": "2.0",
|
||||
}
|
||||
s, purl, err = getComponentNameVersionPurl(input)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "alice@2.0", s)
|
||||
assert.Empty(t, purl)
|
||||
}
|
||||
|
||||
func TestGetLicenseID(t *testing.T) {
|
||||
input := map[string]any{
|
||||
"licenseId": "driver",
|
||||
}
|
||||
s, err := getLicenseID(input)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "driver", s)
|
||||
}
|
||||
|
||||
func TestGetPackageNameVersionInfoPurl(t *testing.T) {
|
||||
input := map[string]any{
|
||||
"name": "alice",
|
||||
"versionInfo": "1.0",
|
||||
"externalRefs": []any{
|
||||
map[string]any{
|
||||
"referenceCategory": "PACKAGE-MANAGER",
|
||||
"referenceType": "purl",
|
||||
"referenceLocator": "pkg://....",
|
||||
},
|
||||
},
|
||||
}
|
||||
s, purl, err := getPackageNameVersionInfoPurl(input)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "alice@1.0", s)
|
||||
assert.Equal(t, "pkg://....", purl)
|
||||
|
||||
input = map[string]any{
|
||||
"name": "alice",
|
||||
"externalRefs": []any{
|
||||
map[string]any{
|
||||
"referenceCategory": "PACKAGE-MANAGER",
|
||||
"referenceType": "purl",
|
||||
"referenceLocator": "pkg:///...",
|
||||
},
|
||||
},
|
||||
}
|
||||
s, purl, err = getPackageNameVersionInfoPurl(input)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "alice", s)
|
||||
assert.Equal(t, "pkg:///...", purl)
|
||||
|
||||
input = map[string]any{
|
||||
"name": "alice",
|
||||
"externalRefs": []any{
|
||||
map[string]any{
|
||||
"referenceCategory": "NOT-THE-PACKAGE-MANAGER",
|
||||
"referenceType": "obscure",
|
||||
"referenceLocator": "beep:...",
|
||||
},
|
||||
},
|
||||
}
|
||||
s, purl, err = getPackageNameVersionInfoPurl(input)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "alice", s)
|
||||
assert.Empty(t, purl)
|
||||
|
||||
input = map[string]any{
|
||||
"name": "alice",
|
||||
}
|
||||
s, purl, err = getPackageNameVersionInfoPurl(input)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "alice", s)
|
||||
assert.Empty(t, purl)
|
||||
|
||||
input = map[string]any{
|
||||
"not-name": "alice",
|
||||
}
|
||||
_, _, err = getPackageNameVersionInfoPurl(input)
|
||||
require.Error(t, err)
|
||||
}
|
||||
|
||||
func TestMergeSlicesWithoutDuplicatesFixed(t *testing.T) {
|
||||
base := map[string]any{
|
||||
"array": []any{
|
||||
map[string]any{"first": 1},
|
||||
},
|
||||
}
|
||||
merge := map[string]any{
|
||||
"array": []any{
|
||||
map[string]any{"second": 2},
|
||||
},
|
||||
}
|
||||
expected := map[string]any{
|
||||
"array": []any{
|
||||
map[string]any{"first": 1},
|
||||
},
|
||||
}
|
||||
err := mergeSlicesWithoutDuplicates(base, merge, "array", func(record any) (string, error) {
|
||||
return "fixed", nil
|
||||
})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, expected, base)
|
||||
}
|
||||
|
||||
func TestMergeSlicesWithoutDuplicatesDynamic(t *testing.T) {
|
||||
base := map[string]any{
|
||||
"array": []any{
|
||||
map[string]any{"first": 1},
|
||||
},
|
||||
}
|
||||
merge := map[string]any{
|
||||
"array": []any{
|
||||
map[string]any{"second": 2},
|
||||
},
|
||||
}
|
||||
expected := map[string]any{
|
||||
"array": []any{
|
||||
map[string]any{"first": 1},
|
||||
map[string]any{"second": 2},
|
||||
},
|
||||
}
|
||||
err := mergeSlicesWithoutDuplicates(base, merge, "array", func(record any) (string, error) {
|
||||
if m, ok := record.(map[string]any); ok {
|
||||
for key := range m {
|
||||
return key, nil
|
||||
}
|
||||
}
|
||||
return "broken", nil
|
||||
})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, expected, base)
|
||||
}
|
||||
|
||||
func TestMergeSlicesWithoutDuplicatesNoop(t *testing.T) {
|
||||
base := map[string]any{
|
||||
"array": []any{
|
||||
map[string]any{"first": 1},
|
||||
},
|
||||
}
|
||||
expected := map[string]any{
|
||||
"array": []any{
|
||||
map[string]any{"first": 1},
|
||||
},
|
||||
}
|
||||
err := mergeSlicesWithoutDuplicates(base, nil, "array", func(record any) (string, error) {
|
||||
if m, ok := record.(map[string]any); ok {
|
||||
for key := range m {
|
||||
return key, nil
|
||||
}
|
||||
}
|
||||
return "broken", nil
|
||||
})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, expected, base)
|
||||
}
|
||||
|
||||
func TestMergeSlicesWithoutDuplicatesMissing(t *testing.T) {
|
||||
base := map[string]any{
|
||||
"array": []any{
|
||||
map[string]any{"first": 1},
|
||||
},
|
||||
}
|
||||
merge := map[string]any{
|
||||
"array": []any{
|
||||
map[string]any{"second": 2},
|
||||
},
|
||||
}
|
||||
expected := map[string]any{
|
||||
"array": []any{
|
||||
map[string]any{"first": 1},
|
||||
},
|
||||
}
|
||||
err := mergeSlicesWithoutDuplicates(base, merge, "otherarray", func(record any) (string, error) {
|
||||
if m, ok := record.(map[string]any); ok {
|
||||
for key := range m {
|
||||
return key, nil
|
||||
}
|
||||
}
|
||||
return "broken", nil
|
||||
})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, expected, base)
|
||||
}
|
internal/sbom/presets.go | 65 (new file)
@ -0,0 +1,65 @@
|
||||
package sbom

import "github.com/containers/buildah/define"

// Preset returns a predefined SBOMScanOptions structure that has the passed-in
// name as one of its "Type" values.
func Preset(name string) (preset *define.SBOMScanOptions, err error) {
    // If you change these, make sure you update references in
    // buildah-commit.1.md and buildah-build.1.md to match!
    presets := []define.SBOMScanOptions{
        {
            Type:  []string{"", "syft", "syft-cyclonedx"},
            Image: "ghcr.io/anchore/syft",
            Commands: []string{
                "/syft scan -q dir:{ROOTFS} --output cyclonedx-json={OUTPUT}",
                "/syft scan -q dir:{CONTEXT} --output cyclonedx-json={OUTPUT}",
            },
            // ImageSBOMOutput: "/root/buildinfo/content_manifests/sbom-cyclonedx.json",
            // ImagePURLOutput: "/root/buildinfo/content_manifests/sbom-purl.json",
            MergeStrategy: define.SBOMMergeStrategyCycloneDXByComponentNameAndVersion,
        },
        {
            Type:  []string{"syft-spdx"},
            Image: "ghcr.io/anchore/syft",
            Commands: []string{
                "/syft scan -q dir:{ROOTFS} --output spdx-json={OUTPUT}",
                "/syft scan -q dir:{CONTEXT} --output spdx-json={OUTPUT}",
            },
            // ImageSBOMOutput: "/root/buildinfo/content_manifests/sbom-spdx.json",
            // ImagePURLOutput: "/root/buildinfo/content_manifests/sbom-purl.json",
            MergeStrategy: define.SBOMMergeStrategySPDXByPackageNameAndVersionInfo,
        },

        {
            Type:  []string{"trivy", "trivy-cyclonedx"},
            Image: "ghcr.io/aquasecurity/trivy",
            Commands: []string{
                "trivy filesystem -q {ROOTFS} --format cyclonedx --output {OUTPUT}",
                "trivy filesystem -q {CONTEXT} --format cyclonedx --output {OUTPUT}",
            },
            // ImageSBOMOutput: "/root/buildinfo/content_manifests/sbom-cyclonedx.json",
            // ImagePURLOutput: "/root/buildinfo/content_manifests/sbom-purl.json",
            MergeStrategy: define.SBOMMergeStrategyCycloneDXByComponentNameAndVersion,
        },
        {
            Type:  []string{"trivy-spdx"},
            Image: "ghcr.io/aquasecurity/trivy",
            Commands: []string{
                "trivy filesystem -q {ROOTFS} --format spdx-json --output {OUTPUT}",
                "trivy filesystem -q {CONTEXT} --format spdx-json --output {OUTPUT}",
            },
            // ImageSBOMOutput: "/root/buildinfo/content_manifests/sbom-spdx.json",
            // ImagePURLOutput: "/root/buildinfo/content_manifests/sbom-purl.json",
            MergeStrategy: define.SBOMMergeStrategySPDXByPackageNameAndVersionInfo,
        },
    }
    for _, preset := range presets {
        for _, presetName := range preset.Type {
            if presetName == name {
                return &preset, nil
            }
        }
    }
    return nil, nil
}
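A hedged usage sketch (hypothetical caller, not part of the patch): the empty name and "syft" resolve to the same CycloneDX preset, and an unrecognized name comes back as nil with a nil error, which callers are expected to treat as "unknown preset":

opts, err := sbom.Preset("syft")
if err == nil && opts != nil {
    fmt.Println(opts.Image)    // "ghcr.io/anchore/syft"
    fmt.Println(opts.Commands) // commands still holding {ROOTFS}, {CONTEXT}, and {OUTPUT} placeholders
}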
37
internal/sbom/presets_test.go
Normal file
@ -0,0 +1,37 @@
package sbom

import (
    "testing"

    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

func TestPreset(t *testing.T) {
    for presetName, expectToFind := range map[string]bool{
        "":                true,
        "syft":            true,
        "syft-cyclonedx":  true,
        "syft-spdx":       true,
        "trivy":           true,
        "trivy-cyclonedx": true,
        "trivy-spdx":      true,
        "rpc":             false,
        "justmakestuffup": false,
    } {
        desc := presetName
        if desc == "" {
            desc = "(blank)"
        }
        t.Run(desc, func(t *testing.T) {
            settings, err := Preset(presetName)
            require.NoError(t, err)
            if expectToFind {
                assert.NotNil(t, settings)
                assert.NotEmpty(t, settings.Commands)
            } else {
                assert.Nil(t, settings)
            }
        })
    }
}
@ -1,8 +1,10 @@
package cli

// the cli package contains urfave/cli related structs that help make up
// the command line for buildah commands. it resides here so other projects
// that vendor in this code can use them too.
// the cli package contains spf13/cobra related structs that help make up
// the command line for buildah commands. this file's contents are better
// suited for pkg/parse, but since pkg/parse imports pkg/util which also
// imports pkg/parse, having it there would create a cyclic dependency, so
// here we are.

import (
    "errors"
@ -17,6 +19,7 @@ import (
    "github.com/containers/buildah/pkg/parse"
    "github.com/containers/buildah/pkg/util"
    "github.com/containers/common/pkg/auth"
    cutil "github.com/containers/common/pkg/util"
    "github.com/containers/image/v5/docker/reference"
    "github.com/containers/image/v5/types"
    "github.com/opencontainers/runtime-spec/specs-go"
@ -88,20 +91,10 @@ func GenBuildOptions(c *cobra.Command, inputArgs []string, iopts BuildOptions) (
        removeAll = append(removeAll, iopts.BudResults.Authfile)
    }

    // Allow for --pull, --pull=true, --pull=false, --pull=never, --pull=always
    // --pull-always and --pull-never. The --pull-never and --pull-always options
    // will not be documented.
    pullPolicy := define.PullIfMissing
    if strings.EqualFold(strings.TrimSpace(iopts.Pull), "true") {
        pullPolicy = define.PullIfNewer
    pullPolicy, err := parse.PullPolicyFromOptions(c)
    if err != nil {
        return options, nil, nil, err
    }
    if iopts.PullAlways || strings.EqualFold(strings.TrimSpace(iopts.Pull), "always") {
        pullPolicy = define.PullAlways
    }
    if iopts.PullNever || strings.EqualFold(strings.TrimSpace(iopts.Pull), "never") {
        pullPolicy = define.PullNever
    }
    logrus.Debugf("Pull Policy for pull [%v]", pullPolicy)

    args := make(map[string]string)
    if c.Flag("build-arg-file").Changed {
@ -224,21 +217,6 @@ func GenBuildOptions(c *cobra.Command, inputArgs []string, iopts BuildOptions) (
        return options, nil, nil, err
    }

    pullFlagsCount := 0
    if c.Flag("pull").Changed {
        pullFlagsCount++
    }
    if c.Flag("pull-always").Changed {
        pullFlagsCount++
    }
    if c.Flag("pull-never").Changed {
        pullFlagsCount++
    }

    if pullFlagsCount > 1 {
        return options, nil, nil, errors.New("can only set one of 'pull' or 'pull-always' or 'pull-never'")
    }

    if (c.Flag("rm").Changed || c.Flag("force-rm").Changed) && (!c.Flag("layers").Changed && !c.Flag("no-cache").Changed) {
        return options, nil, nil, errors.New("'rm' and 'force-rm' can only be set with either 'layers' or 'no-cache'")
    }
@ -356,6 +334,24 @@ func GenBuildOptions(c *cobra.Command, inputArgs []string, iopts BuildOptions) (
        }
    }

    var sbomScanOptions []define.SBOMScanOptions
    if c.Flag("sbom").Changed || c.Flag("sbom-scanner-command").Changed || c.Flag("sbom-scanner-image").Changed || c.Flag("sbom-image-output").Changed || c.Flag("sbom-merge-strategy").Changed || c.Flag("sbom-output").Changed || c.Flag("sbom-image-output").Changed || c.Flag("sbom-purl-output").Changed || c.Flag("sbom-image-purl-output").Changed {
        sbomScanOption, err := parse.SBOMScanOptions(c)
        if err != nil {
            return options, nil, nil, err
        }
        if !cutil.StringInSlice(contextDir, sbomScanOption.ContextDir) {
            sbomScanOption.ContextDir = append(sbomScanOption.ContextDir, contextDir)
        }
        for _, abc := range additionalBuildContext {
            if !abc.IsURL && !abc.IsImage {
                sbomScanOption.ContextDir = append(sbomScanOption.ContextDir, abc.Value)
            }
        }
        sbomScanOption.PullPolicy = pullPolicy
        sbomScanOptions = append(sbomScanOptions, *sbomScanOption)
    }

    options = define.BuildOptions{
        AddCapabilities:         iopts.CapAdd,
        AdditionalBuildContexts: additionalBuildContext,
@ -416,6 +412,7 @@ func GenBuildOptions(c *cobra.Command, inputArgs []string, iopts BuildOptions) (
        Runtime:             iopts.Runtime,
        RuntimeArgs:         runtimeFlags,
        RusageLogFile:       iopts.RusageLogFile,
        SBOMScanOptions:     sbomScanOptions,
        SignBy:              iopts.SignBy,
        SignaturePolicyPath: iopts.SignaturePolicy,
        SkipUnusedStages:    types.NewOptionalBool(iopts.SkipUnusedStages),
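A hedged illustration of what the sbomScanOption block above ends up carrying (hypothetical paths, not part of the patch): with a build context of /src/app and one additional local, non-URL, non-image build context at /src/docs, the single scan spec handed to the build roughly contains

// Other fields (Image, Commands, output paths) come from parse.SBOMScanOptions.
sbomScanOption := &define.SBOMScanOptions{
    ContextDir: []string{"/src/app", "/src/docs"},
    PullPolicy: pullPolicy, // the policy parsed earlier from --pull/--pull-always/--pull-never
}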
@ -1,6 +1,6 @@
package cli

// the cli package contains urfave/cli related structs that help make up
// the cli package contains spf13/cobra related structs that help make up
// the command line for buildah commands. it resides here so other projects
// that vendor in this code can use them too.

@ -90,6 +90,14 @@ type BudResults struct {
    Rm                 bool
    Runtime            string
    RuntimeFlags       []string
    SbomPreset         string
    SbomScannerImage   string
    SbomScannerCommand []string
    SbomMergeStrategy  string
    SbomOutput         string
    SbomImgOutput      string
    SbomPurlOutput     string
    SbomImgPurlOutput  string
    Secrets            []string
    SSH                []string
    SignaturePolicy    string
@ -110,6 +118,7 @@ type BudResults struct {
    OSFeatures  []string
    OSVersion   string
    CWOptions   string
    SBOMOptions []string
}

// FromAndBudResults represents the results for common flags
@ -253,7 +262,7 @@ func GetBudFlags(flags *BudResults) pflag.FlagSet {
    fs.String("os", runtime.GOOS, "set the OS to the provided value instead of the current operating system of the host")
    fs.StringArrayVar(&flags.OSFeatures, "os-feature", []string{}, "set required OS `feature` for the target image in addition to values from the base image")
    fs.StringVar(&flags.OSVersion, "os-version", "", "set required OS `version` for the target image instead of the value from the base image")
    fs.StringVar(&flags.Pull, "pull", "true", "pull the image from the registry if newer or not present in store, if false, only pull the image if not present, if always, pull the image even if the named image is present in store, if never, only use the image present in store if available")
    fs.StringVar(&flags.Pull, "pull", "true", "pull base and SBOM scanner images from the registry if newer or not present in store, if false, only pull base and SBOM scanner images if not present, if always, pull base and SBOM scanner images even if the named images are present in store, if never, only use images present in store if available")
    fs.Lookup("pull").NoOptDefVal = "true" // allow `--pull` to be set to `true` as expected.
    fs.BoolVar(&flags.PullAlways, "pull-always", false, "pull the image even if the named image is present in store")
    if err := fs.MarkHidden("pull-always"); err != nil {
@ -269,6 +278,14 @@ func GetBudFlags(flags *BudResults) pflag.FlagSet {
    fs.BoolVar(&flags.Rm, "rm", true, "remove intermediate containers after a successful build")
    // "runtime" definition moved to avoid name collision in podman build. Defined in cmd/buildah/build.go.
    fs.StringSliceVar(&flags.RuntimeFlags, "runtime-flag", []string{}, "add global flags for the container runtime")
    fs.StringVar(&flags.SbomPreset, "sbom", "", "scan working container using `preset` configuration")
    fs.StringVar(&flags.SbomScannerImage, "sbom-scanner-image", "", "scan working container using scanner command from `image`")
    fs.StringArrayVar(&flags.SbomScannerCommand, "sbom-scanner-command", nil, "scan working container using `command` in scanner image")
    fs.StringVar(&flags.SbomMergeStrategy, "sbom-merge-strategy", "", "merge scan results using `strategy`")
    fs.StringVar(&flags.SbomOutput, "sbom-output", "", "save scan results to `file`")
    fs.StringVar(&flags.SbomImgOutput, "sbom-image-output", "", "add scan results to image as `path`")
    fs.StringVar(&flags.SbomPurlOutput, "sbom-purl-output", "", "save scan results to `file`")
    fs.StringVar(&flags.SbomImgPurlOutput, "sbom-image-purl-output", "", "add scan results to image as `path`")
    fs.StringArrayVar(&flags.Secrets, "secret", []string{}, "secret file to expose to the build")
    fs.StringVar(&flags.SignBy, "sign-by", "", "sign the image using a GPG key with the specified `FINGERPRINT`")
    fs.StringVar(&flags.SignaturePolicy, "signature-policy", "", "`pathname` of signature policy file (not usually used)")
@ -324,6 +341,14 @@ func GetBudFlagsCompletions() commonComp.FlagCompletions {
    flagCompletion["output"] = commonComp.AutocompleteNone
    flagCompletion["pull"] = commonComp.AutocompleteDefault
    flagCompletion["runtime-flag"] = commonComp.AutocompleteNone
    flagCompletion["sbom"] = commonComp.AutocompleteNone
    flagCompletion["sbom-scanner-image"] = commonComp.AutocompleteNone
    flagCompletion["sbom-scanner-command"] = commonComp.AutocompleteNone
    flagCompletion["sbom-merge-strategy"] = commonComp.AutocompleteNone
    flagCompletion["sbom-output"] = commonComp.AutocompleteDefault
    flagCompletion["sbom-image-output"] = commonComp.AutocompleteNone
    flagCompletion["sbom-purl-output"] = commonComp.AutocompleteDefault
    flagCompletion["sbom-image-purl-output"] = commonComp.AutocompleteNone
    flagCompletion["secret"] = commonComp.AutocompleteNone
    flagCompletion["sign-by"] = commonComp.AutocompleteNone
    flagCompletion["signature-policy"] = commonComp.AutocompleteNone
@ -18,6 +18,7 @@ import (
    "github.com/containers/buildah/define"
    mkcwtypes "github.com/containers/buildah/internal/mkcw/types"
    internalParse "github.com/containers/buildah/internal/parse"
    "github.com/containers/buildah/internal/sbom"
    "github.com/containers/buildah/internal/tmpdir"
    "github.com/containers/buildah/pkg/sshagent"
    "github.com/containers/common/pkg/auth"
@ -446,6 +447,58 @@ func SystemContextFromFlagSet(flags *pflag.FlagSet, findFlagFunc func(name strin
    return ctx, nil
}

// PullPolicyFromOptions returns a PullPolicy that reflects the combination of
// the specified "pull" and undocumented "pull-always" and "pull-never" flags.
func PullPolicyFromOptions(c *cobra.Command) (define.PullPolicy, error) {
    return PullPolicyFromFlagSet(c.Flags(), c.Flag)
}

// PullPolicyFromFlagSet returns a PullPolicy that reflects the combination of
// the specified "pull" and undocumented "pull-always" and "pull-never" flags.
func PullPolicyFromFlagSet(flags *pflag.FlagSet, findFlagFunc func(name string) *pflag.Flag) (define.PullPolicy, error) {
    pullFlagsCount := 0

    if findFlagFunc("pull").Changed {
        pullFlagsCount++
    }
    if findFlagFunc("pull-always").Changed {
        pullFlagsCount++
    }
    if findFlagFunc("pull-never").Changed {
        pullFlagsCount++
    }

    if pullFlagsCount > 1 {
        return 0, errors.New("can only set one of 'pull' or 'pull-always' or 'pull-never'")
    }

    // Allow for --pull, --pull=true, --pull=false, --pull=never, --pull=always
    // --pull-always and --pull-never. The --pull-never and --pull-always options
    // will not be documented.
    pullPolicy := define.PullIfMissing
    pullFlagValue := findFlagFunc("pull").Value.String()
    if strings.EqualFold(pullFlagValue, "true") || strings.EqualFold(pullFlagValue, "ifnewer") {
        pullPolicy = define.PullIfNewer
    }
    pullAlwaysFlagValue, err := flags.GetBool("pull-always")
    if err != nil {
        return 0, err
    }
    if pullAlwaysFlagValue || strings.EqualFold(pullFlagValue, "always") {
        pullPolicy = define.PullAlways
    }
    pullNeverFlagValue, err := flags.GetBool("pull-never")
    if err != nil {
        return 0, err
    }
    if pullNeverFlagValue || strings.EqualFold(pullFlagValue, "never") {
        pullPolicy = define.PullNever
    }
    logrus.Debugf("Pull Policy for pull [%v]", pullPolicy)

    return pullPolicy, nil
}
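A hedged sketch (not part of the patch) of how the mapping above behaves when driven from a scratch flag set; the flag names and NoOptDefVal mirror GetBudFlags, and the expected results follow from the code above:

package main

import (
    "fmt"

    "github.com/containers/buildah/pkg/parse"
    "github.com/spf13/pflag"
)

func main() {
    fs := pflag.NewFlagSet("example", pflag.ContinueOnError)
    fs.String("pull", "true", "")
    fs.Lookup("pull").NoOptDefVal = "true"
    fs.Bool("pull-always", false, "")
    fs.Bool("pull-never", false, "")
    _ = fs.Parse([]string{"--pull=never"})
    policy, err := parse.PullPolicyFromFlagSet(fs, fs.Lookup)
    fmt.Println(policy, err) // expect define.PullNever and a nil error
    // "--pull=false" would leave PullIfMissing, "--pull" or "--pull=true" yields
    // PullIfNewer, "--pull=always" yields PullAlways, and combining --pull with
    // --pull-always or --pull-never is rejected as an error.
}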
func getAuthFile(authfile string) string {
    if authfile != "" {
        absAuthfile, err := filepath.Abs(authfile)
@ -709,6 +762,73 @@ func GetConfidentialWorkloadOptions(arg string) (define.ConfidentialWorkloadOpti
    return options, nil
}

// SBOMScanOptions parses the build options from the cli
func SBOMScanOptions(c *cobra.Command) (*define.SBOMScanOptions, error) {
    return SBOMScanOptionsFromFlagSet(c.Flags(), c.Flag)
}

// SBOMScanOptionsFromFlagSet parses scan settings from the cli
func SBOMScanOptionsFromFlagSet(flags *pflag.FlagSet, findFlagFunc func(name string) *pflag.Flag) (*define.SBOMScanOptions, error) {
    preset, err := flags.GetString("sbom")
    if err != nil {
        return nil, fmt.Errorf("invalid value for --sbom: %w", err)
    }

    options, err := sbom.Preset(preset)
    if err != nil {
        return nil, err
    }
    if options == nil {
        return nil, fmt.Errorf("parsing --sbom flag: unrecognized preset name %q", preset)
    }
    image, err := flags.GetString("sbom-scanner-image")
    if err != nil {
        return nil, fmt.Errorf("invalid value for --sbom-scanner-image: %w", err)
    }
    commands, err := flags.GetStringArray("sbom-scanner-command")
    if err != nil {
        return nil, fmt.Errorf("invalid value for --sbom-scanner-command: %w", err)
    }
    mergeStrategy, err := flags.GetString("sbom-merge-strategy")
    if err != nil {
        return nil, fmt.Errorf("invalid value for --sbom-merge-strategy: %w", err)
    }

    if image != "" || len(commands) > 0 || mergeStrategy != "" {
        options = &define.SBOMScanOptions{
            Image:         image,
            Commands:      append([]string{}, commands...),
            MergeStrategy: define.SBOMMergeStrategy(mergeStrategy),
        }
    }
    if options.ImageSBOMOutput, err = flags.GetString("sbom-image-output"); err != nil {
        return nil, fmt.Errorf("invalid value for --sbom-image-output: %w", err)
    }
    if options.SBOMOutput, err = flags.GetString("sbom-output"); err != nil {
        return nil, fmt.Errorf("invalid value for --sbom-output: %w", err)
    }
    if options.ImagePURLOutput, err = flags.GetString("sbom-image-purl-output"); err != nil {
        return nil, fmt.Errorf("invalid value for --sbom-image-purl-output: %w", err)
    }
    if options.PURLOutput, err = flags.GetString("sbom-purl-output"); err != nil {
        return nil, fmt.Errorf("invalid value for --sbom-purl-output: %w", err)
    }

    if options.Image == "" || len(options.Commands) == 0 || (options.SBOMOutput == "" && options.ImageSBOMOutput == "" && options.PURLOutput == "" && options.ImagePURLOutput == "") {
        return options, fmt.Errorf("sbom configuration missing one or more of (%q, %q, %q, %q, %q or %q)", "--sbom-scanner-image", "--sbom-scanner-command", "--sbom-output", "--sbom-image-output", "--sbom-purl-output", "--sbom-image-purl-output")
    }
    if len(options.Commands) > 1 && options.MergeStrategy == "" {
        return options, fmt.Errorf("sbom configuration included multiple %q values but no %q value", "--sbom-scanner-command", "--sbom-merge-strategy")
    }
    switch options.MergeStrategy {
    default:
        return options, fmt.Errorf("sbom arguments included unrecognized merge strategy %q", string(options.MergeStrategy))
    case define.SBOMMergeStrategyCat, define.SBOMMergeStrategyCycloneDXByComponentNameAndVersion, define.SBOMMergeStrategySPDXByPackageNameAndVersionInfo:
        // all good here
    }
    return options, nil
}
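As a hedged illustration of the validation above (not part of the patch): a custom, non-preset configuration must name a scanner image, at least one command, and at least one output, and more than one command requires a merge strategy. Flags such as --sbom-scanner-image=alpine, two --sbom-scanner-command values, --sbom-merge-strategy=cat, and --sbom-output=sbom.txt would come back from SBOMScanOptionsFromFlagSet roughly as:

// Hypothetical result; commands still carry their placeholders at this point.
&define.SBOMScanOptions{
    Image:         "alpine",
    Commands:      []string{"echo {ROOTFS} > {OUTPUT}", "echo {CONTEXT} > {OUTPUT}"},
    MergeStrategy: define.SBOMMergeStrategyCat,
    SBOMOutput:    "sbom.txt",
}

which passes the checks above; dropping the merge strategy or all of the output flags would trigger the corresponding errors.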
// IDMappingOptions parses the build options related to user namespaces and ID mapping.
func IDMappingOptions(c *cobra.Command, isolation define.Isolation) (usernsOptions define.NamespaceOptions, idmapOptions *define.IDMappingOptions, err error) {
    return IDMappingOptionsFromFlagSet(c.Flags(), c.PersistentFlags(), c.Flag)
295
scan.go
Normal file
@ -0,0 +1,295 @@
package buildah

import (
    "context"
    "fmt"
    "io"
    "os"
    "path/filepath"
    "strings"

    "github.com/containers/buildah/define"
    "github.com/containers/buildah/internal/sbom"
    "github.com/containers/common/pkg/util"
    "github.com/mattn/go-shellwords"
    rspec "github.com/opencontainers/runtime-spec/specs-go"
    "github.com/sirupsen/logrus"
)

func stringSliceReplaceAll(slice []string, replacements map[string]string, important []string) (built []string, replacedAnImportantValue bool) {
    built = make([]string, 0, len(slice))
    for i := range slice {
        element := slice[i]
        for from, to := range replacements {
            previous := element
            if element = strings.ReplaceAll(previous, from, to); element != previous {
                if len(important) == 0 || util.StringInSlice(from, important) {
                    replacedAnImportantValue = true
                }
            }
        }
        built = append(built, element)
    }
    return built, replacedAnImportantValue
}
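A hedged illustration of how the helper above is used later in this file (same-package snippet, values hypothetical): substituting the {ROOTFS} and {OUTPUT} placeholders in a preset command and noting whether an "important" placeholder was actually present.

cmd := []string{"/syft", "scan", "-q", "dir:{ROOTFS}", "--output", "cyclonedx-json={OUTPUT}"}
resolved, scansRootfs := stringSliceReplaceAll(cmd,
    map[string]string{
        "{ROOTFS}": "/.rootfs",
        "{OUTPUT}": "/.scans/scan0.json",
    },
    []string{"{ROOTFS}"},
)
// resolved == []string{"/syft", "scan", "-q", "dir:/.rootfs", "--output", "cyclonedx-json=/.scans/scan0.json"}
// scansRootfs == true, because the "important" {ROOTFS} placeholder was replaced;
// {OUTPUT} alone would not have set it.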
// sbomScan iterates through the scanning configuration settings, generating
// SBOM files and storing them either in the rootfs or in a local file path.
func (b *Builder) sbomScan(ctx context.Context, options CommitOptions) (imageFiles, localFiles map[string]string, scansDir string, err error) {
    // We'll use a temporary per-container directory for this one.
    cdir, err := b.store.ContainerDirectory(b.ContainerID)
    if err != nil {
        return nil, nil, "", err
    }
    scansDir, err = os.MkdirTemp(cdir, "buildah-scan")
    if err != nil {
        return nil, nil, "", err
    }
    defer func() {
        if err != nil {
            if err := os.RemoveAll(scansDir); err != nil {
                logrus.Warnf("removing temporary directory %q: %v", scansDir, err)
            }
        }
    }()

    // We may be producing sets of outputs using temporary containers, and
    // there's no need to create more than one container for any one
    // specific scanner image.
    scanners := make(map[string]*Builder)
    defer func() {
        for _, scanner := range scanners {
            scannerID := scanner.ContainerID
            if err := scanner.Delete(); err != nil {
                logrus.Warnf("removing temporary scanner container %q: %v", scannerID, err)
            }
        }
    }()

    // Just assume that every scanning method will be looking at the rootfs.
    rootfs, err := b.Mount(b.MountLabel)
    if err != nil {
        return nil, nil, "", err
    }
    defer func(b *Builder) {
        if err := b.Unmount(); err != nil {
            logrus.Warnf("unmounting temporary scanner container %q: %v", b.ContainerID, err)
        }
    }(b)

    // Iterate through all of the scanning strategies.
    for _, scanSpec := range options.SBOMScanOptions {
        // Pull the image and create a container we can run the scanner
        // in, unless we've done that already for this scanner image.
        scanBuilder, ok := scanners[scanSpec.Image]
        if !ok {
            builderOptions := BuilderOptions{
                FromImage:        scanSpec.Image,
                ContainerSuffix:  "scanner",
                PullPolicy:       scanSpec.PullPolicy,
                BlobDirectory:    options.BlobDirectory,
                Logger:           b.Logger,
                SystemContext:    options.SystemContext,
                MountLabel:       b.MountLabel,
                ProcessLabel:     b.ProcessLabel,
                IDMappingOptions: &b.IDMappingOptions,
            }
            if scanBuilder, err = NewBuilder(ctx, b.store, builderOptions); err != nil {
                return nil, nil, "", fmt.Errorf("creating temporary working container to run scanner: %w", err)
            }
            scanners[scanSpec.Image] = scanBuilder
        }
        // Now figure out which commands we need to run. First, try to
        // parse a command ourselves, because syft's image (at least)
        // doesn't include a shell. Build a slice of command slices.
        var commands [][]string
        for _, commandSpec := range scanSpec.Commands {
            // Start by assuming it's shell -c $whatever.
            parsedCommand := []string{"/bin/sh", "-c", commandSpec}
            if shell := scanBuilder.Shell(); len(shell) != 0 {
                parsedCommand = append(append([]string{}, shell...), commandSpec)
            }
            if !strings.ContainsAny(commandSpec, "<>|") { // An imperfect check for shell redirection being used.
                // If we can parse it ourselves, though, prefer to use that result,
                // in case the scanner image doesn't include a shell.
                if parsed, err := shellwords.Parse(commandSpec); err == nil {
                    parsedCommand = parsed
                }
            }
            commands = append(commands, parsedCommand)
        }
        // Set up a list of mounts for the rootfs and whichever context
        // directories we're told were used.
        const rootfsTargetDir = "/.rootfs"
        const scansTargetDir = "/.scans"
        const contextsTargetDirPrefix = "/.context"
        runMounts := []rspec.Mount{
            // Our temporary directory, read-write.
            {
                Type:        define.TypeBind,
                Source:      scansDir,
                Destination: scansTargetDir,
                Options:     []string{"rw", "z"},
            },
            // The rootfs, read-only.
            {
                Type:        define.TypeBind,
                Source:      rootfs,
                Destination: rootfsTargetDir,
                Options:     []string{"ro"},
            },
        }
        // Each context directory, also read-only.
        for i := range scanSpec.ContextDir {
            contextMount := rspec.Mount{
                Type:        define.TypeBind,
                Source:      scanSpec.ContextDir[i],
                Destination: fmt.Sprintf("%s%d", contextsTargetDirPrefix, i),
                Options:     []string{"ro"},
            }
            runMounts = append(runMounts, contextMount)
        }
        // Set up run options and mounts one time, and reuse it.
        runOptions := RunOptions{
            Logger:        b.Logger,
            Isolation:     b.Isolation,
            SystemContext: options.SystemContext,
            Mounts:        runMounts,
        }
        // We'll have to do some text substitutions so that we run the
        // right commands, in the right order, pointing at the right
        // mount points.
        var resolvedCommands [][]string
        var resultFiles []string
        for _, command := range commands {
            // Each command gets to produce its own file that we'll
            // combine later if there's more than one of them.
            contextDirScans := 0
            for i := range scanSpec.ContextDir {
                resultFile := filepath.Join(scansTargetDir, fmt.Sprintf("scan%d.json", len(resultFiles)))
                // If the command mentions {CONTEXT}...
                resolvedCommand, scansContext := stringSliceReplaceAll(command,
                    map[string]string{
                        "{CONTEXT}": fmt.Sprintf("%s%d", contextsTargetDirPrefix, i),
                        "{OUTPUT}":  resultFile,
                    },
                    []string{"{CONTEXT}"},
                )
                if !scansContext {
                    break
                }
                // ... resolve the path references and add it to the list of commands.
                resolvedCommands = append(resolvedCommands, resolvedCommand)
                resultFiles = append(resultFiles, resultFile)
                contextDirScans++
            }
            if contextDirScans == 0 {
                resultFile := filepath.Join(scansTargetDir, fmt.Sprintf("scan%d.json", len(resultFiles)))
                // If the command didn't mention {CONTEXT}, but does mention {ROOTFS}...
                resolvedCommand, scansRootfs := stringSliceReplaceAll(command,
                    map[string]string{
                        "{ROOTFS}": rootfsTargetDir,
                        "{OUTPUT}": resultFile,
                    },
                    []string{"{ROOTFS}"},
                )
                // ... resolve the path references and add that to the list of commands.
                if scansRootfs {
                    resolvedCommands = append(resolvedCommands, resolvedCommand)
                    resultFiles = append(resultFiles, resultFile)
                }
            }
        }
        // Run all of the commands, one after the other, producing one
        // or more files named "scan%d.json" in our temporary directory.
        for _, resolvedCommand := range resolvedCommands {
            logrus.Debugf("Running scan command %q", resolvedCommand)
            if err = scanBuilder.Run(resolvedCommand, runOptions); err != nil {
                return nil, nil, "", fmt.Errorf("running scanning command %v: %w", resolvedCommand, err)
            }
        }
        // Produce the combined output files that we need to create, if there are any.
        var sbomResult, purlResult string
        switch {
        case scanSpec.ImageSBOMOutput != "":
            sbomResult = filepath.Join(scansDir, filepath.Base(scanSpec.ImageSBOMOutput))
        case scanSpec.SBOMOutput != "":
            sbomResult = filepath.Join(scansDir, filepath.Base(scanSpec.SBOMOutput))
        default:
            sbomResult = filepath.Join(scansDir, "sbom-result")
        }
        switch {
        case scanSpec.ImagePURLOutput != "":
            purlResult = filepath.Join(scansDir, filepath.Base(scanSpec.ImagePURLOutput))
        case scanSpec.PURLOutput != "":
            purlResult = filepath.Join(scansDir, filepath.Base(scanSpec.PURLOutput))
        default:
            purlResult = filepath.Join(scansDir, "purl-result")
        }
        copyFile := func(destination, source string) error {
            dst, err := os.Create(destination)
            if err != nil {
                return err
            }
            defer dst.Close()
            src, err := os.Open(source)
            if err != nil {
                return err
            }
            defer src.Close()
            if _, err = io.Copy(dst, src); err != nil {
                return fmt.Errorf("copying %q to %q: %w", source, destination, err)
            }
            return nil
        }
        err = func() error {
            for i := range resultFiles {
                thisResultFile := filepath.Join(scansDir, filepath.Base(resultFiles[i]))
                switch i {
                case 0:
                    // Straight-up copy to create the first version of the final output.
                    if err = copyFile(sbomResult, thisResultFile); err != nil {
                        return err
                    }
                    // This shouldn't change any contents, but lets us generate the purl file.
                    err = sbom.Merge(scanSpec.MergeStrategy, thisResultFile, sbomResult, purlResult)
                default:
                    // Hopefully we know how to merge information from the new one into the final output.
                    err = sbom.Merge(scanSpec.MergeStrategy, sbomResult, thisResultFile, purlResult)
                }
            }
            return err
        }()
        if err != nil {
            return nil, nil, "", err
        }
        // If these files are supposed to be written to the local filesystem, add
        // their contents to the map of files we expect our caller to write.
        if scanSpec.SBOMOutput != "" || scanSpec.PURLOutput != "" {
            if localFiles == nil {
                localFiles = make(map[string]string)
            }
            if scanSpec.SBOMOutput != "" {
                localFiles[scanSpec.SBOMOutput] = sbomResult
            }
            if scanSpec.PURLOutput != "" {
                localFiles[scanSpec.PURLOutput] = purlResult
            }
        }
        // If these files are supposed to be written to the image, create a map of
        // their contents so that we can either create a layer diff for them (or
        // slipstream them into a squashed layer diff) later.
        if scanSpec.ImageSBOMOutput != "" || scanSpec.ImagePURLOutput != "" {
            if imageFiles == nil {
                imageFiles = make(map[string]string)
            }
            if scanSpec.ImageSBOMOutput != "" {
                imageFiles[scanSpec.ImageSBOMOutput] = sbomResult
            }
            if scanSpec.ImagePURLOutput != "" {
                imageFiles[scanSpec.ImagePURLOutput] = purlResult
            }
        }
    }
    return imageFiles, localFiles, scansDir, nil
}
91
tests/sbom.bats
Normal file
@ -0,0 +1,91 @@
#!/usr/bin/env bats

load helpers

@test "commit-sbom-types" {
  _prefetch alpine ghcr.io/anchore/syft ghcr.io/aquasecurity/trivy
  run_buildah from --quiet --pull=false $WITH_POLICY_JSON alpine
  cid=$output
  for squash in "--squash" "" ; do
    for sbomtype in syft syft-cyclonedx syft-spdx trivy trivy-cyclonedx trivy-spdx; do
      echo "[sbom type $sbomtype${squash:+, $squash}]"
      # clear out one file that we might need to overwrite, but leave the other to
      # ensure that we don't accidentally append content to files that are already
      # present
      rm -f localpurl.json
      # write to both the image and the local filesystem
      run_buildah commit $WITH_POLICY_JSON --sbom ${sbomtype} --sbom-output=localsbom.json --sbom-purl-output=localpurl.json --sbom-image-output=/root/sbom.json --sbom-image-purl-output=/root/purl.json $squash $cid alpine-derived-image
      # both files should exist now, and neither should be empty
      test -s localsbom.json
      test -s localpurl.json
      # compare them to their equivalents in the image
      run_buildah from --quiet --pull=false $WITH_POLICY_JSON alpine-derived-image
      dcid=$output
      run_buildah mount $dcid
      mountpoint=$output
      cmp $mountpoint/root/purl.json localpurl.json
      cmp $mountpoint/root/sbom.json localsbom.json
    done
  done
}

@test "bud-sbom-types" {
  _prefetch alpine ghcr.io/anchore/syft ghcr.io/aquasecurity/trivy
  for layers in --layers=true --layers=false --squash ; do
    for sbomtype in syft syft-cyclonedx syft-spdx trivy trivy-cyclonedx trivy-spdx; do
      echo "[sbom type $sbomtype with $layers]"
      # clear out one file that we might need to overwrite, but leave the other to
      # ensure that we don't accidentally append content to files that are already
      # present
      rm -f localpurl.json
      # write to both the image and the local filesystem
      run_buildah build $WITH_POLICY_JSON --sbom ${sbomtype} --sbom-output=localsbom.json --sbom-purl-output=localpurl.json --sbom-image-output=/root/sbom.json --sbom-image-purl-output=/root/purl.json $layers -t alpine-derived-image $BUDFILES/simple-multi-step
      # both files should exist now, and neither should be empty
      test -s localsbom.json
      test -s localpurl.json
      # compare them to their equivalents in the image
      run_buildah from --quiet --pull=false $WITH_POLICY_JSON alpine-derived-image
      dcid=$output
      run_buildah mount $dcid
      mountpoint=$output
      cmp $mountpoint/root/purl.json localpurl.json
      cmp $mountpoint/root/sbom.json localsbom.json
    done
  done
}

@test "bud-sbom-with-no-changes" {
  _prefetch alpine ghcr.io/anchore/syft ghcr.io/aquasecurity/trivy
  for sbomtype in syft syft-cyclonedx syft-spdx trivy trivy-cyclonedx trivy-spdx; do
    echo "[sbom type $sbomtype]"
    run_buildah build $WITH_POLICY_JSON --sbom ${sbomtype} --sbom-output=localsbom.json --sbom-purl-output=localpurl.json --sbom-image-output=/root/sbom.json --sbom-image-purl-output=/root/purl.json -t busybox-derived-image $BUDFILES/pull
    # both files should exist now, and neither should be empty
    test -s localsbom.json
    test -s localpurl.json
  done
}

@test "bud-sbom-with-only-config-changes" {
  _prefetch alpine ghcr.io/anchore/syft ghcr.io/aquasecurity/trivy
  for layers in --layers=true --layers=false ; do
    for sbomtype in syft syft-cyclonedx syft-spdx trivy trivy-cyclonedx trivy-spdx; do
      echo "[sbom type $sbomtype with $layers]"
      # clear out one file that we might need to overwrite, but leave the other to
      # ensure that we don't accidentally append content to files that are already
      # present
      rm -f localpurl.json
      run_buildah build $WITH_POLICY_JSON --sbom ${sbomtype} --sbom-output=localsbom.json --sbom-purl-output=localpurl.json --sbom-image-output=/root/sbom.json --sbom-image-purl-output=/root/purl.json $layers -t alpine-derived-image -f $BUDFILES/env/Dockerfile.check-env $BUDFILES/env
      # both files should exist now, and neither should be empty
      test -s localsbom.json
      test -s localpurl.json
    done
  done
}

@test "bud-sbom-with-non-presets" {
  _prefetch alpine busybox
  run_buildah build --debug $WITH_POLICY_JSON --sbom-output=localsbom.txt --sbom-purl-output=localpurl.txt --sbom-image-output=/root/sbom.txt --sbom-image-purl-output=/root/purl.txt --sbom-scanner-image=alpine --sbom-scanner-command='echo SCANNED ROOT {ROOTFS} > {OUTPUT}' --sbom-scanner-command='echo SCANNED BUILD CONTEXT {CONTEXT} > {OUTPUT}' --sbom-merge-strategy=cat -t busybox-derived-image $BUDFILES/pull
  # both files should exist now, and neither should be empty
  test -s localsbom.txt
  test -s localpurl.txt
}