
feat: add sbom generation pipe (#2648)

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

Co-authored-by: Carlos Alexandro Becker <caarlos0@users.noreply.github.com>
Authored by Alex Goodman on 2021-12-11 22:21:51 -05:00, committed by GitHub
parent a965789203
commit bfdec808ab
18 changed files with 1079 additions and 51 deletions
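For orientation, the feature is driven by a new `sboms` section in `.goreleaser.yml` and can be disabled at release time with the new `--skip-sbom` flag. A minimal sketch, using only field names and defaults that appear in the diff below:

```yaml
# .goreleaser.yml: minimal sketch of the new pipe (syft and spdx-json output are the defaults)
sboms:
  - artifacts: archive   # other accepted values: source, package, binary, any
```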


@ -81,6 +81,10 @@ jobs:
./dist/*.apk
key: ${{ runner.os }}-go-${{ hashFiles('**/*.go') }}-${{ hashFiles('**/go.sum') }}
- uses: sigstore/cosign-installer@v1.4.1
- name: setup-syft
run: |
curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | \
sh -s -- -b /usr/local/bin v0.32.0
- name: setup-validate-krew-manifest
# TODO: replace this once https://github.com/kubernetes-sigs/krew/pull/736 is merged
run: go install github.com/caarlos0/krew/cmd/validate-krew-manifest@fork

.gitignore (vendored): 1 addition

@ -11,3 +11,4 @@ www/docs/static/releases*.json
completions/
.vscode/
.task/
.idea/


@ -201,11 +201,19 @@ snapcrafts:
confinement: classic
publish: true
sboms:
- artifacts: archive
signs:
- cmd: cosign
stdin: '{{ .Env.COSIGN_PWD }}'
args: ["sign-blob", "-key=cosign.key", "-output=${signature}", "${artifact}"]
artifacts: checksum
- id: sign-sboms
cmd: cosign
stdin: '{{ .Env.COSIGN_PWD }}'
args: ["sign-blob", "-key=cosign.key", "-output=${signature}", "${artifact}"]
artifacts: sbom
docker_signs:
- artifacts: manifests


@ -22,23 +22,24 @@ type releaseCmd struct {
}
type releaseOpts struct {
config string
releaseNotesFile string
releaseNotesTmpl string
releaseHeaderFile string
releaseHeaderTmpl string
releaseFooterFile string
releaseFooterTmpl string
autoSnapshot bool
snapshot bool
skipPublish bool
skipSign bool
skipValidate bool
skipAnnounce bool
rmDist bool
deprecated bool
parallelism int
timeout time.Duration
config string
releaseNotesFile string
releaseNotesTmpl string
releaseHeaderFile string
releaseHeaderTmpl string
releaseFooterFile string
releaseFooterTmpl string
autoSnapshot bool
snapshot bool
skipPublish bool
skipSign bool
skipValidate bool
skipAnnounce bool
skipSBOMCataloging bool
rmDist bool
deprecated bool
parallelism int
timeout time.Duration
}
func newReleaseCmd() *releaseCmd {
@ -82,6 +83,7 @@ func newReleaseCmd() *releaseCmd {
cmd.Flags().BoolVar(&root.opts.skipPublish, "skip-publish", false, "Skips publishing artifacts")
cmd.Flags().BoolVar(&root.opts.skipAnnounce, "skip-announce", false, "Skips announcing releases (implies --skip-validate)")
cmd.Flags().BoolVar(&root.opts.skipSign, "skip-sign", false, "Skips signing artifacts")
cmd.Flags().BoolVar(&root.opts.skipSBOMCataloging, "skip-sbom", false, "Skips cataloging artifacts")
cmd.Flags().BoolVar(&root.opts.skipValidate, "skip-validate", false, "Skips git checks")
cmd.Flags().BoolVar(&root.opts.rmDist, "rm-dist", false, "Removes the dist folder")
cmd.Flags().IntVarP(&root.opts.parallelism, "parallelism", "p", 0, "Amount tasks to run concurrently (default: number of CPUs)")
@ -139,6 +141,7 @@ func setupReleaseContext(ctx *context.Context, options releaseOpts) *context.Con
ctx.SkipAnnounce = ctx.Snapshot || options.skipPublish || options.skipAnnounce
ctx.SkipValidate = ctx.Snapshot || options.skipValidate
ctx.SkipSign = options.skipSign
ctx.SkipSBOMCataloging = options.skipSBOMCataloging
ctx.RmDist = options.rmDist
// test only


@ -61,6 +61,8 @@ const (
KrewPluginManifest
// ScoopManifest is an uploadable scoop manifest file.
ScoopManifest
// SBOM is a Software Bill of Materials file.
SBOM
)
func (t Type) String() string {
@ -95,6 +97,8 @@ func (t Type) String() string {
return "Krew Plugin Manifest"
case ScoopManifest:
return "Scoop Manifest"
case SBOM:
return "SBOM"
default:
return "unknown"
}


@ -385,6 +385,7 @@ func TestTypeToString(t *testing.T) {
GoFishRig,
KrewPluginManifest,
ScoopManifest,
SBOM,
} {
t.Run(a.String(), func(t *testing.T) {
require.NotEqual(t, "unknown", a.String())


@ -70,6 +70,7 @@ func refresh(ctx *context.Context, filepath string) error {
artifact.ByType(artifact.UploadableBinary),
artifact.ByType(artifact.UploadableSourceArchive),
artifact.ByType(artifact.LinuxPackage),
artifact.ByType(artifact.SBOM),
)
if len(ctx.Config.Checksum.IDs) > 0 {
filter = artifact.And(filter, artifact.ByIDs(ctx.Config.Checksum.IDs...))

internal/pipe/sbom/sbom.go (new file): 282 additions

@ -0,0 +1,282 @@
package sbom
import (
"bytes"
"fmt"
"io"
"os"
"os/exec"
"path/filepath"
"strings"
"github.com/apex/log"
"github.com/goreleaser/goreleaser/internal/artifact"
"github.com/goreleaser/goreleaser/internal/gio"
"github.com/goreleaser/goreleaser/internal/ids"
"github.com/goreleaser/goreleaser/internal/logext"
"github.com/goreleaser/goreleaser/internal/semerrgroup"
"github.com/goreleaser/goreleaser/internal/tmpl"
"github.com/goreleaser/goreleaser/pkg/config"
"github.com/goreleaser/goreleaser/pkg/context"
)
// Environment variables to pass through to exec
var passthroughEnvVars = []string{"HOME", "USER", "USERPROFILE", "TMPDIR", "TMP", "TEMP", "PATH"}
// Pipe that catalogs common artifacts as an SBOM.
type Pipe struct{}
func (Pipe) String() string { return "cataloging artifacts" }
func (Pipe) Skip(ctx *context.Context) bool {
return ctx.SkipSBOMCataloging || len(ctx.Config.SBOMs) == 0
}
// Default sets the Pipes defaults.
func (Pipe) Default(ctx *context.Context) error {
ids := ids.New("sboms")
for i := range ctx.Config.SBOMs {
cfg := &ctx.Config.SBOMs[i]
if cfg.Cmd == "" {
cfg.Cmd = "syft"
}
if cfg.Artifacts == "" {
cfg.Artifacts = "archive"
}
if len(cfg.Documents) == 0 {
switch cfg.Artifacts {
case "binary":
cfg.Documents = []string{"{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}.sbom"}
case "any":
cfg.Documents = []string{}
default:
cfg.Documents = []string{"{{ .ArtifactName }}.sbom"}
}
}
if cfg.Cmd == "syft" {
if len(cfg.Args) == 0 {
cfg.Args = []string{"$artifact", "--file", "$document", "--output", "spdx-json"}
}
if len(cfg.Env) == 0 && cfg.Artifacts == "source" || cfg.Artifacts == "archive" {
cfg.Env = []string{
"SYFT_FILE_METADATA_CATALOGER_ENABLED=true",
}
}
}
if cfg.ID == "" {
cfg.ID = "default"
}
if cfg.Artifacts != "any" && len(cfg.Documents) > 1 {
return fmt.Errorf("multiple SBOM outputs when artifacts=%q is unsupported", cfg.Artifacts)
}
ids.Inc(cfg.ID)
}
return ids.Validate()
}
// Run executes the Pipe.
func (Pipe) Run(ctx *context.Context) error {
g := semerrgroup.New(ctx.Parallelism)
for _, cfg := range ctx.Config.SBOMs {
g.Go(catalogTask(ctx, cfg))
}
return g.Wait()
}
func catalogTask(ctx *context.Context, cfg config.SBOM) func() error {
return func() error {
var filters []artifact.Filter
switch cfg.Artifacts {
case "source":
filters = append(filters, artifact.ByType(artifact.UploadableSourceArchive))
if len(cfg.IDs) > 0 {
log.Warn("when artifacts is `source`, `ids` has no effect. ignoring")
}
case "archive":
filters = append(filters, artifact.ByType(artifact.UploadableArchive))
case "binary":
filters = append(filters, artifact.Or(
artifact.ByType(artifact.Binary),
artifact.ByType(artifact.UploadableBinary),
artifact.ByType(artifact.UniversalBinary),
))
case "package":
filters = append(filters, artifact.ByType(artifact.LinuxPackage))
case "any":
newArtifacts, err := catalogArtifact(ctx, cfg, nil)
if err != nil {
return err
}
for _, newArtifact := range newArtifacts {
ctx.Artifacts.Add(newArtifact)
}
return nil
default:
return fmt.Errorf("invalid list of artifacts to catalog: %s", cfg.Artifacts)
}
if len(cfg.IDs) > 0 {
filters = append(filters, artifact.ByIDs(cfg.IDs...))
}
artifacts := ctx.Artifacts.Filter(artifact.And(filters...)).List()
return catalog(ctx, cfg, artifacts)
}
}
func catalog(ctx *context.Context, cfg config.SBOM, artifacts []*artifact.Artifact) error {
for _, a := range artifacts {
newArtifacts, err := catalogArtifact(ctx, cfg, a)
if err != nil {
return err
}
for _, newArtifact := range newArtifacts {
ctx.Artifacts.Add(newArtifact)
}
}
return nil
}
func subprocessDistPath(distDir string, pathRelativeToCwd string) (string, error) {
cwd, err := os.Getwd()
if err != nil {
return "", err
}
if !filepath.IsAbs(distDir) {
distDir, err = filepath.Abs(distDir)
if err != nil {
return "", err
}
}
relativePath, err := filepath.Rel(cwd, distDir)
if err != nil {
return "", err
}
return strings.TrimPrefix(pathRelativeToCwd, relativePath+string(filepath.Separator)), nil
}
func catalogArtifact(ctx *context.Context, cfg config.SBOM, a *artifact.Artifact) ([]*artifact.Artifact, error) {
env := ctx.Env.Copy()
artifactDisplayName := "(any)"
templater := tmpl.New(ctx).WithEnv(env)
if a != nil {
procPath, err := subprocessDistPath(ctx.Config.Dist, a.Path)
if err != nil {
return nil, fmt.Errorf("cataloging artifacts failed: cannot determine artifact path for %q: %w", a.Path, err)
}
env["artifact"] = procPath
env["artifactID"] = a.ID()
templater = templater.WithArtifact(a, nil)
artifactDisplayName = a.Path
}
var paths []string
for idx, sbom := range cfg.Documents {
input := filepath.Join(ctx.Config.Dist, expand(sbom, env))
path, err := templater.Apply(input)
if err != nil {
return nil, fmt.Errorf("cataloging artifacts failed: %s: invalid template: %w", input, err)
}
path, err = filepath.Abs(path)
if err != nil {
return nil, fmt.Errorf("cataloging artifacts failed: unable to create artifact path %q: %w", sbom, err)
}
procPath, err := subprocessDistPath(ctx.Config.Dist, path)
if err != nil {
return nil, fmt.Errorf("cataloging artifacts failed: cannot determine document path for %q: %w", path, err)
}
env[fmt.Sprintf("document%d", idx)] = procPath
if idx == 0 {
env["document"] = procPath
}
paths = append(paths, procPath)
}
var names []string
for _, p := range paths {
names = append(names, filepath.Base(p))
}
fields := log.Fields{"cmd": cfg.Cmd, "artifact": artifactDisplayName, "sboms": strings.Join(names, ", ")}
// nolint:prealloc
var args []string
for _, arg := range cfg.Args {
renderedArg, err := templater.Apply(expand(arg, env))
if err != nil {
return nil, fmt.Errorf("cataloging artifacts failed: %s: invalid template: %w", arg, err)
}
args = append(args, renderedArg)
}
// The GoASTScanner flags this as a security risk.
// However, this works as intended. The nosec annotation
// tells the scanner to ignore this.
// #nosec
cmd := exec.CommandContext(ctx, cfg.Cmd, args...)
cmd.Env = []string{}
for _, key := range passthroughEnvVars {
if value := os.Getenv(key); value != "" {
cmd.Env = append(cmd.Env, fmt.Sprintf("%s=%s", key, value))
}
}
cmd.Env = append(cmd.Env, cfg.Env...)
cmd.Dir = ctx.Config.Dist
var b bytes.Buffer
w := gio.Safe(&b)
cmd.Stderr = io.MultiWriter(logext.NewWriter(fields, logext.Error), w)
cmd.Stdout = io.MultiWriter(logext.NewWriter(fields, logext.Info), w)
log.WithFields(fields).Info("cataloging")
if err := cmd.Run(); err != nil {
return nil, fmt.Errorf("cataloging artifacts: %s failed: %w: %s", cfg.Cmd, err, b.String())
}
var artifacts []*artifact.Artifact
for _, sbom := range cfg.Documents {
templater = tmpl.New(ctx).WithEnv(env)
if a != nil {
env["artifact"] = a.Name
templater = templater.WithArtifact(a, nil)
}
name, err := templater.Apply(expand(sbom, env))
if err != nil {
return nil, fmt.Errorf("cataloging artifacts failed: %s: invalid template: %w", a, err)
}
search := filepath.Join(ctx.Config.Dist, name)
matches, err := filepath.Glob(search)
if err != nil {
return nil, fmt.Errorf("cataloging artifacts: failed to find SBOM artifact %q: %w", search, err)
}
for _, match := range matches {
artifacts = append(artifacts, &artifact.Artifact{
Type: artifact.SBOM,
Name: name,
Path: match,
Extra: map[string]interface{}{
artifact.ExtraID: cfg.ID,
},
})
}
}
return artifacts, nil
}
func expand(s string, env map[string]string) string {
return os.Expand(s, func(key string) string {
return env[key]
})
}


@ -0,0 +1,593 @@
package sbom
import (
"fmt"
"os"
"path/filepath"
"sort"
"testing"
"github.com/stretchr/testify/assert"
"github.com/goreleaser/goreleaser/internal/artifact"
"github.com/goreleaser/goreleaser/pkg/config"
"github.com/goreleaser/goreleaser/pkg/context"
"github.com/stretchr/testify/require"
)
func TestDescription(t *testing.T) {
require.NotEmpty(t, Pipe{}.String())
}
func TestSBOMCatalogDefault(t *testing.T) {
defaultArgs := []string{"$artifact", "--file", "$document", "--output", "spdx-json"}
defaultSboms := []string{
"{{ .ArtifactName }}.sbom",
}
defaultCmd := "syft"
tests := []struct {
configs []config.SBOM
artifact string
cmd string
sboms []string
args []string
env []string
err bool
}{
{
configs: []config.SBOM{
{
// empty
},
},
artifact: "archive",
cmd: defaultCmd,
sboms: defaultSboms,
args: defaultArgs,
env: []string{
"SYFT_FILE_METADATA_CATALOGER_ENABLED=true",
},
},
{
configs: []config.SBOM{
{
Artifacts: "package",
},
},
artifact: "package",
cmd: defaultCmd,
sboms: defaultSboms,
args: defaultArgs,
},
{
configs: []config.SBOM{
{
Artifacts: "archive",
},
},
artifact: "archive",
cmd: defaultCmd,
sboms: defaultSboms,
args: defaultArgs,
env: []string{
"SYFT_FILE_METADATA_CATALOGER_ENABLED=true",
},
},
{
configs: []config.SBOM{
{
Artifacts: "any",
},
},
artifact: "any",
cmd: defaultCmd,
sboms: []string{},
args: defaultArgs,
},
{
configs: []config.SBOM{
{
Artifacts: "binary",
},
},
artifact: "binary",
cmd: defaultCmd,
sboms: []string{"{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}.sbom"},
args: defaultArgs,
},
{
configs: []config.SBOM{
{
Artifacts: "source",
},
},
artifact: "source",
cmd: defaultCmd,
sboms: defaultSboms,
args: defaultArgs,
env: []string{
"SYFT_FILE_METADATA_CATALOGER_ENABLED=true",
},
},
{
// multiple documents are not allowed when artifacts != "any"
configs: []config.SBOM{
{
Artifacts: "binary",
Documents: []string{
"doc1",
"doc2",
},
},
},
err: true,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf("artifact=%q", test.configs[0].Artifacts), func(t *testing.T) {
ctx := &context.Context{
Config: config.Project{
SBOMs: test.configs,
},
}
err := Pipe{}.Default(ctx)
if test.err {
require.Error(t, err)
return
}
require.NoError(t, err)
require.Equal(t, ctx.Config.SBOMs[0].Cmd, test.cmd)
require.Equal(t, ctx.Config.SBOMs[0].Documents, test.sboms)
require.Equal(t, ctx.Config.SBOMs[0].Args, test.args)
require.Equal(t, ctx.Config.SBOMs[0].Env, test.env)
require.Equal(t, ctx.Config.SBOMs[0].Artifacts, test.artifact)
})
}
}
func TestSBOMCatalogInvalidArtifacts(t *testing.T) {
ctx := context.New(config.Project{})
ctx.Config.SBOMs = []config.SBOM{
{Artifacts: "foo"},
}
err := Pipe{}.Run(ctx)
require.EqualError(t, err, "invalid list of artifacts to catalog: foo")
}
func TestSeveralSBOMsWithTheSameID(t *testing.T) {
ctx := &context.Context{
Config: config.Project{
SBOMs: []config.SBOM{
{
ID: "a",
},
{
ID: "a",
},
},
},
}
require.EqualError(t, Pipe{}.Default(ctx), "found 2 sboms with the ID 'a', please fix your config")
}
func TestSkipCataloging(t *testing.T) {
t.Run("skip", func(t *testing.T) {
require.True(t, Pipe{}.Skip(context.New(config.Project{})))
})
t.Run("skip SBOM cataloging", func(t *testing.T) {
ctx := context.New(config.Project{
SBOMs: []config.SBOM{
{
Artifacts: "all",
},
},
})
ctx.SkipSBOMCataloging = true
require.True(t, Pipe{}.Skip(ctx))
})
t.Run("dont skip", func(t *testing.T) {
ctx := context.New(config.Project{
SBOMs: []config.SBOM{
{
Artifacts: "all",
},
},
})
require.False(t, Pipe{}.Skip(ctx))
})
}
func TestSBOMCatalogArtifacts(t *testing.T) {
tests := []struct {
desc string
ctx *context.Context
sbomPaths []string
sbomNames []string
expectedErrMsg string
}{
{
desc: "catalog errors",
expectedErrMsg: "cataloging artifacts: exit failed",
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{
Artifacts: "binary",
Cmd: "exit",
Args: []string{"1"},
},
},
},
),
},
{
desc: "invalid args template",
expectedErrMsg: `cataloging artifacts failed: ${FOO}-{{ .foo }{{}}{: invalid template: template: tmpl:1: unexpected "}" in operand`,
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{
Artifacts: "binary",
Cmd: "exit",
Args: []string{"${FOO}-{{ .foo }{{}}{"},
},
},
Env: []string{
"FOO=BAR",
},
},
),
},
{
desc: "catalog source archives",
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{Artifacts: "source"},
},
},
),
sbomPaths: []string{"artifact5.tar.gz.sbom"},
sbomNames: []string{"artifact5.tar.gz.sbom"},
},
{
desc: "catalog archives",
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{Artifacts: "archive"},
},
},
),
sbomPaths: []string{"artifact1.sbom", "artifact2.sbom"},
sbomNames: []string{"artifact1.sbom", "artifact2.sbom"},
},
{
desc: "catalog linux packages",
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{Artifacts: "package"},
},
},
),
sbomPaths: []string{"package1.deb.sbom"},
sbomNames: []string{"package1.deb.sbom"},
},
{
desc: "catalog binaries",
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{Artifacts: "binary"},
},
},
),
sbomPaths: []string{
"artifact3-name_1.2.2_linux_amd64.sbom",
"artifact4-name_1.2.2_linux_amd64.sbom",
},
sbomNames: []string{
"artifact3-name_1.2.2_linux_amd64.sbom",
"artifact4-name_1.2.2_linux_amd64.sbom",
},
},
{
desc: "manual cataloging",
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{
Artifacts: "any",
Args: []string{
"--file",
"$document0",
"--output",
"spdx-json",
"artifact5.tar.gz",
},
Documents: []string{
"final.sbom",
},
},
},
},
),
sbomPaths: []string{"final.sbom"},
sbomNames: []string{"final.sbom"},
},
{
desc: "multiple SBOM configs",
ctx: context.New(
config.Project{
Env: []string{
"SBOM_SUFFIX=s2-ish",
},
SBOMs: []config.SBOM{
{
ID: "s1",
Artifacts: "binary",
},
{
ID: "s2",
Artifacts: "archive",
Documents: []string{"{{ .ArtifactName }}.{{ .Env.SBOM_SUFFIX }}.sbom"},
},
},
},
),
sbomPaths: []string{
"artifact1.s2-ish.sbom",
"artifact2.s2-ish.sbom",
"artifact3-name_1.2.2_linux_amd64.sbom",
"artifact4-name_1.2.2_linux_amd64.sbom",
},
sbomNames: []string{
"artifact1.s2-ish.sbom",
"artifact2.s2-ish.sbom",
"artifact3-name_1.2.2_linux_amd64.sbom",
"artifact4-name_1.2.2_linux_amd64.sbom",
},
},
{
desc: "catalog artifacts with filtered by ID",
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{
Artifacts: "binary",
IDs: []string{"foo"},
},
},
},
),
sbomPaths: []string{
"artifact3-name_1.2.2_linux_amd64.sbom",
},
sbomNames: []string{
"artifact3-name_1.2.2_linux_amd64.sbom",
},
},
{
desc: "catalog binary artifacts with env in arguments",
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{
Artifacts: "binary",
Args: []string{
"--file",
"$document",
"--output",
"spdx-json",
"$artifact",
},
Documents: []string{
"{{ .ArtifactName }}.{{ .Env.TEST_USER }}.sbom",
},
},
},
Env: []string{
"TEST_USER=test-user-name",
},
},
),
sbomPaths: []string{
"artifact3-name.test-user-name.sbom",
"artifact4.test-user-name.sbom",
},
sbomNames: []string{
"artifact3-name.test-user-name.sbom",
"artifact4.test-user-name.sbom",
},
},
{
desc: "cataloging 'any' artifacts fails",
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{
Artifacts: "any",
Cmd: "false",
},
},
},
),
expectedErrMsg: "cataloging artifacts: false failed: exit status 1: ",
},
}
for _, test := range tests {
t.Run(test.desc, func(t *testing.T) {
testSBOMCataloging(t, test.ctx, test.sbomPaths, test.sbomNames, test.expectedErrMsg)
})
}
}
func testSBOMCataloging(tb testing.TB, ctx *context.Context, sbomPaths, sbomNames []string, expectedErrMsg string) {
tb.Helper()
tmpdir := tb.TempDir()
ctx.Config.Dist = tmpdir
ctx.Version = "1.2.2"
// create some fake artifacts
artifacts := []string{"artifact1", "artifact2", "artifact3", "package1.deb"}
require.NoError(tb, os.Mkdir(filepath.Join(tmpdir, "linux_amd64"), os.ModePerm))
for _, f := range artifacts {
file := filepath.Join(tmpdir, f)
require.NoError(tb, os.WriteFile(file, []byte("foo"), 0o644))
}
require.NoError(tb, os.WriteFile(filepath.Join(tmpdir, "linux_amd64", "artifact4"), []byte("foo"), 0o644))
artifacts = append(artifacts, "linux_amd64/artifact4")
require.NoError(tb, os.WriteFile(filepath.Join(tmpdir, "artifact5.tar.gz"), []byte("foo"), 0o644))
artifacts = append(artifacts, "artifact5.tar.gz")
ctx.Artifacts.Add(&artifact.Artifact{
Name: "artifact1",
Path: filepath.Join(tmpdir, "artifact1"),
Type: artifact.UploadableArchive,
Extra: map[string]interface{}{
artifact.ExtraID: "foo",
},
})
ctx.Artifacts.Add(&artifact.Artifact{
Name: "artifact2",
Path: filepath.Join(tmpdir, "artifact2"),
Type: artifact.UploadableArchive,
Extra: map[string]interface{}{
artifact.ExtraID: "foo3",
},
})
ctx.Artifacts.Add(&artifact.Artifact{
Name: "artifact3-name",
Path: filepath.Join(tmpdir, "artifact3"),
Goos: "linux",
Goarch: "amd64",
Type: artifact.UploadableBinary,
Extra: map[string]interface{}{
artifact.ExtraID: "foo",
artifact.ExtraBinary: "artifact3-name",
},
})
ctx.Artifacts.Add(&artifact.Artifact{
Name: "artifact4",
Path: filepath.Join(tmpdir, "linux_amd64", "artifact4"),
Goos: "linux",
Goarch: "amd64",
Type: artifact.Binary,
Extra: map[string]interface{}{
artifact.ExtraID: "foo3",
artifact.ExtraBinary: "artifact4-name",
},
})
ctx.Artifacts.Add(&artifact.Artifact{
Name: "artifact5.tar.gz",
Path: filepath.Join(tmpdir, "artifact5.tar.gz"),
Type: artifact.UploadableSourceArchive,
})
ctx.Artifacts.Add(&artifact.Artifact{
Name: "package1.deb",
Path: filepath.Join(tmpdir, "package1.deb"),
Type: artifact.LinuxPackage,
Extra: map[string]interface{}{
artifact.ExtraID: "foo",
},
})
// configure the pipeline
require.NoError(tb, Pipe{}.Default(ctx))
// run the pipeline
if expectedErrMsg != "" {
err := Pipe{}.Run(ctx)
require.Error(tb, err)
require.Contains(tb, err.Error(), expectedErrMsg)
return
}
require.NoError(tb, Pipe{}.Run(ctx))
// ensure all artifacts have an ID
for _, arti := range ctx.Artifacts.Filter(artifact.ByType(artifact.SBOM)).List() {
require.NotEmptyf(tb, arti.ID(), ".Extra.ID on %s", arti.Path)
}
// verify that only the artifacts and the sboms are in the dist dir
gotFiles := []string{}
require.NoError(tb, filepath.Walk(tmpdir,
func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
relPath, err := filepath.Rel(tmpdir, path)
if err != nil {
return err
}
gotFiles = append(gotFiles, relPath)
return nil
}),
)
wantFiles := append(artifacts, sbomPaths...)
sort.Strings(wantFiles)
require.ElementsMatch(tb, wantFiles, gotFiles, "SBOM paths differ")
var sbomArtifacts []string
for _, sig := range ctx.Artifacts.Filter(artifact.ByType(artifact.SBOM)).List() {
sbomArtifacts = append(sbomArtifacts, sig.Name)
}
require.ElementsMatch(tb, sbomArtifacts, sbomNames, "SBOM names differ")
}
func Test_subprocessDistPath(t *testing.T) {
cwd, err := os.Getwd()
require.NoError(t, err)
tests := []struct {
name string
distDir string
pathRelativeToCwd string
expects string
}{
{
name: "relative dist with anchor",
distDir: "./dist",
pathRelativeToCwd: "dist/my.sbom",
expects: "my.sbom",
},
{
name: "relative dist without anchor",
distDir: "dist",
pathRelativeToCwd: "dist/my.sbom",
expects: "my.sbom",
},
{
name: "relative dist with nested resource",
distDir: "dist",
pathRelativeToCwd: "dist/something/my.sbom",
expects: "something/my.sbom",
},
{
name: "absolute dist with nested resource",
distDir: filepath.Join(cwd, "dist/"),
pathRelativeToCwd: "dist/something/my.sbom",
expects: "something/my.sbom",
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
actual, err := subprocessDistPath(test.distDir, test.pathRelativeToCwd)
require.NoError(t, err)
assert.Equal(t, test.expects, actual)
})
}
}


@ -76,11 +76,14 @@ func (Pipe) Run(ctx *context.Context) error {
artifact.ByType(artifact.UploadableSourceArchive),
artifact.ByType(artifact.Checksum),
artifact.ByType(artifact.LinuxPackage),
artifact.ByType(artifact.SBOM),
))
case "archive":
filters = append(filters, artifact.ByType(artifact.UploadableArchive))
case "binary":
filters = append(filters, artifact.ByType(artifact.UploadableBinary))
case "sbom":
filters = append(filters, artifact.ByType(artifact.SBOM))
case "package":
filters = append(filters, artifact.ByType(artifact.LinuxPackage))
case "none": // TODO(caarlos0): this is not very useful, lets remove it.
@ -200,7 +203,7 @@ func signone(ctx *context.Context, cfg config.Sign, art *artifact.Artifact) ([]*
return nil, nil
}
// re-execute template results, using artifact name as artifact so they eval to the actual needed file name.
// re-execute template results, using artifact desc as artifact so they eval to the actual needed file desc.
env["artifact"] = art.Name
name, _ = tmpl.New(ctx).WithEnv(env).Apply(expand(cfg.Signature, env)) // could never error as it passed the previous check
cert, _ = tmpl.New(ctx).WithEnv(env).Apply(expand(cfg.Certificate, env)) // could never error as it passed the previous check


@ -12,6 +12,8 @@ import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/goreleaser/goreleaser/internal/artifact"
"github.com/goreleaser/goreleaser/pkg/config"
"github.com/goreleaser/goreleaser/pkg/context"
@ -183,18 +185,6 @@ func TestSignArtifacts(t *testing.T) {
},
),
},
{
desc: "sign single",
ctx: context.New(
config.Project{
Signs: []config.Sign{
{Artifacts: "all"},
},
},
),
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
},
{
desc: "sign all artifacts",
ctx: context.New(
@ -206,8 +196,8 @@ func TestSignArtifacts(t *testing.T) {
},
},
),
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
},
{
desc: "sign archives",
@ -357,6 +347,20 @@ func TestSignArtifacts(t *testing.T) {
signaturePaths: []string{"artifact5.tar.gz.sig"},
signatureNames: []string{"artifact5.tar.gz.sig"},
},
{
desc: "sign only sbom",
ctx: context.New(
config.Project{
Signs: []config.Sign{
{
Artifacts: "sbom",
},
},
},
),
signaturePaths: []string{"artifact5.tar.gz.sbom.sig"},
signatureNames: []string{"artifact5.tar.gz.sbom.sig"},
},
{
desc: "sign all artifacts with env",
ctx: context.New(
@ -379,8 +383,8 @@ func TestSignArtifacts(t *testing.T) {
},
},
),
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
},
{
desc: "sign all artifacts with template",
@ -404,8 +408,8 @@ func TestSignArtifacts(t *testing.T) {
},
},
),
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
},
{
desc: "sign single with password from stdin",
@ -432,8 +436,8 @@ func TestSignArtifacts(t *testing.T) {
},
},
),
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
user: passwordUser,
},
{
@ -462,8 +466,8 @@ func TestSignArtifacts(t *testing.T) {
},
},
),
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
user: passwordUser,
},
{
@ -491,8 +495,8 @@ func TestSignArtifacts(t *testing.T) {
},
},
),
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
user: passwordUser,
},
{
@ -545,9 +549,9 @@ func TestSignArtifacts(t *testing.T) {
},
},
),
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
certificateNames: []string{"artifact1_honk.pem", "artifact2_honk.pem", "artifact3_1.0.0_linux_amd64_honk.pem", "checksum_honk.pem", "checksum2_honk.pem", "artifact4_1.0.0_linux_amd64_honk.pem", "artifact5_honk.pem", "package1_honk.pem"},
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
certificateNames: []string{"artifact1_honk.pem", "artifact2_honk.pem", "artifact3_1.0.0_linux_amd64_honk.pem", "checksum_honk.pem", "checksum2_honk.pem", "artifact4_1.0.0_linux_amd64_honk.pem", "artifact5_honk.pem", "artifact5.tar.gz.sbom_honk.pem", "package1_honk.pem"},
},
}
@ -579,6 +583,8 @@ func testSign(tb testing.TB, ctx *context.Context, certificateNames, signaturePa
artifacts = append(artifacts, "linux_amd64/artifact4")
require.NoError(tb, os.WriteFile(filepath.Join(tmpdir, "artifact5.tar.gz"), []byte("foo"), 0o644))
artifacts = append(artifacts, "artifact5.tar.gz")
require.NoError(tb, os.WriteFile(filepath.Join(tmpdir, "artifact5.tar.gz.sbom"), []byte("sbom(foo)"), 0o644))
artifacts = append(artifacts, "artifact5.tar.gz.sbom")
ctx.Artifacts.Add(&artifact.Artifact{
Name: "artifact1",
Path: filepath.Join(tmpdir, "artifact1"),
@ -632,6 +638,11 @@ func testSign(tb testing.TB, ctx *context.Context, certificateNames, signaturePa
Path: filepath.Join(tmpdir, "artifact5.tar.gz"),
Type: artifact.UploadableSourceArchive,
})
ctx.Artifacts.Add(&artifact.Artifact{
Name: "artifact5.tar.gz.sbom",
Path: filepath.Join(tmpdir, "artifact5.tar.gz.sbom"),
Type: artifact.SBOM,
})
ctx.Artifacts.Add(&artifact.Artifact{
Name: "package1.deb",
Path: filepath.Join(tmpdir, "package1.deb"),
@ -677,12 +688,8 @@ func testSign(tb testing.TB, ctx *context.Context, certificateNames, signaturePa
certNames = append(certNames, cert.Name)
require.True(tb, strings.HasPrefix(cert.Path, ctx.Config.Dist))
}
sort.Strings(certificateNames)
sort.Strings(certNames)
require.Equal(tb, len(certificateNames), len(certificates))
if len(certificateNames) > 0 {
require.Equal(tb, certificateNames, certNames)
}
assert.ElementsMatch(tb, certificateNames, certNames)
// verify that only the artifacts and the signatures are in the dist dir
gotFiles := []string{}


@ -23,6 +23,7 @@ import (
"github.com/goreleaser/goreleaser/internal/pipe/krew"
"github.com/goreleaser/goreleaser/internal/pipe/nfpm"
"github.com/goreleaser/goreleaser/internal/pipe/publish"
"github.com/goreleaser/goreleaser/internal/pipe/sbom"
"github.com/goreleaser/goreleaser/internal/pipe/scoop"
"github.com/goreleaser/goreleaser/internal/pipe/semver"
"github.com/goreleaser/goreleaser/internal/pipe/sign"
@ -75,6 +76,7 @@ var Pipeline = append(
gofish.Pipe{}, // create gofish rig
krew.Pipe{}, // krew plugins
scoop.Pipe{}, // create scoop buckets
sbom.Pipe{}, // create SBOMs of artifacts
checksums.Pipe{}, // checksums of the files
sign.Pipe{}, // sign artifacts
docker.Pipe{}, // create and push docker images


@ -619,6 +619,17 @@ type NFPMOverridables struct {
APK NFPMAPK `yaml:"apk,omitempty"`
}
// SBOM config.
type SBOM struct {
ID string `yaml:"id,omitempty"`
Cmd string `yaml:"cmd,omitempty"`
Env []string `yaml:"env,omitempty"`
Args []string `yaml:"args,omitempty"`
Documents []string `yaml:"documents,omitempty"`
Artifacts string `yaml:"artifacts,omitempty"`
IDs []string `yaml:"ids,omitempty"`
}
// Sign config.
type Sign struct {
ID string `yaml:"id,omitempty"`
@ -836,6 +847,7 @@ type Project struct {
Source Source `yaml:"source,omitempty"`
GoMod GoMod `yaml:"gomod,omitempty"`
Announce Announce `yaml:"announce,omitempty"`
SBOMs []SBOM `yaml:"sboms,omitempty"`
UniversalBinaries []UniversalBinary `yaml:"universal_binaries,omitempty"`


@ -92,6 +92,7 @@ type Context struct {
SkipAnnounce bool
SkipSign bool
SkipValidate bool
SkipSBOMCataloging bool
RmDist bool
PreRelease bool
Deprecated bool


@ -23,6 +23,7 @@ import (
"github.com/goreleaser/goreleaser/internal/pipe/project"
"github.com/goreleaser/goreleaser/internal/pipe/reddit"
"github.com/goreleaser/goreleaser/internal/pipe/release"
"github.com/goreleaser/goreleaser/internal/pipe/sbom"
"github.com/goreleaser/goreleaser/internal/pipe/scoop"
"github.com/goreleaser/goreleaser/internal/pipe/sign"
"github.com/goreleaser/goreleaser/internal/pipe/slack"
@ -62,6 +63,7 @@ var Defaulters = []Defaulter{
checksums.Pipe{},
sign.Pipe{},
sign.DockerPipe{},
sbom.Pipe{},
docker.Pipe{},
docker.ManifestPipe{},
artifactory.Pipe{},


@ -0,0 +1,102 @@
# Cataloging artifacts
A Software Bill of Materials (SBOM) is a description of the components that make up a software artifact.
GoReleaser can create one or more SBOMs for the artifacts it generates.
## Usage
The artifact cataloging step can analyze one or more artifacts generated by GoReleaser and output one or more
SBOM files into the dist directory.
By default, one SBOM is created for each archive, using [Syft](https://github.com/anchore/syft).
To enable artifact cataloging, just add:
```yaml
# .goreleaser.yml
sboms:
- artifacts: archive
```
To customize the artifact cataloging pipeline you can use the following options:
```yaml
# .goreleaser.yml
sboms:
-
# ID of the sbom config, must be unique.
#
# Defaults to "default".
id: foo
# List of names/templates of the SBOM documents created at this step (relative to the dist dir).
#
# Each element configured is made available as variables. For example:
# documents: ["foo", "bar"]
#
# would make the following variables that can be referenced as template keys:
# document0: "foo"
# document1: "bar"
#
# Default value is conditional based on the value of "artifacts"
# - "binary": ["{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}.sbom"]
# - "any": []
# - otherwise: ["{{ .ArtifactName }}.sbom"]
#
# Note that multiple sbom values are only allowed if the value of "artifacts" is "any".
documents:
- "${artifact}.spdx.sbom"
# Path to the SBOM generator command
#
# Note: the process CWD will be set to the same location as "dist"
#
# Defaults to `syft`
cmd: syft
# Command line templateable arguments for the command
#
# Defaults to `["$artifact", "--file", "$document", "--output", "spdx-json"]`
args: ["$artifact", "--file", "$sbom", "--output", "spdx-json"]
# List of environment variables that will be passed to the SBOM command as well as the templates.
#
# Defaults to [ "SYFT_FILE_METADATA_CATALOGER_ENABLED=true" ]
env:
- FOO=bar
- HONK=honkhonk
# Which artifacts to catalog
#
# any: let the SBOM tool decide which artifacts in the cwd should be cataloged
# source: source archive
# package: linux packages (deb, rpm, apk)
# archive: archives from archive pipe
# binary: binaries output from the build stage
#
# Defaults to `archive`
artifacts: archive
# IDs of the artifacts to catalog.
#
# If `artifacts` is "source" or "any" then this field has no effect.
#
# Defaults to empty (which implies no filtering).
ids:
- foo
- bar
```
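The SBOM documents produced here are added to the artifact list, so they can be signed like any other artifact. A sketch mirroring this repository's own `.goreleaser.yml` (see the `signs` entry with `artifacts: sbom` in the diff above):

```yaml
# .goreleaser.yml: sign the generated SBOM documents with cosign
signs:
  - id: sign-sboms
    cmd: cosign
    stdin: '{{ .Env.COSIGN_PWD }}'
    args: ["sign-blob", "-key=cosign.key", "-output=${signature}", "${artifact}"]
    artifacts: sbom
```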
### Available variable names
These environment variables may be available in the templateable fields (see the sketch after this list):
- `${artifact}`: the path to the artifact being cataloged (unless the "artifacts" config item is "any")
- `${artifactID}`: the ID of the artifact being cataloged (unless the "artifacts" config item is "any")
- `${document}`: the filename of the generated SBOM (corresponds to `${document0}`)
- `${document#}`: the filenames of the generated SBOMs, where `#` is the list index under the "documents" config item (e.g. `${document0}`)
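As a minimal sketch, here is a configuration that references these variables explicitly; the `args` shown match the built-in default, while the document name is only illustrative:

```yaml
# .goreleaser.yml: explicit use of the $artifact and $document variables
sboms:
  - artifacts: binary
    documents:
      - "{{ .ArtifactName }}.spdx.sbom"
    args: ["$artifact", "--file", "$document", "--output", "spdx-json"]
```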
## Limitations
Container images generated by GoReleaser are not available to be cataloged by the SBOM tool.


@ -60,6 +60,7 @@ signs:
# package: linux packages (deb, rpm, apk)
# archive: archives from archive pipe
# binary: binaries if archiving format is set to binary
# sbom: any Software Bill of Materials generated for other artifacts
#
# Defaults to `none`
artifacts: all


@ -91,6 +91,7 @@ nav:
- customization/snapcraft.md
- customization/docker.md
- customization/docker_manifest.md
- customization/sbom.md
- Signing:
- Checksums and artifacts: customization/sign.md
- Docker Images and Manifests: customization/docker_sign.md