
feat: add sbom generation pipe (#2648)

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

Co-authored-by: Carlos Alexandro Becker <caarlos0@users.noreply.github.com>
Alex Goodman
2021-12-11 22:21:51 -05:00
committed by GitHub
parent a965789203
commit bfdec808ab
18 changed files with 1079 additions and 51 deletions
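For readers looking at how the new pipe is consumed: the feature is driven by a new sboms section of the GoReleaser configuration, which the code in this commit models as config.SBOM. Below is a minimal sketch, not part of the commit, of an explicit configuration equivalent to the defaults the pipe fills in (syft, archives, one SPDX JSON document per artifact), assuming the pkg/config package as it looks here; the fmt.Printf exists only to make the snippet runnable. In user-facing YAML this should correspond to an sboms list with the same keys in lowercase, per the documentation added elsewhere in this commit.

package main

import (
	"fmt"

	"github.com/goreleaser/goreleaser/pkg/config"
)

func main() {
	// Explicit equivalent of what Pipe.Default fills in for an empty sboms
	// entry: catalog archives with syft, writing one SPDX JSON document per
	// artifact and enabling syft's file metadata cataloger.
	project := config.Project{
		SBOMs: []config.SBOM{
			{
				ID:        "default",
				Cmd:       "syft",
				Artifacts: "archive",
				Documents: []string{"{{ .ArtifactName }}.sbom"},
				Args:      []string{"$artifact", "--file", "$document", "--output", "spdx-json"},
				Env:       []string{"SYFT_FILE_METADATA_CATALOGER_ENABLED=true"},
			},
		},
	}
	fmt.Printf("%+v\n", project.SBOMs[0])
}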

View File

@@ -61,6 +61,8 @@ const (
KrewPluginManifest
// ScoopManifest is an uploadable scoop manifest file.
ScoopManifest
// SBOM is a Software Bill of Materials file.
SBOM
)
func (t Type) String() string {
@@ -95,6 +97,8 @@ func (t Type) String() string {
return "Krew Plugin Manifest"
case ScoopManifest:
return "Scoop Manifest"
case SBOM:
return "SBOM"
default:
return "unknown"
}

View File

@@ -385,6 +385,7 @@ func TestTypeToString(t *testing.T) {
GoFishRig,
KrewPluginManifest,
ScoopManifest,
SBOM,
} {
t.Run(a.String(), func(t *testing.T) {
require.NotEqual(t, "unknown", a.String())

View File

@@ -70,6 +70,7 @@ func refresh(ctx *context.Context, filepath string) error {
artifact.ByType(artifact.UploadableBinary),
artifact.ByType(artifact.UploadableSourceArchive),
artifact.ByType(artifact.LinuxPackage),
artifact.ByType(artifact.SBOM),
)
if len(ctx.Config.Checksum.IDs) > 0 {
filter = artifact.And(filter, artifact.ByIDs(ctx.Config.Checksum.IDs...))

internal/pipe/sbom/sbom.go (new file, 282 lines added)
View File

@@ -0,0 +1,282 @@
package sbom
import (
"bytes"
"fmt"
"io"
"os"
"os/exec"
"path/filepath"
"strings"
"github.com/apex/log"
"github.com/goreleaser/goreleaser/internal/artifact"
"github.com/goreleaser/goreleaser/internal/gio"
"github.com/goreleaser/goreleaser/internal/ids"
"github.com/goreleaser/goreleaser/internal/logext"
"github.com/goreleaser/goreleaser/internal/semerrgroup"
"github.com/goreleaser/goreleaser/internal/tmpl"
"github.com/goreleaser/goreleaser/pkg/config"
"github.com/goreleaser/goreleaser/pkg/context"
)
// Environment variables to pass through to exec
var passthroughEnvVars = []string{"HOME", "USER", "USERPROFILE", "TMPDIR", "TMP", "TEMP", "PATH"}
// Pipe that catalogs common artifacts as an SBOM.
type Pipe struct{}
func (Pipe) String() string { return "cataloging artifacts" }
func (Pipe) Skip(ctx *context.Context) bool {
return ctx.SkipSBOMCataloging || len(ctx.Config.SBOMs) == 0
}
// Default sets the Pipe's defaults.
func (Pipe) Default(ctx *context.Context) error {
ids := ids.New("sboms")
for i := range ctx.Config.SBOMs {
cfg := &ctx.Config.SBOMs[i]
if cfg.Cmd == "" {
cfg.Cmd = "syft"
}
if cfg.Artifacts == "" {
cfg.Artifacts = "archive"
}
if len(cfg.Documents) == 0 {
switch cfg.Artifacts {
case "binary":
cfg.Documents = []string{"{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}.sbom"}
case "any":
cfg.Documents = []string{}
default:
cfg.Documents = []string{"{{ .ArtifactName }}.sbom"}
}
}
if cfg.Cmd == "syft" {
if len(cfg.Args) == 0 {
cfg.Args = []string{"$artifact", "--file", "$document", "--output", "spdx-json"}
}
if len(cfg.Env) == 0 && (cfg.Artifacts == "source" || cfg.Artifacts == "archive") {
cfg.Env = []string{
"SYFT_FILE_METADATA_CATALOGER_ENABLED=true",
}
}
}
if cfg.ID == "" {
cfg.ID = "default"
}
if cfg.Artifacts != "any" && len(cfg.Documents) > 1 {
return fmt.Errorf("multiple SBOM outputs when artifacts=%q is unsupported", cfg.Artifacts)
}
ids.Inc(cfg.ID)
}
return ids.Validate()
}
// Run executes the Pipe.
func (Pipe) Run(ctx *context.Context) error {
g := semerrgroup.New(ctx.Parallelism)
for _, cfg := range ctx.Config.SBOMs {
g.Go(catalogTask(ctx, cfg))
}
return g.Wait()
}
func catalogTask(ctx *context.Context, cfg config.SBOM) func() error {
return func() error {
var filters []artifact.Filter
switch cfg.Artifacts {
case "source":
filters = append(filters, artifact.ByType(artifact.UploadableSourceArchive))
if len(cfg.IDs) > 0 {
log.Warn("when artifacts is `source`, `ids` has no effect. ignoring")
}
case "archive":
filters = append(filters, artifact.ByType(artifact.UploadableArchive))
case "binary":
filters = append(filters, artifact.Or(
artifact.ByType(artifact.Binary),
artifact.ByType(artifact.UploadableBinary),
artifact.ByType(artifact.UniversalBinary),
))
case "package":
filters = append(filters, artifact.ByType(artifact.LinuxPackage))
case "any":
newArtifacts, err := catalogArtifact(ctx, cfg, nil)
if err != nil {
return err
}
for _, newArtifact := range newArtifacts {
ctx.Artifacts.Add(newArtifact)
}
return nil
default:
return fmt.Errorf("invalid list of artifacts to catalog: %s", cfg.Artifacts)
}
if len(cfg.IDs) > 0 {
filters = append(filters, artifact.ByIDs(cfg.IDs...))
}
artifacts := ctx.Artifacts.Filter(artifact.And(filters...)).List()
return catalog(ctx, cfg, artifacts)
}
}
func catalog(ctx *context.Context, cfg config.SBOM, artifacts []*artifact.Artifact) error {
for _, a := range artifacts {
newArtifacts, err := catalogArtifact(ctx, cfg, a)
if err != nil {
return err
}
for _, newArtifact := range newArtifacts {
ctx.Artifacts.Add(newArtifact)
}
}
return nil
}
func subprocessDistPath(distDir string, pathRelativeToCwd string) (string, error) {
cwd, err := os.Getwd()
if err != nil {
return "", err
}
if !filepath.IsAbs(distDir) {
distDir, err = filepath.Abs(distDir)
if err != nil {
return "", err
}
}
relativePath, err := filepath.Rel(cwd, distDir)
if err != nil {
return "", err
}
return strings.TrimPrefix(pathRelativeToCwd, relativePath+string(filepath.Separator)), nil
}
func catalogArtifact(ctx *context.Context, cfg config.SBOM, a *artifact.Artifact) ([]*artifact.Artifact, error) {
env := ctx.Env.Copy()
artifactDisplayName := "(any)"
templater := tmpl.New(ctx).WithEnv(env)
if a != nil {
procPath, err := subprocessDistPath(ctx.Config.Dist, a.Path)
if err != nil {
return nil, fmt.Errorf("cataloging artifacts failed: cannot determine artifact path for %q: %w", a.Path, err)
}
env["artifact"] = procPath
env["artifactID"] = a.ID()
templater = templater.WithArtifact(a, nil)
artifactDisplayName = a.Path
}
var paths []string
for idx, sbom := range cfg.Documents {
input := filepath.Join(ctx.Config.Dist, expand(sbom, env))
path, err := templater.Apply(input)
if err != nil {
return nil, fmt.Errorf("cataloging artifacts failed: %s: invalid template: %w", input, err)
}
path, err = filepath.Abs(path)
if err != nil {
return nil, fmt.Errorf("cataloging artifacts failed: unable to create artifact path %q: %w", sbom, err)
}
procPath, err := subprocessDistPath(ctx.Config.Dist, path)
if err != nil {
return nil, fmt.Errorf("cataloging artifacts failed: cannot determine document path for %q: %w", path, err)
}
env[fmt.Sprintf("document%d", idx)] = procPath
if idx == 0 {
env["document"] = procPath
}
paths = append(paths, procPath)
}
var names []string
for _, p := range paths {
names = append(names, filepath.Base(p))
}
fields := log.Fields{"cmd": cfg.Cmd, "artifact": artifactDisplayName, "sboms": strings.Join(names, ", ")}
// nolint:prealloc
var args []string
for _, arg := range cfg.Args {
renderedArg, err := templater.Apply(expand(arg, env))
if err != nil {
return nil, fmt.Errorf("cataloging artifacts failed: %s: invalid template: %w", arg, err)
}
args = append(args, renderedArg)
}
// The GoASTScanner flags this as a security risk.
// However, this works as intended. The nosec annotation
// tells the scanner to ignore this.
// #nosec
cmd := exec.CommandContext(ctx, cfg.Cmd, args...)
cmd.Env = []string{}
for _, key := range passthroughEnvVars {
if value := os.Getenv(key); value != "" {
cmd.Env = append(cmd.Env, fmt.Sprintf("%s=%s", key, value))
}
}
cmd.Env = append(cmd.Env, cfg.Env...)
cmd.Dir = ctx.Config.Dist
var b bytes.Buffer
w := gio.Safe(&b)
cmd.Stderr = io.MultiWriter(logext.NewWriter(fields, logext.Error), w)
cmd.Stdout = io.MultiWriter(logext.NewWriter(fields, logext.Info), w)
log.WithFields(fields).Info("cataloging")
if err := cmd.Run(); err != nil {
return nil, fmt.Errorf("cataloging artifacts: %s failed: %w: %s", cfg.Cmd, err, b.String())
}
var artifacts []*artifact.Artifact
for _, sbom := range cfg.Documents {
templater = tmpl.New(ctx).WithEnv(env)
if a != nil {
env["artifact"] = a.Name
templater = templater.WithArtifact(a, nil)
}
name, err := templater.Apply(expand(sbom, env))
if err != nil {
return nil, fmt.Errorf("cataloging artifacts failed: %s: invalid template: %w", a, err)
}
search := filepath.Join(ctx.Config.Dist, name)
matches, err := filepath.Glob(search)
if err != nil {
return nil, fmt.Errorf("cataloging artifacts: failed to find SBOM artifact %q: %w", search, err)
}
for _, match := range matches {
artifacts = append(artifacts, &artifact.Artifact{
Type: artifact.SBOM,
Name: name,
Path: match,
Extra: map[string]interface{}{
artifact.ExtraID: cfg.ID,
},
})
}
}
return artifacts, nil
}
func expand(s string, env map[string]string) string {
return os.Expand(s, func(key string) string {
return env[key]
})
}
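To make the argument handling above concrete, here is a standalone sketch (not part of the commit) of the placeholder step in catalogArtifact: the default syft args contain $artifact and $document, which the expand helper resolves from the env map via os.Expand before Go templating runs, and the resulting command is executed from the dist directory. The artifact and document names below are made up for illustration.

package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	// Hypothetical values: in the pipe these are set from subprocessDistPath
	// and the rendered Documents templates, relative to the dist directory.
	env := map[string]string{
		"artifact": "myproj_1.0.0_linux_amd64.tar.gz",
		"document": "myproj_1.0.0_linux_amd64.tar.gz.sbom",
	}
	args := []string{"$artifact", "--file", "$document", "--output", "spdx-json"}

	resolved := make([]string, 0, len(args))
	for _, a := range args {
		// Same mechanism as the expand helper above.
		resolved = append(resolved, os.Expand(a, func(key string) string { return env[key] }))
	}
	// The pipe then runs this from cmd.Dir = ctx.Config.Dist:
	fmt.Println("syft " + strings.Join(resolved, " "))
}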

View File

@@ -0,0 +1,593 @@
package sbom
import (
"fmt"
"os"
"path/filepath"
"sort"
"testing"
"github.com/stretchr/testify/assert"
"github.com/goreleaser/goreleaser/internal/artifact"
"github.com/goreleaser/goreleaser/pkg/config"
"github.com/goreleaser/goreleaser/pkg/context"
"github.com/stretchr/testify/require"
)
func TestDescription(t *testing.T) {
require.NotEmpty(t, Pipe{}.String())
}
func TestSBOMCatalogDefault(t *testing.T) {
defaultArgs := []string{"$artifact", "--file", "$document", "--output", "spdx-json"}
defaultSboms := []string{
"{{ .ArtifactName }}.sbom",
}
defaultCmd := "syft"
tests := []struct {
configs []config.SBOM
artifact string
cmd string
sboms []string
args []string
env []string
err bool
}{
{
configs: []config.SBOM{
{
// empty
},
},
artifact: "archive",
cmd: defaultCmd,
sboms: defaultSboms,
args: defaultArgs,
env: []string{
"SYFT_FILE_METADATA_CATALOGER_ENABLED=true",
},
},
{
configs: []config.SBOM{
{
Artifacts: "package",
},
},
artifact: "package",
cmd: defaultCmd,
sboms: defaultSboms,
args: defaultArgs,
},
{
configs: []config.SBOM{
{
Artifacts: "archive",
},
},
artifact: "archive",
cmd: defaultCmd,
sboms: defaultSboms,
args: defaultArgs,
env: []string{
"SYFT_FILE_METADATA_CATALOGER_ENABLED=true",
},
},
{
configs: []config.SBOM{
{
Artifacts: "any",
},
},
artifact: "any",
cmd: defaultCmd,
sboms: []string{},
args: defaultArgs,
},
{
configs: []config.SBOM{
{
Artifacts: "binary",
},
},
artifact: "binary",
cmd: defaultCmd,
sboms: []string{"{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}.sbom"},
args: defaultArgs,
},
{
configs: []config.SBOM{
{
Artifacts: "source",
},
},
artifact: "source",
cmd: defaultCmd,
sboms: defaultSboms,
args: defaultArgs,
env: []string{
"SYFT_FILE_METADATA_CATALOGER_ENABLED=true",
},
},
{
// multiple documents are not allowed when artifacts != "any"
configs: []config.SBOM{
{
Artifacts: "binary",
Documents: []string{
"doc1",
"doc2",
},
},
},
err: true,
},
}
for _, test := range tests {
t.Run(fmt.Sprintf("artifact=%q", test.configs[0].Artifacts), func(t *testing.T) {
ctx := &context.Context{
Config: config.Project{
SBOMs: test.configs,
},
}
err := Pipe{}.Default(ctx)
if test.err {
require.Error(t, err)
return
}
require.NoError(t, err)
require.Equal(t, ctx.Config.SBOMs[0].Cmd, test.cmd)
require.Equal(t, ctx.Config.SBOMs[0].Documents, test.sboms)
require.Equal(t, ctx.Config.SBOMs[0].Args, test.args)
require.Equal(t, ctx.Config.SBOMs[0].Env, test.env)
require.Equal(t, ctx.Config.SBOMs[0].Artifacts, test.artifact)
})
}
}
func TestSBOMCatalogInvalidArtifacts(t *testing.T) {
ctx := context.New(config.Project{})
ctx.Config.SBOMs = []config.SBOM{
{Artifacts: "foo"},
}
err := Pipe{}.Run(ctx)
require.EqualError(t, err, "invalid list of artifacts to catalog: foo")
}
func TestSeveralSBOMsWithTheSameID(t *testing.T) {
ctx := &context.Context{
Config: config.Project{
SBOMs: []config.SBOM{
{
ID: "a",
},
{
ID: "a",
},
},
},
}
require.EqualError(t, Pipe{}.Default(ctx), "found 2 sboms with the ID 'a', please fix your config")
}
func TestSkipCataloging(t *testing.T) {
t.Run("skip", func(t *testing.T) {
require.True(t, Pipe{}.Skip(context.New(config.Project{})))
})
t.Run("skip SBOM cataloging", func(t *testing.T) {
ctx := context.New(config.Project{
SBOMs: []config.SBOM{
{
Artifacts: "all",
},
},
})
ctx.SkipSBOMCataloging = true
require.True(t, Pipe{}.Skip(ctx))
})
t.Run("dont skip", func(t *testing.T) {
ctx := context.New(config.Project{
SBOMs: []config.SBOM{
{
Artifacts: "all",
},
},
})
require.False(t, Pipe{}.Skip(ctx))
})
}
func TestSBOMCatalogArtifacts(t *testing.T) {
tests := []struct {
desc string
ctx *context.Context
sbomPaths []string
sbomNames []string
expectedErrMsg string
}{
{
desc: "catalog errors",
expectedErrMsg: "cataloging artifacts: exit failed",
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{
Artifacts: "binary",
Cmd: "exit",
Args: []string{"1"},
},
},
},
),
},
{
desc: "invalid args template",
expectedErrMsg: `cataloging artifacts failed: ${FOO}-{{ .foo }{{}}{: invalid template: template: tmpl:1: unexpected "}" in operand`,
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{
Artifacts: "binary",
Cmd: "exit",
Args: []string{"${FOO}-{{ .foo }{{}}{"},
},
},
Env: []string{
"FOO=BAR",
},
},
),
},
{
desc: "catalog source archives",
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{Artifacts: "source"},
},
},
),
sbomPaths: []string{"artifact5.tar.gz.sbom"},
sbomNames: []string{"artifact5.tar.gz.sbom"},
},
{
desc: "catalog archives",
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{Artifacts: "archive"},
},
},
),
sbomPaths: []string{"artifact1.sbom", "artifact2.sbom"},
sbomNames: []string{"artifact1.sbom", "artifact2.sbom"},
},
{
desc: "catalog linux packages",
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{Artifacts: "package"},
},
},
),
sbomPaths: []string{"package1.deb.sbom"},
sbomNames: []string{"package1.deb.sbom"},
},
{
desc: "catalog binaries",
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{Artifacts: "binary"},
},
},
),
sbomPaths: []string{
"artifact3-name_1.2.2_linux_amd64.sbom",
"artifact4-name_1.2.2_linux_amd64.sbom",
},
sbomNames: []string{
"artifact3-name_1.2.2_linux_amd64.sbom",
"artifact4-name_1.2.2_linux_amd64.sbom",
},
},
{
desc: "manual cataloging",
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{
Artifacts: "any",
Args: []string{
"--file",
"$document0",
"--output",
"spdx-json",
"artifact5.tar.gz",
},
Documents: []string{
"final.sbom",
},
},
},
},
),
sbomPaths: []string{"final.sbom"},
sbomNames: []string{"final.sbom"},
},
{
desc: "multiple SBOM configs",
ctx: context.New(
config.Project{
Env: []string{
"SBOM_SUFFIX=s2-ish",
},
SBOMs: []config.SBOM{
{
ID: "s1",
Artifacts: "binary",
},
{
ID: "s2",
Artifacts: "archive",
Documents: []string{"{{ .ArtifactName }}.{{ .Env.SBOM_SUFFIX }}.sbom"},
},
},
},
),
sbomPaths: []string{
"artifact1.s2-ish.sbom",
"artifact2.s2-ish.sbom",
"artifact3-name_1.2.2_linux_amd64.sbom",
"artifact4-name_1.2.2_linux_amd64.sbom",
},
sbomNames: []string{
"artifact1.s2-ish.sbom",
"artifact2.s2-ish.sbom",
"artifact3-name_1.2.2_linux_amd64.sbom",
"artifact4-name_1.2.2_linux_amd64.sbom",
},
},
{
desc: "catalog artifacts with filtered by ID",
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{
Artifacts: "binary",
IDs: []string{"foo"},
},
},
},
),
sbomPaths: []string{
"artifact3-name_1.2.2_linux_amd64.sbom",
},
sbomNames: []string{
"artifact3-name_1.2.2_linux_amd64.sbom",
},
},
{
desc: "catalog binary artifacts with env in arguments",
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{
Artifacts: "binary",
Args: []string{
"--file",
"$document",
"--output",
"spdx-json",
"$artifact",
},
Documents: []string{
"{{ .ArtifactName }}.{{ .Env.TEST_USER }}.sbom",
},
},
},
Env: []string{
"TEST_USER=test-user-name",
},
},
),
sbomPaths: []string{
"artifact3-name.test-user-name.sbom",
"artifact4.test-user-name.sbom",
},
sbomNames: []string{
"artifact3-name.test-user-name.sbom",
"artifact4.test-user-name.sbom",
},
},
{
desc: "cataloging 'any' artifacts fails",
ctx: context.New(
config.Project{
SBOMs: []config.SBOM{
{
Artifacts: "any",
Cmd: "false",
},
},
},
),
expectedErrMsg: "cataloging artifacts: false failed: exit status 1: ",
},
}
for _, test := range tests {
t.Run(test.desc, func(t *testing.T) {
testSBOMCataloging(t, test.ctx, test.sbomPaths, test.sbomNames, test.expectedErrMsg)
})
}
}
func testSBOMCataloging(tb testing.TB, ctx *context.Context, sbomPaths, sbomNames []string, expectedErrMsg string) {
tb.Helper()
tmpdir := tb.TempDir()
ctx.Config.Dist = tmpdir
ctx.Version = "1.2.2"
// create some fake artifacts
artifacts := []string{"artifact1", "artifact2", "artifact3", "package1.deb"}
require.NoError(tb, os.Mkdir(filepath.Join(tmpdir, "linux_amd64"), os.ModePerm))
for _, f := range artifacts {
file := filepath.Join(tmpdir, f)
require.NoError(tb, os.WriteFile(file, []byte("foo"), 0o644))
}
require.NoError(tb, os.WriteFile(filepath.Join(tmpdir, "linux_amd64", "artifact4"), []byte("foo"), 0o644))
artifacts = append(artifacts, "linux_amd64/artifact4")
require.NoError(tb, os.WriteFile(filepath.Join(tmpdir, "artifact5.tar.gz"), []byte("foo"), 0o644))
artifacts = append(artifacts, "artifact5.tar.gz")
ctx.Artifacts.Add(&artifact.Artifact{
Name: "artifact1",
Path: filepath.Join(tmpdir, "artifact1"),
Type: artifact.UploadableArchive,
Extra: map[string]interface{}{
artifact.ExtraID: "foo",
},
})
ctx.Artifacts.Add(&artifact.Artifact{
Name: "artifact2",
Path: filepath.Join(tmpdir, "artifact2"),
Type: artifact.UploadableArchive,
Extra: map[string]interface{}{
artifact.ExtraID: "foo3",
},
})
ctx.Artifacts.Add(&artifact.Artifact{
Name: "artifact3-name",
Path: filepath.Join(tmpdir, "artifact3"),
Goos: "linux",
Goarch: "amd64",
Type: artifact.UploadableBinary,
Extra: map[string]interface{}{
artifact.ExtraID: "foo",
artifact.ExtraBinary: "artifact3-name",
},
})
ctx.Artifacts.Add(&artifact.Artifact{
Name: "artifact4",
Path: filepath.Join(tmpdir, "linux_amd64", "artifact4"),
Goos: "linux",
Goarch: "amd64",
Type: artifact.Binary,
Extra: map[string]interface{}{
artifact.ExtraID: "foo3",
artifact.ExtraBinary: "artifact4-name",
},
})
ctx.Artifacts.Add(&artifact.Artifact{
Name: "artifact5.tar.gz",
Path: filepath.Join(tmpdir, "artifact5.tar.gz"),
Type: artifact.UploadableSourceArchive,
})
ctx.Artifacts.Add(&artifact.Artifact{
Name: "package1.deb",
Path: filepath.Join(tmpdir, "package1.deb"),
Type: artifact.LinuxPackage,
Extra: map[string]interface{}{
artifact.ExtraID: "foo",
},
})
// configure the pipeline
require.NoError(tb, Pipe{}.Default(ctx))
// run the pipeline
if expectedErrMsg != "" {
err := Pipe{}.Run(ctx)
require.Error(tb, err)
require.Contains(tb, err.Error(), expectedErrMsg)
return
}
require.NoError(tb, Pipe{}.Run(ctx))
// ensure all artifacts have an ID
for _, arti := range ctx.Artifacts.Filter(artifact.ByType(artifact.SBOM)).List() {
require.NotEmptyf(tb, arti.ID(), ".Extra.ID on %s", arti.Path)
}
// verify that only the artifacts and the sboms are in the dist dir
gotFiles := []string{}
require.NoError(tb, filepath.Walk(tmpdir,
func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
relPath, err := filepath.Rel(tmpdir, path)
if err != nil {
return err
}
gotFiles = append(gotFiles, relPath)
return nil
}),
)
wantFiles := append(artifacts, sbomPaths...)
sort.Strings(wantFiles)
require.ElementsMatch(tb, wantFiles, gotFiles, "SBOM paths differ")
var sbomArtifacts []string
for _, sig := range ctx.Artifacts.Filter(artifact.ByType(artifact.SBOM)).List() {
sbomArtifacts = append(sbomArtifacts, sig.Name)
}
require.ElementsMatch(tb, sbomArtifacts, sbomNames, "SBOM names differ")
}
func Test_subprocessDistPath(t *testing.T) {
cwd, err := os.Getwd()
require.NoError(t, err)
tests := []struct {
name string
distDir string
pathRelativeToCwd string
expects string
}{
{
name: "relative dist with anchor",
distDir: "./dist",
pathRelativeToCwd: "dist/my.sbom",
expects: "my.sbom",
},
{
name: "relative dist without anchor",
distDir: "dist",
pathRelativeToCwd: "dist/my.sbom",
expects: "my.sbom",
},
{
name: "relative dist with nested resource",
distDir: "dist",
pathRelativeToCwd: "dist/something/my.sbom",
expects: "something/my.sbom",
},
{
name: "absolute dist with nested resource",
distDir: filepath.Join(cwd, "dist/"),
pathRelativeToCwd: "dist/something/my.sbom",
expects: "something/my.sbom",
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
actual, err := subprocessDistPath(test.distDir, test.pathRelativeToCwd)
require.NoError(t, err)
assert.Equal(t, test.expects, actual)
})
}
}
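As a companion to Test_subprocessDistPath, a self-contained sketch (again not part of the commit) of the path rewrite the helper performs: an artifact path given relative to the working directory becomes relative to the dist directory, since the catalog subprocess runs with cmd.Dir set to dist. The cwd and paths here are hypothetical stand-ins.

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

func main() {
	cwd := "/home/user/project"        // stand-in for os.Getwd()
	dist := filepath.Join(cwd, "dist") // already absolute in this sketch
	pathRelativeToCwd := "dist/something/my.sbom"

	// Same steps as subprocessDistPath: compute dist relative to cwd, then
	// strip that prefix from the artifact path.
	rel, err := filepath.Rel(cwd, dist)
	if err != nil {
		panic(err)
	}
	fmt.Println(strings.TrimPrefix(pathRelativeToCwd, rel+string(filepath.Separator)))
	// Prints: something/my.sbom
}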

View File

@@ -76,11 +76,14 @@ func (Pipe) Run(ctx *context.Context) error {
artifact.ByType(artifact.UploadableSourceArchive),
artifact.ByType(artifact.Checksum),
artifact.ByType(artifact.LinuxPackage),
artifact.ByType(artifact.SBOM),
))
case "archive":
filters = append(filters, artifact.ByType(artifact.UploadableArchive))
case "binary":
filters = append(filters, artifact.ByType(artifact.UploadableBinary))
case "sbom":
filters = append(filters, artifact.ByType(artifact.SBOM))
case "package":
filters = append(filters, artifact.ByType(artifact.LinuxPackage))
case "none": // TODO(caarlos0): this is not very useful, lets remove it.
@@ -200,7 +203,7 @@ func signone(ctx *context.Context, cfg config.Sign, art *artifact.Artifact) ([]*
return nil, nil
}
// re-execute template results, using artifact name as artifact so they eval to the actual needed file name.
// re-execute template results, using artifact desc as artifact so they eval to the actual needed file desc.
env["artifact"] = art.Name
name, _ = tmpl.New(ctx).WithEnv(env).Apply(expand(cfg.Signature, env)) // could never error as it passed the previous check
cert, _ = tmpl.New(ctx).WithEnv(env).Apply(expand(cfg.Certificate, env)) // could never error as it passed the previous check

View File

@@ -12,6 +12,8 @@ import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/goreleaser/goreleaser/internal/artifact"
"github.com/goreleaser/goreleaser/pkg/config"
"github.com/goreleaser/goreleaser/pkg/context"
@@ -183,18 +185,6 @@ func TestSignArtifacts(t *testing.T) {
},
),
},
{
desc: "sign single",
ctx: context.New(
config.Project{
Signs: []config.Sign{
{Artifacts: "all"},
},
},
),
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
},
{
desc: "sign all artifacts",
ctx: context.New(
@@ -206,8 +196,8 @@ func TestSignArtifacts(t *testing.T) {
},
},
),
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
},
{
desc: "sign archives",
@@ -357,6 +347,20 @@ func TestSignArtifacts(t *testing.T) {
signaturePaths: []string{"artifact5.tar.gz.sig"},
signatureNames: []string{"artifact5.tar.gz.sig"},
},
{
desc: "sign only sbom",
ctx: context.New(
config.Project{
Signs: []config.Sign{
{
Artifacts: "sbom",
},
},
},
),
signaturePaths: []string{"artifact5.tar.gz.sbom.sig"},
signatureNames: []string{"artifact5.tar.gz.sbom.sig"},
},
{
desc: "sign all artifacts with env",
ctx: context.New(
@@ -379,8 +383,8 @@ func TestSignArtifacts(t *testing.T) {
},
},
),
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
},
{
desc: "sign all artifacts with template",
@@ -404,8 +408,8 @@ func TestSignArtifacts(t *testing.T) {
},
},
),
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
},
{
desc: "sign single with password from stdin",
@@ -432,8 +436,8 @@ func TestSignArtifacts(t *testing.T) {
},
},
),
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
user: passwordUser,
},
{
@@ -462,8 +466,8 @@ func TestSignArtifacts(t *testing.T) {
},
},
),
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
user: passwordUser,
},
{
@@ -491,8 +495,8 @@ func TestSignArtifacts(t *testing.T) {
},
},
),
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
user: passwordUser,
},
{
@@ -545,9 +549,9 @@ func TestSignArtifacts(t *testing.T) {
},
},
),
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "package1.deb.sig"},
certificateNames: []string{"artifact1_honk.pem", "artifact2_honk.pem", "artifact3_1.0.0_linux_amd64_honk.pem", "checksum_honk.pem", "checksum2_honk.pem", "artifact4_1.0.0_linux_amd64_honk.pem", "artifact5_honk.pem", "package1_honk.pem"},
signaturePaths: []string{"artifact1.sig", "artifact2.sig", "artifact3.sig", "checksum.sig", "checksum2.sig", "linux_amd64/artifact4.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
signatureNames: []string{"artifact1.sig", "artifact2.sig", "artifact3_1.0.0_linux_amd64.sig", "checksum.sig", "checksum2.sig", "artifact4_1.0.0_linux_amd64.sig", "artifact5.tar.gz.sig", "artifact5.tar.gz.sbom.sig", "package1.deb.sig"},
certificateNames: []string{"artifact1_honk.pem", "artifact2_honk.pem", "artifact3_1.0.0_linux_amd64_honk.pem", "checksum_honk.pem", "checksum2_honk.pem", "artifact4_1.0.0_linux_amd64_honk.pem", "artifact5_honk.pem", "artifact5.tar.gz.sbom_honk.pem", "package1_honk.pem"},
},
}
@@ -579,6 +583,8 @@ func testSign(tb testing.TB, ctx *context.Context, certificateNames, signaturePa
artifacts = append(artifacts, "linux_amd64/artifact4")
require.NoError(tb, os.WriteFile(filepath.Join(tmpdir, "artifact5.tar.gz"), []byte("foo"), 0o644))
artifacts = append(artifacts, "artifact5.tar.gz")
require.NoError(tb, os.WriteFile(filepath.Join(tmpdir, "artifact5.tar.gz.sbom"), []byte("sbom(foo)"), 0o644))
artifacts = append(artifacts, "artifact5.tar.gz.sbom")
ctx.Artifacts.Add(&artifact.Artifact{
Name: "artifact1",
Path: filepath.Join(tmpdir, "artifact1"),
@@ -632,6 +638,11 @@ func testSign(tb testing.TB, ctx *context.Context, certificateNames, signaturePa
Path: filepath.Join(tmpdir, "artifact5.tar.gz"),
Type: artifact.UploadableSourceArchive,
})
ctx.Artifacts.Add(&artifact.Artifact{
Name: "artifact5.tar.gz.sbom",
Path: filepath.Join(tmpdir, "artifact5.tar.gz.sbom"),
Type: artifact.SBOM,
})
ctx.Artifacts.Add(&artifact.Artifact{
Name: "package1.deb",
Path: filepath.Join(tmpdir, "package1.deb"),
@@ -677,12 +688,8 @@ func testSign(tb testing.TB, ctx *context.Context, certificateNames, signaturePa
certNames = append(certNames, cert.Name)
require.True(tb, strings.HasPrefix(cert.Path, ctx.Config.Dist))
}
sort.Strings(certificateNames)
sort.Strings(certNames)
require.Equal(tb, len(certificateNames), len(certificates))
if len(certificateNames) > 0 {
require.Equal(tb, certificateNames, certNames)
}
assert.ElementsMatch(tb, certificateNames, certNames)
// verify that only the artifacts and the signatures are in the dist dir
gotFiles := []string{}

View File

@@ -23,6 +23,7 @@ import (
"github.com/goreleaser/goreleaser/internal/pipe/krew"
"github.com/goreleaser/goreleaser/internal/pipe/nfpm"
"github.com/goreleaser/goreleaser/internal/pipe/publish"
"github.com/goreleaser/goreleaser/internal/pipe/sbom"
"github.com/goreleaser/goreleaser/internal/pipe/scoop"
"github.com/goreleaser/goreleaser/internal/pipe/semver"
"github.com/goreleaser/goreleaser/internal/pipe/sign"
@@ -75,6 +76,7 @@ var Pipeline = append(
gofish.Pipe{}, // create gofish rig
krew.Pipe{}, // krew plugins
scoop.Pipe{}, // create scoop buckets
sbom.Pipe{}, // create SBOMs of artifacts
checksums.Pipe{}, // checksums of the files
sign.Pipe{}, // sign artifacts
docker.Pipe{}, // create and push docker images