Mirror of https://github.com/SAP/jenkins-library.git (synced 2025-01-04 04:07:16 +02:00)
feat(detectExecuteScan): SARIF export and GH issue creation (#3637)
* Added SARIF and GH issue creation
parent 3f6e4b9e3b · commit c30e93bc6a
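Before the file-by-file diff: the commit adds two package-level helpers to pkg/blackduck (CreateSarifResultFile and WriteSarifFile) plus a generic reporting.UploadMultipleReportsToGithub, and wires them into detectExecuteScan. As a rough usage sketch only, not code from this commit, the new Black Duck helpers could be exercised in isolation roughly as follows; the finding values and the use of the repository's mock file utilities instead of real file I/O are illustrative assumptions:

package main

import (
    "log"

    bd "github.com/SAP/jenkins-library/pkg/blackduck"
    "github.com/SAP/jenkins-library/pkg/mock"
)

// Hypothetical standalone sketch, not part of the commit below.
func main() {
    // Findings shaped the way the Black Duck client returns them (see the pkg/blackduck tests added here).
    vulns := bd.Vulnerabilities{Items: []bd.Vulnerability{
        {
            Name:    "example-component", // component name, reused as the SARIF artifact location
            Version: "1.2.3",
            VulnerabilityWithRemediation: bd.VulnerabilityWithRemediation{
                VulnerabilityName: "CVE-2022-0001",
                Severity:          "HIGH",
                Description:       "example finding",
            },
        },
    }}

    // Convert the findings into a SARIF document and write
    // blackduck/piper_detect_vulnerability.sarif via the mock file utils.
    sarif := bd.CreateSarifResultFile(&vulns)
    paths, err := bd.WriteSarifFile(sarif, &mock.FilesMock{})
    if err != nil {
        log.Fatal(err)
    }
    log.Printf("SARIF report registered as %v -> %v", paths[0].Name, paths[0].Target)
}

In the step itself (see postScanChecksAndReporting below) the same calls run against the real file system and the resulting paths are persisted via piperutils.PersistReportsAndLinks.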
@@ -357,7 +357,7 @@ func verifyCxProjectCompliance(config checkmarxExecuteScanOptions, sys checkmarx
    if insecure && config.CreateResultIssue && len(config.GithubToken) > 0 && len(config.GithubAPIURL) > 0 && len(config.Owner) > 0 && len(config.Repository) > 0 {
        log.Entry().Debug("Creating/updating GitHub issue with check results")
        err := reporting.UploadSingleReportToGithub(scanReport, config.GithubToken, config.GithubAPIURL, config.Owner, config.Repository, "Checkmarx SAST Results", config.Assignees, utils)
        err := reporting.UploadSingleReportToGithub(scanReport, config.GithubToken, config.GithubAPIURL, config.Owner, config.Repository, config.Assignees, utils)
        if err != nil {
            return fmt.Errorf("failed to upload scan results into GitHub: %w", err)
        }
@ -14,17 +14,18 @@ import (
|
||||
"time"
|
||||
|
||||
bd "github.com/SAP/jenkins-library/pkg/blackduck"
|
||||
piperGithub "github.com/SAP/jenkins-library/pkg/github"
|
||||
piperhttp "github.com/SAP/jenkins-library/pkg/http"
|
||||
"github.com/SAP/jenkins-library/pkg/maven"
|
||||
"github.com/SAP/jenkins-library/pkg/reporting"
|
||||
"github.com/SAP/jenkins-library/pkg/versioning"
|
||||
"github.com/pkg/errors"
|
||||
|
||||
"github.com/SAP/jenkins-library/pkg/command"
|
||||
"github.com/SAP/jenkins-library/pkg/log"
|
||||
"github.com/SAP/jenkins-library/pkg/piperutils"
|
||||
"github.com/SAP/jenkins-library/pkg/reporting"
|
||||
"github.com/SAP/jenkins-library/pkg/telemetry"
|
||||
"github.com/SAP/jenkins-library/pkg/toolrecord"
|
||||
"github.com/SAP/jenkins-library/pkg/versioning"
|
||||
)
|
||||
|
||||
type detectUtils interface {
|
||||
@ -32,12 +33,21 @@ type detectUtils interface {
|
||||
FileExists(filename string) (bool, error)
|
||||
FileRemove(filename string) error
|
||||
Copy(src, dest string) (int64, error)
|
||||
Move(src, dest string) error
|
||||
DirExists(dest string) (bool, error)
|
||||
FileRead(path string) ([]byte, error)
|
||||
FileWrite(path string, content []byte, perm os.FileMode) error
|
||||
MkdirAll(path string, perm os.FileMode) error
|
||||
Chmod(path string, mode os.FileMode) error
|
||||
Glob(pattern string) (matches []string, err error)
|
||||
Chdir(path string) error
|
||||
TempDir(string, string) (string, error)
|
||||
RemoveAll(string) error
|
||||
FileRename(string, string) error
|
||||
Getwd() (string, error)
|
||||
Symlink(oldname string, newname string) error
|
||||
SHA256(path string) (string, error)
|
||||
CurrentTime(format string) string
|
||||
|
||||
GetExitCode() int
|
||||
GetOsEnv() []string
|
||||
@ -49,6 +59,8 @@ type detectUtils interface {
|
||||
RunShell(shell, script string) error
|
||||
|
||||
DownloadFile(url, filename string, header http.Header, cookies []*http.Cookie) error
|
||||
|
||||
CreateIssue(ghCreateIssueOptions *piperGithub.CreateIssueOptions) error
|
||||
}
|
||||
|
||||
type detectUtilsBundle struct {
|
||||
@ -57,6 +69,11 @@ type detectUtilsBundle struct {
|
||||
*piperhttp.Client
|
||||
}
|
||||
|
||||
// CreateIssue supplies capability for GitHub issue creation
|
||||
func (d *detectUtilsBundle) CreateIssue(ghCreateIssueOptions *piperGithub.CreateIssueOptions) error {
|
||||
return piperGithub.CreateIssue(ghCreateIssueOptions)
|
||||
}
|
||||
|
||||
type blackduckSystem struct {
|
||||
Client bd.Client
|
||||
}
|
||||
@ -380,68 +397,11 @@ func getVersionName(config detectExecuteScanOptions) string {
|
||||
return detectVersionName
|
||||
}
|
||||
|
||||
func postScanChecksAndReporting(config detectExecuteScanOptions, influx *detectExecuteScanInflux, utils detectUtils, sys *blackduckSystem) error {
|
||||
vulns, _, err := getVulnsAndComponents(config, influx, sys)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
scanReport := createVulnerabilityReport(config, vulns, influx, sys)
|
||||
paths, err := writeVulnerabilityReports(scanReport, config, utils)
|
||||
|
||||
policyStatus, err := getPolicyStatus(config, influx, sys)
|
||||
policyReport := createPolicyStatusReport(config, policyStatus, influx, sys)
|
||||
policyReportPaths, err := writePolicyStatusReports(policyReport, config, utils)
|
||||
|
||||
paths = append(paths, policyReportPaths...)
|
||||
piperutils.PersistReportsAndLinks("detectExecuteScan", "", paths, nil)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "failed to check and report scan results")
|
||||
}
|
||||
policyJsonErr, violationCount := writeIpPolicyJson(config, utils, paths, sys)
|
||||
if policyJsonErr != nil {
|
||||
return errors.Wrapf(policyJsonErr, "failed to write IP policy violations json file")
|
||||
}
|
||||
if violationCount > 0 {
|
||||
return errors.Errorf("License Policy Violations found")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func getVulnsAndComponents(config detectExecuteScanOptions, influx *detectExecuteScanInflux, sys *blackduckSystem) (*bd.Vulnerabilities, *bd.Components, error) {
|
||||
detectVersionName := getVersionName(config)
|
||||
vulns, err := sys.Client.GetVulnerabilities(config.ProjectName, detectVersionName)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
majorVulns := 0
|
||||
activeVulns := 0
|
||||
for _, vuln := range vulns.Items {
|
||||
if isActiveVulnerability(vuln) {
|
||||
activeVulns++
|
||||
if isMajorVulnerability(vuln) {
|
||||
majorVulns++
|
||||
}
|
||||
}
|
||||
}
|
||||
influx.detect_data.fields.vulnerabilities = activeVulns
|
||||
influx.detect_data.fields.major_vulnerabilities = majorVulns
|
||||
influx.detect_data.fields.minor_vulnerabilities = activeVulns - majorVulns
|
||||
|
||||
components, err := sys.Client.GetComponents(config.ProjectName, detectVersionName)
|
||||
if err != nil {
|
||||
return vulns, nil, err
|
||||
}
|
||||
influx.detect_data.fields.components = components.TotalCount
|
||||
|
||||
return vulns, components, nil
|
||||
}
|
||||
|
||||
func createVulnerabilityReport(config detectExecuteScanOptions, vulns *bd.Vulnerabilities, influx *detectExecuteScanInflux, sys *blackduckSystem) reporting.ScanReport {
|
||||
versionName := getVersionName(config)
|
||||
versionUrl, _ := sys.Client.GetProjectVersionLink(config.ProjectName, versionName)
|
||||
scanReport := reporting.ScanReport{
|
||||
Title: "BlackDuck Security Vulnerability Report",
|
||||
ReportTitle: "BlackDuck Security Vulnerability Report",
|
||||
Subheaders: []reporting.Subheader{
|
||||
{Description: "BlackDuck Project Name ", Details: config.ProjectName},
|
||||
{Description: "BlackDuck Project Version ", Details: fmt.Sprintf("<a href='%v'>%v</a>", versionUrl, versionName)},
|
||||
@ -501,29 +461,118 @@ func createVulnerabilityReport(config detectExecuteScanOptions, vulns *bd.Vulner
|
||||
return scanReport
|
||||
}
|
||||
|
||||
func writeVulnerabilityReports(scanReport reporting.ScanReport, config detectExecuteScanOptions, utils detectUtils) ([]piperutils.Path, error) {
|
||||
reportPaths := []piperutils.Path{}
|
||||
|
||||
htmlReport, _ := scanReport.ToHTML()
|
||||
htmlReportPath := "piper_detect_vulnerability_report.html"
|
||||
if err := utils.FileWrite(htmlReportPath, htmlReport, 0666); err != nil {
|
||||
log.SetErrorCategory(log.ErrorConfiguration)
|
||||
return reportPaths, errors.Wrapf(err, "failed to write html report")
|
||||
func isActiveVulnerability(v bd.Vulnerability) bool {
|
||||
switch v.VulnerabilityWithRemediation.RemediationStatus {
|
||||
case "NEW":
|
||||
return true
|
||||
case "REMEDIATION_REQUIRED":
|
||||
return true
|
||||
case "NEEDS_REVIEW":
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
reportPaths = append(reportPaths, piperutils.Path{Name: "BlackDuck Vulnerability Report", Target: htmlReportPath})
|
||||
}
|
||||
|
||||
jsonReport, _ := scanReport.ToJSON()
|
||||
if exists, _ := utils.DirExists(reporting.StepReportDirectory); !exists {
|
||||
err := utils.MkdirAll(reporting.StepReportDirectory, 0777)
|
||||
func isMajorVulnerability(v bd.Vulnerability) bool {
|
||||
switch v.VulnerabilityWithRemediation.Severity {
|
||||
case "CRITICAL":
|
||||
return true
|
||||
case "HIGH":
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func postScanChecksAndReporting(config detectExecuteScanOptions, influx *detectExecuteScanInflux, utils detectUtils, sys *blackduckSystem) error {
|
||||
errorsOccured := []string{}
|
||||
vulns, _, err := getVulnsAndComponents(config, influx, sys)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "failed to fetch vulnerabilities")
|
||||
}
|
||||
|
||||
if config.CreateResultIssue && len(config.GithubToken) > 0 && len(config.GithubAPIURL) > 0 && len(config.Owner) > 0 && len(config.Repository) > 0 {
|
||||
log.Entry().Debugf("Creating result issues for %v alert(s)", len(vulns.Items))
|
||||
issueDetails := make([]reporting.IssueDetail, len(vulns.Items))
|
||||
piperutils.CopyAtoB(vulns.Items, issueDetails)
|
||||
err = reporting.UploadMultipleReportsToGithub(&issueDetails, config.GithubToken, config.GithubAPIURL, config.Owner, config.Repository, config.Assignees, config.CustomTLSCertificateLinks, utils)
|
||||
if err != nil {
|
||||
return reportPaths, errors.Wrap(err, "failed to create reporting directory")
|
||||
errorsOccured = append(errorsOccured, fmt.Sprint(err))
|
||||
}
|
||||
}
|
||||
if err := utils.FileWrite(filepath.Join(reporting.StepReportDirectory, fmt.Sprintf("detectExecuteScan_oss_%v.json", fmt.Sprintf("%v", time.Now()))), jsonReport, 0666); err != nil {
|
||||
return reportPaths, errors.Wrapf(err, "failed to write json report")
|
||||
|
||||
sarif := bd.CreateSarifResultFile(vulns)
|
||||
paths, err := bd.WriteSarifFile(sarif, utils)
|
||||
if err != nil {
|
||||
errorsOccured = append(errorsOccured, fmt.Sprint(err))
|
||||
}
|
||||
|
||||
return reportPaths, nil
|
||||
scanReport := createVulnerabilityReport(config, vulns, influx, sys)
|
||||
vulnerabilityReportPaths, err := bd.WriteVulnerabilityReports(scanReport, utils)
|
||||
if err != nil {
|
||||
errorsOccured = append(errorsOccured, fmt.Sprint(err))
|
||||
}
|
||||
paths = append(paths, vulnerabilityReportPaths...)
|
||||
|
||||
policyStatus, err := getPolicyStatus(config, influx, sys)
|
||||
policyReport := createPolicyStatusReport(config, policyStatus, influx, sys)
|
||||
policyReportPaths, err := writePolicyStatusReports(policyReport, config, utils)
|
||||
if err != nil {
|
||||
errorsOccured = append(errorsOccured, fmt.Sprint(err))
|
||||
}
|
||||
paths = append(paths, policyReportPaths...)
|
||||
|
||||
piperutils.PersistReportsAndLinks("detectExecuteScan", "", paths, nil)
|
||||
if err != nil {
|
||||
errorsOccured = append(errorsOccured, fmt.Sprint(err))
|
||||
}
|
||||
|
||||
err, violationCount := writeIpPolicyJson(config, utils, paths, sys)
|
||||
if err != nil {
|
||||
errorsOccured = append(errorsOccured, fmt.Sprint(err))
|
||||
}
|
||||
|
||||
if violationCount > 0 {
|
||||
log.SetErrorCategory(log.ErrorCompliance)
|
||||
errorsOccured = append(errorsOccured, fmt.Sprint("License Policy Violations found"))
|
||||
}
|
||||
|
||||
if len(errorsOccured) > 0 {
|
||||
return fmt.Errorf(strings.Join(errorsOccured, ": "))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func getVulnsAndComponents(config detectExecuteScanOptions, influx *detectExecuteScanInflux, sys *blackduckSystem) (*bd.Vulnerabilities, *bd.Components, error) {
|
||||
detectVersionName := getVersionName(config)
|
||||
vulns, err := sys.Client.GetVulnerabilities(config.ProjectName, detectVersionName)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
majorVulns := 0
|
||||
activeVulns := 0
|
||||
for _, vuln := range vulns.Items {
|
||||
if isActiveVulnerability(vuln) {
|
||||
activeVulns++
|
||||
if isMajorVulnerability(vuln) {
|
||||
majorVulns++
|
||||
}
|
||||
}
|
||||
}
|
||||
influx.detect_data.fields.vulnerabilities = activeVulns
|
||||
influx.detect_data.fields.major_vulnerabilities = majorVulns
|
||||
influx.detect_data.fields.minor_vulnerabilities = activeVulns - majorVulns
|
||||
|
||||
components, err := sys.Client.GetComponents(config.ProjectName, detectVersionName)
|
||||
if err != nil {
|
||||
return vulns, nil, err
|
||||
}
|
||||
influx.detect_data.fields.components = components.TotalCount
|
||||
|
||||
return vulns, components, nil
|
||||
}
|
||||
|
||||
func getPolicyStatus(config detectExecuteScanOptions, influx *detectExecuteScanInflux, sys *blackduckSystem) (*bd.PolicyStatus, error) {
|
||||
@ -545,7 +594,7 @@ func createPolicyStatusReport(config detectExecuteScanOptions, policyStatus *bd.
|
||||
versionName := getVersionName(config)
|
||||
versionUrl, _ := sys.Client.GetProjectVersionLink(config.ProjectName, versionName)
|
||||
policyReport := reporting.ScanReport{
|
||||
Title: "BlackDuck Policy Violations Report",
|
||||
ReportTitle: "BlackDuck Policy Violations Report",
|
||||
Subheaders: []reporting.Subheader{
|
||||
{Description: "BlackDuck project name ", Details: config.ProjectName},
|
||||
{Description: "BlackDuck project version name", Details: fmt.Sprintf("<a href='%v'>%v</a>", versionUrl, versionName)},
|
||||
@ -658,30 +707,6 @@ func isActivePolicyViolation(status string) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func isActiveVulnerability(v bd.Vulnerability) bool {
|
||||
switch v.VulnerabilityWithRemediation.RemediationStatus {
|
||||
case "NEW":
|
||||
return true
|
||||
case "REMEDIATION_REQUIRED":
|
||||
return true
|
||||
case "NEEDS_REVIEW":
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func isMajorVulnerability(v bd.Vulnerability) bool {
|
||||
switch v.VulnerabilityWithRemediation.Severity {
|
||||
case "CRITICAL":
|
||||
return true
|
||||
case "HIGH":
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// create toolrecord file for detectExecute
|
||||
func createToolRecordDetect(workspace string, config detectExecuteScanOptions, sys *blackduckSystem) (string, error) {
|
||||
record := toolrecord.New(workspace, "detectExecute", config.ServerURL)
|
||||
|
@ -46,6 +46,13 @@ type detectExecuteScanOptions struct {
|
||||
DetectTools []string `json:"detectTools,omitempty"`
|
||||
ScanOnChanges bool `json:"scanOnChanges,omitempty"`
|
||||
CustomEnvironmentVariables []string `json:"customEnvironmentVariables,omitempty"`
|
||||
GithubToken string `json:"githubToken,omitempty"`
|
||||
CreateResultIssue bool `json:"createResultIssue,omitempty"`
|
||||
GithubAPIURL string `json:"githubApiUrl,omitempty"`
|
||||
Owner string `json:"owner,omitempty"`
|
||||
Repository string `json:"repository,omitempty"`
|
||||
Assignees []string `json:"assignees,omitempty"`
|
||||
CustomTLSCertificateLinks []string `json:"customTlsCertificateLinks,omitempty"`
|
||||
}
|
||||
|
||||
type detectExecuteScanInflux struct {
|
||||
@ -175,6 +182,7 @@ Please configure your BlackDuck server Url using the serverUrl parameter and the
|
||||
return err
|
||||
}
|
||||
log.RegisterSecret(stepConfig.Token)
|
||||
log.RegisterSecret(stepConfig.GithubToken)
|
||||
|
||||
if len(GeneralConfig.HookConfig.SentryConfig.Dsn) > 0 {
|
||||
sentryHook := log.NewSentryHook(GeneralConfig.HookConfig.SentryConfig.Dsn, GeneralConfig.CorrelationID)
|
||||
@ -259,6 +267,13 @@ func addDetectExecuteScanFlags(cmd *cobra.Command, stepConfig *detectExecuteScan
|
||||
cmd.Flags().StringSliceVar(&stepConfig.DetectTools, "detectTools", []string{}, "The type of BlackDuck scanners to include while running the BlackDuck scan. By default All scanners are included. For the complete list of possible values, Please refer [Synopsys detect documentation](https://synopsys.atlassian.net/wiki/spaces/INTDOCS/pages/631407160/Configuring+Detect+General+Properties#Detect-tools-included)")
|
||||
cmd.Flags().BoolVar(&stepConfig.ScanOnChanges, "scanOnChanges", false, "This flag determines if the scan is submitted to the server. If set to true, then the scan request is submitted to the server only when changes are detected in the Open Source Bill of Materials If the flag is set to false, then the scan request is submitted to server regardless of any changes. For more details please refer to the [documentation](https://github.com/blackducksoftware/detect_rescan/blob/master/README.md)")
|
||||
cmd.Flags().StringSliceVar(&stepConfig.CustomEnvironmentVariables, "customEnvironmentVariables", []string{}, "A list of environment variables which can be set to prepare the environment to run a BlackDuck scan. This includes a list of environment variables defined by Synopsys. The full list can be found [here](https://synopsys.atlassian.net/wiki/spaces/IA/pages/1562214619/Shell+Script+Reference+6.9.0) This list affects the detect script downloaded while running the scan. By default detect7.sh will be used. To continue using detect6, please use DETECT_LATEST_RELEASE_VERSION and set it to a valid value defined [here](https://community.synopsys.com/s/document-item?bundleId=integrations-detect&topicId=releasenotes.html&_LANG=enus)")
|
||||
cmd.Flags().StringVar(&stepConfig.GithubToken, "githubToken", os.Getenv("PIPER_githubToken"), "GitHub personal access token as per https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line")
|
||||
cmd.Flags().BoolVar(&stepConfig.CreateResultIssue, "createResultIssue", false, "Whether the step creates a GitHub issue containing the scan results in the originating repo. Since optimized pipelines are headless the creation is implicitly activated for scheduled runs.")
|
||||
cmd.Flags().StringVar(&stepConfig.GithubAPIURL, "githubApiUrl", `https://api.github.com`, "Set the GitHub API URL.")
|
||||
cmd.Flags().StringVar(&stepConfig.Owner, "owner", os.Getenv("PIPER_owner"), "Set the GitHub organization.")
|
||||
cmd.Flags().StringVar(&stepConfig.Repository, "repository", os.Getenv("PIPER_repository"), "Set the GitHub repository.")
|
||||
cmd.Flags().StringSliceVar(&stepConfig.Assignees, "assignees", []string{``}, "Defines the assignees for the Github Issue created/updated with the results of the scan as a list of login names.")
|
||||
cmd.Flags().StringSliceVar(&stepConfig.CustomTLSCertificateLinks, "customTlsCertificateLinks", []string{}, "List of download links to custom TLS certificates. This is required to ensure trusted connections to instances with repositories (like nexus) when publish flag is set to true.")
|
||||
|
||||
cmd.MarkFlagRequired("token")
|
||||
cmd.MarkFlagRequired("projectName")
|
||||
@ -515,6 +530,95 @@ func detectExecuteScanMetadata() config.StepData {
|
||||
Aliases: []config.Alias{},
|
||||
Default: []string{},
|
||||
},
|
||||
{
|
||||
Name: "githubToken",
|
||||
ResourceRef: []config.ResourceReference{
|
||||
{
|
||||
Name: "githubTokenCredentialsId",
|
||||
Type: "secret",
|
||||
},
|
||||
|
||||
{
|
||||
Name: "githubVaultSecretName",
|
||||
Type: "vaultSecret",
|
||||
Default: "github",
|
||||
},
|
||||
},
|
||||
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{{Name: "access_token"}},
|
||||
Default: os.Getenv("PIPER_githubToken"),
|
||||
},
|
||||
{
|
||||
Name: "createResultIssue",
|
||||
ResourceRef: []config.ResourceReference{
|
||||
{
|
||||
Name: "commonPipelineEnvironment",
|
||||
Param: "custom/optimizedAndScheduled",
|
||||
},
|
||||
},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "bool",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: false,
|
||||
},
|
||||
{
|
||||
Name: "githubApiUrl",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: `https://api.github.com`,
|
||||
},
|
||||
{
|
||||
Name: "owner",
|
||||
ResourceRef: []config.ResourceReference{
|
||||
{
|
||||
Name: "commonPipelineEnvironment",
|
||||
Param: "github/owner",
|
||||
},
|
||||
},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{{Name: "githubOrg"}},
|
||||
Default: os.Getenv("PIPER_owner"),
|
||||
},
|
||||
{
|
||||
Name: "repository",
|
||||
ResourceRef: []config.ResourceReference{
|
||||
{
|
||||
Name: "commonPipelineEnvironment",
|
||||
Param: "github/repository",
|
||||
},
|
||||
},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{{Name: "githubRepo"}},
|
||||
Default: os.Getenv("PIPER_repository"),
|
||||
},
|
||||
{
|
||||
Name: "assignees",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "[]string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: []string{``},
|
||||
},
|
||||
{
|
||||
Name: "customTlsCertificateLinks",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "[]string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: []string{},
|
||||
},
|
||||
},
|
||||
},
|
||||
Containers: []config.Container{
|
||||
|
@ -11,6 +11,7 @@ import (
|
||||
"testing"
|
||||
|
||||
bd "github.com/SAP/jenkins-library/pkg/blackduck"
|
||||
piperGithub "github.com/SAP/jenkins-library/pkg/github"
|
||||
piperhttp "github.com/SAP/jenkins-library/pkg/http"
|
||||
"github.com/SAP/jenkins-library/pkg/mock"
|
||||
|
||||
@ -183,6 +184,10 @@ func (c *detectTestUtilsBundle) DownloadFile(url, filename string, _ http.Header
|
||||
return nil
|
||||
}
|
||||
|
||||
func (w *detectTestUtilsBundle) CreateIssue(ghCreateIssueOptions *piperGithub.CreateIssueOptions) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func newDetectTestUtilsBundle() *detectTestUtilsBundle {
|
||||
utilsBundle := detectTestUtilsBundle{
|
||||
ShellMockRunner: &mock.ShellMockRunner{},
|
||||
|
@@ -329,7 +329,7 @@ func verifyFFProjectCompliance(config fortifyExecuteScanOptions, utils fortifyUt
    log.Entry().Debugf("%v, %v, %v, %v, %v, %v", config.CreateResultIssue, numberOfViolations > 0, len(config.GithubToken) > 0, len(config.GithubAPIURL) > 0, len(config.Owner) > 0, len(config.Repository) > 0)
    if config.CreateResultIssue && numberOfViolations > 0 && len(config.GithubToken) > 0 && len(config.GithubAPIURL) > 0 && len(config.Owner) > 0 && len(config.Repository) > 0 {
        log.Entry().Debug("Creating/updating GitHub issue with scan results")
        err = reporting.UploadSingleReportToGithub(scanReport, config.GithubToken, config.GithubAPIURL, config.Owner, config.Repository, "Fortify SAST Results", config.Assignees, utils)
        err = reporting.UploadSingleReportToGithub(scanReport, config.GithubToken, config.GithubAPIURL, config.Owner, config.Repository, config.Assignees, utils)
        if err != nil {
            return errors.Wrap(err, "failed to upload scan results into GitHub"), reports
        }
@ -10,6 +10,7 @@ import (
|
||||
"time"
|
||||
|
||||
piperDocker "github.com/SAP/jenkins-library/pkg/docker"
|
||||
piperGithub "github.com/SAP/jenkins-library/pkg/github"
|
||||
piperhttp "github.com/SAP/jenkins-library/pkg/http"
|
||||
ws "github.com/SAP/jenkins-library/pkg/whitesource"
|
||||
|
||||
@ -50,6 +51,8 @@ type whitesourceUtils interface {
|
||||
GetArtifactCoordinates(buildTool, buildDescriptorFile string,
|
||||
options *versioning.Options) (versioning.Coordinates, error)
|
||||
|
||||
CreateIssue(ghCreateIssueOptions *piperGithub.CreateIssueOptions) error
|
||||
|
||||
Now() time.Time
|
||||
}
|
||||
|
||||
@ -60,6 +63,11 @@ type whitesourceUtilsBundle struct {
|
||||
npmExecutor npm.Executor
|
||||
}
|
||||
|
||||
// CreateIssue supplies capability for GitHub issue creation
|
||||
func (w *whitesourceUtilsBundle) CreateIssue(ghCreateIssueOptions *piperGithub.CreateIssueOptions) error {
|
||||
return piperGithub.CreateIssue(ghCreateIssueOptions)
|
||||
}
|
||||
|
||||
func (w *whitesourceUtilsBundle) FileOpen(name string, flag int, perm os.FileMode) (ws.File, error) {
|
||||
return os.OpenFile(name, flag, perm)
|
||||
}
|
||||
@ -492,7 +500,7 @@ func checkPolicyViolations(config *ScanOptions, scan *ws.Scan, sys whitesource,
|
||||
|
||||
// create a json report to be used later, e.g. issue creation in GitHub
|
||||
ipReport := reporting.ScanReport{
|
||||
Title: "WhiteSource IP Report",
|
||||
ReportTitle: "WhiteSource IP Report",
|
||||
Subheaders: []reporting.Subheader{
|
||||
{Description: "WhiteSource product name", Details: config.ProductName},
|
||||
{Description: "Filtered project names", Details: strings.Join(scan.ScannedProjectNames(), ", ")},
|
||||
@ -563,7 +571,9 @@ func checkSecurityViolations(config *ScanOptions, scan *ws.Scan, sys whitesource
|
||||
|
||||
if config.CreateResultIssue && vulnerabilitiesCount > 0 && len(config.GithubToken) > 0 && len(config.GithubAPIURL) > 0 && len(config.Owner) > 0 && len(config.Repository) > 0 {
|
||||
log.Entry().Debugf("Creating result issues for %v alert(s)", vulnerabilitiesCount)
|
||||
err = ws.CreateGithubResultIssues(scan, &allAlerts, config.GithubToken, config.GithubAPIURL, config.Owner, config.Repository, config.Assignees, config.CustomTLSCertificateLinks)
|
||||
issueDetails := make([]reporting.IssueDetail, len(allAlerts))
|
||||
piperutils.CopyAtoB(allAlerts, issueDetails)
|
||||
err = reporting.UploadMultipleReportsToGithub(&issueDetails, config.GithubToken, config.GithubAPIURL, config.Owner, config.Repository, config.Assignees, config.CustomTLSCertificateLinks, utils)
|
||||
if err != nil {
|
||||
errorsOccured = append(errorsOccured, fmt.Sprint(err))
|
||||
}
|
||||
|
@ -6,6 +6,7 @@ import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
piperGithub "github.com/SAP/jenkins-library/pkg/github"
|
||||
"github.com/SAP/jenkins-library/pkg/mock"
|
||||
"github.com/SAP/jenkins-library/pkg/piperenv"
|
||||
"github.com/SAP/jenkins-library/pkg/piperutils"
|
||||
@ -31,6 +32,10 @@ func (w *whitesourceUtilsMock) GetArtifactCoordinates(buildTool, buildDescriptor
|
||||
return w.coordinates, nil
|
||||
}
|
||||
|
||||
func (w *whitesourceUtilsMock) CreateIssue(ghCreateIssueOptions *piperGithub.CreateIssueOptions) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
const wsTimeNow = "2010-05-10 00:15:42"
|
||||
|
||||
func (w *whitesourceUtilsMock) Now() time.Time {
|
||||
|
@ -14,6 +14,9 @@ import (
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// ReportsDirectory defines the subfolder for the Blackduck reports which are generated
|
||||
const ReportsDirectory = "blackduck"
|
||||
|
||||
const (
|
||||
HEADER_PROJECT_DETAILS_V4 = "application/vnd.blackducksoftware.project-detail-4+json"
|
||||
HEADER_USER_V4 = "application/vnd.blackducksoftware.user-4+json"
|
||||
@ -88,6 +91,50 @@ type VulnerabilityWithRemediation struct {
|
||||
OverallScore float32 `json:"overallScore,omitempty"`
|
||||
}
|
||||
|
||||
// Title returns the issue title representation of the contents
|
||||
func (v Vulnerability) Title() string {
|
||||
return fmt.Sprintf("%v/%v/%v-%v", "SECURITY_VULNERABILITY", v.VulnerabilityName, v.Name, v.Version)
|
||||
}
|
||||
|
||||
// ToMarkdown returns the markdown representation of the contents
|
||||
func (v Vulnerability) ToMarkdown() ([]byte, error) {
|
||||
return []byte(fmt.Sprintf(
|
||||
`**Vulnerability %v**
|
||||
| Severity | Package | Installed Version | Description | Fix Resolution | Link |
|
||||
| --- | --- | --- | --- | --- | --- |
|
||||
|%v|%v|%v|%v|%v|[%v](%v)|
|
||||
`,
|
||||
v.VulnerabilityWithRemediation.VulnerabilityName,
|
||||
v.VulnerabilityWithRemediation.Severity,
|
||||
v.Name,
|
||||
v.Version,
|
||||
v.Description,
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
)), nil
|
||||
}
|
||||
|
||||
// ToTxt returns the textual representation of the contents
|
||||
func (v Vulnerability) ToTxt() string {
|
||||
return fmt.Sprintf(`Vulnerability %v
|
||||
Severity: %v
|
||||
Package: %v
|
||||
Installed Version: %v
|
||||
Description: %v
|
||||
Fix Resolution: %v
|
||||
Link: [%v](%v)`,
|
||||
v.VulnerabilityName,
|
||||
v.Severity,
|
||||
v.Name,
|
||||
v.Version,
|
||||
v.Description,
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
)
|
||||
}
|
||||
|
||||
type PolicyStatus struct {
|
||||
OverallStatus string `json:"overallStatus,omitempty"`
|
||||
PolicyVersionDetails `json:"componentVersionPolicyViolationDetails,omitempty"`
|
||||
|
pkg/blackduck/reporting.go (new file, 136 lines)
@@ -0,0 +1,136 @@
package blackduck

import (
    "encoding/json"
    "fmt"
    "path/filepath"

    "github.com/SAP/jenkins-library/pkg/format"
    "github.com/SAP/jenkins-library/pkg/log"
    "github.com/SAP/jenkins-library/pkg/piperutils"
    "github.com/SAP/jenkins-library/pkg/reporting"
    "github.com/pkg/errors"
)

// CreateSarifResultFile creates a SARIF result from the Vulnerabilities that were brought up by the scan
func CreateSarifResultFile(vulns *Vulnerabilities) *format.SARIF {
    //Now, we handle the sarif
    log.Entry().Debug("Creating SARIF file for data transfer")
    var sarif format.SARIF
    sarif.Schema = "https://docs.oasis-open.org/sarif/sarif/v2.1.0/cos01/schemas/sarif-schema-2.1.0.json"
    sarif.Version = "2.1.0"
    var wsRun format.Runs
    sarif.Runs = append(sarif.Runs, wsRun)

    //handle the tool object
    tool := *new(format.Tool)
    tool.Driver = *new(format.Driver)
    tool.Driver.Name = "Blackduck Hub Detect"
    tool.Driver.Version = "unknown"
    tool.Driver.InformationUri = "https://community.synopsys.com/s/document-item?bundleId=integrations-detect&topicId=introduction.html&_LANG=enus"

    // Handle results/vulnerabilities
    if vulns != nil && vulns.Items != nil {
        for i := 0; i < len(vulns.Items); i++ {
            v := vulns.Items[i]
            result := *new(format.Results)
            id := v.Title()
            log.Entry().Debugf("Transforming alert %v into SARIF format", id)
            result.RuleID = id
            result.Level = v.VulnerabilityWithRemediation.Severity
            result.RuleIndex = i //Seems very abstract
            result.Message = new(format.Message)
            result.Message.Text = v.VulnerabilityWithRemediation.Description
            result.AnalysisTarget = new(format.ArtifactLocation)
            result.AnalysisTarget.URI = v.Name
            result.AnalysisTarget.Index = 0
            location := format.Location{PhysicalLocation: format.PhysicalLocation{ArtifactLocation: format.ArtifactLocation{URI: v.Name}, Region: format.Region{}, LogicalLocations: []format.LogicalLocation{{FullyQualifiedName: ""}}}}
            result.Locations = append(result.Locations, location)

            sarifRule := *new(format.SarifRule)
            sarifRule.ID = id
            sarifRule.ShortDescription = new(format.Message)
            sarifRule.ShortDescription.Text = fmt.Sprintf("%v Package %v", v.VulnerabilityName, v.Name)
            sarifRule.FullDescription = new(format.Message)
            sarifRule.FullDescription.Text = v.VulnerabilityWithRemediation.Description
            sarifRule.DefaultConfiguration = new(format.DefaultConfiguration)
            sarifRule.DefaultConfiguration.Level = v.Severity
            sarifRule.HelpURI = ""
            markdown, _ := v.ToMarkdown()
            sarifRule.Help = new(format.Help)
            sarifRule.Help.Text = v.ToTxt()
            sarifRule.Help.Markdown = string(markdown)

            // Avoid empty descriptions to respect standard
            if sarifRule.ShortDescription.Text == "" {
                sarifRule.ShortDescription.Text = "None."
            }
            if sarifRule.FullDescription.Text == "" { // OR USE OMITEMPTY
                sarifRule.FullDescription.Text = "None."
            }

            ruleProp := *new(format.SarifRuleProperties)
            ruleProp.Tags = append(ruleProp.Tags, "SECURITY_VULNERABILITY")
            ruleProp.Tags = append(ruleProp.Tags, v.VulnerabilityWithRemediation.Description)
            ruleProp.Tags = append(ruleProp.Tags, v.Name)
            ruleProp.Precision = "very-high"
            sarifRule.Properties = &ruleProp

            //Finalize: append the result and the rule
            sarif.Runs[0].Results = append(sarif.Runs[0].Results, result)
            tool.Driver.Rules = append(tool.Driver.Rules, sarifRule)
        }
    }
    //Finalize: tool
    sarif.Runs[0].Tool = tool

    return &sarif
}

// WriteVulnerabilityReports writes vulnerability information from ScanReport into dedicated outputs e.g. HTML
func WriteVulnerabilityReports(scanReport reporting.ScanReport, utils piperutils.FileUtils) ([]piperutils.Path, error) {
    reportPaths := []piperutils.Path{}

    htmlReport, _ := scanReport.ToHTML()
    htmlReportPath := "piper_detect_vulnerability_report.html"
    if err := utils.FileWrite(htmlReportPath, htmlReport, 0666); err != nil {
        log.SetErrorCategory(log.ErrorConfiguration)
        return reportPaths, errors.Wrapf(err, "failed to write html report")
    }
    reportPaths = append(reportPaths, piperutils.Path{Name: "BlackDuck Vulnerability Report", Target: htmlReportPath})

    jsonReport, _ := scanReport.ToJSON()
    if exists, _ := utils.DirExists(reporting.StepReportDirectory); !exists {
        err := utils.MkdirAll(reporting.StepReportDirectory, 0777)
        if err != nil {
            return reportPaths, errors.Wrap(err, "failed to create reporting directory")
        }
    }
    if err := utils.FileWrite(filepath.Join(reporting.StepReportDirectory, fmt.Sprintf("detectExecuteScan_oss_%v.json", fmt.Sprintf("%v", utils.CurrentTime("")))), jsonReport, 0666); err != nil {
        return reportPaths, errors.Wrapf(err, "failed to write json report")
    }

    return reportPaths, nil
}

// WriteSarifFile write a JSON sarif format file for upload into e.g. GCP
func WriteSarifFile(sarif *format.SARIF, utils piperutils.FileUtils) ([]piperutils.Path, error) {
    reportPaths := []piperutils.Path{}

    // ignore templating errors since template is in our hands and issues will be detected with the automated tests
    sarifReport, errorMarshall := json.Marshal(sarif)
    if errorMarshall != nil {
        return reportPaths, errors.Wrapf(errorMarshall, "failed to marshall SARIF json file")
    }
    if err := utils.MkdirAll(ReportsDirectory, 0777); err != nil {
        return reportPaths, errors.Wrapf(err, "failed to create report directory")
    }
    sarifReportPath := filepath.Join(ReportsDirectory, "piper_detect_vulnerability.sarif")
    if err := utils.FileWrite(sarifReportPath, sarifReport, 0666); err != nil {
        log.SetErrorCategory(log.ErrorConfiguration)
        return reportPaths, errors.Wrapf(err, "failed to write SARIF file")
    }
    reportPaths = append(reportPaths, piperutils.Path{Name: "Blackduck Detect Vulnerability SARIF file", Target: sarifReportPath})

    return reportPaths, nil
}
pkg/blackduck/reporting_test.go (new file, 100 lines)
@@ -0,0 +1,100 @@
package blackduck

import (
    "fmt"
    "path/filepath"
    "testing"

    "github.com/SAP/jenkins-library/pkg/format"
    "github.com/SAP/jenkins-library/pkg/mock"
    "github.com/SAP/jenkins-library/pkg/piperutils"
    "github.com/SAP/jenkins-library/pkg/reporting"
    "github.com/stretchr/testify/assert"
)

func TestCreateSarifResultFile(t *testing.T) {
    alerts := []Vulnerability{
        {Name: "test1", Version: "1.2.3", VulnerabilityWithRemediation: VulnerabilityWithRemediation{VulnerabilityName: "CVE-45456543", Severity: "Critical", Description: "Some vulnerability that can be exploited by peeling the glue off.", BaseScore: 9.8, OverallScore: 10}},
        {Name: "test2", Version: "1.2.4", VulnerabilityWithRemediation: VulnerabilityWithRemediation{VulnerabilityName: "CVE-45456542", Severity: "Critical", Description: "Some other vulnerability that can be exploited by filling the glass.", BaseScore: 9, OverallScore: 9}},
        {Name: "test3", Version: "1.2.5", VulnerabilityWithRemediation: VulnerabilityWithRemediation{VulnerabilityName: "CVE-45456541", Severity: "Medium", Description: "Some vulnerability that can be exploited by turning it upside down.", BaseScore: 6.5, OverallScore: 7}},
    }
    vulns := Vulnerabilities{
        Items: alerts,
    }

    sarif := CreateSarifResultFile(&vulns)

    assert.Equal(t, "https://docs.oasis-open.org/sarif/sarif/v2.1.0/cos01/schemas/sarif-schema-2.1.0.json", sarif.Schema)
    assert.Equal(t, "2.1.0", sarif.Version)
    assert.Equal(t, 1, len(sarif.Runs))
    assert.Equal(t, "Blackduck Hub Detect", sarif.Runs[0].Tool.Driver.Name)
    assert.Equal(t, "unknown", sarif.Runs[0].Tool.Driver.Version)
    assert.Equal(t, 3, len(sarif.Runs[0].Tool.Driver.Rules))
    assert.Equal(t, 3, len(sarif.Runs[0].Results))
    // TODO add more extensive verification once we agree on the format details
}

func TestWriteCustomVulnerabilityReports(t *testing.T) {

    t.Run("success", func(t *testing.T) {
        scanReport := reporting.ScanReport{}
        utilsMock := &mock.FilesMock{}

        reportPaths, err := WriteVulnerabilityReports(scanReport, utilsMock)

        assert.NoError(t, err)
        assert.Equal(t, 1, len(reportPaths))

        exists, err := utilsMock.FileExists(reportPaths[0].Target)
        assert.NoError(t, err)
        assert.True(t, exists)

        exists, err = utilsMock.FileExists(filepath.Join(reporting.StepReportDirectory, "detectExecuteScan_oss_20220102-150405.json"))
        assert.NoError(t, err)
        assert.True(t, exists)
    })

    t.Run("failed to write json report", func(t *testing.T) {
        scanReport := reporting.ScanReport{}
        utilsMock := &mock.FilesMock{}
        utilsMock.FileWriteErrors = map[string]error{
            filepath.Join(reporting.StepReportDirectory, "detectExecuteScan_oss_20220102-150405.json"): fmt.Errorf("write error"),
        }

        _, err := WriteVulnerabilityReports(scanReport, utilsMock)
        assert.Contains(t, fmt.Sprint(err), "failed to write json report")
    })
}

func TestWriteSarifFile(t *testing.T) {

    t.Run("success", func(t *testing.T) {
        sarif := format.SARIF{}
        var utilsMock piperutils.FileUtils
        utilsMock = &mock.FilesMock{}

        reportPaths, err := WriteSarifFile(&sarif, utilsMock)

        assert.NoError(t, err)
        assert.Equal(t, 1, len(reportPaths))

        exists, err := utilsMock.FileExists(reportPaths[0].Target)
        assert.NoError(t, err)
        assert.True(t, exists)

        exists, err = utilsMock.FileExists(filepath.Join(ReportsDirectory, "piper_detect_vulnerability.sarif"))
        assert.NoError(t, err)
        assert.True(t, exists)
    })

    t.Run("failed to write HTML report", func(t *testing.T) {
        sarif := format.SARIF{}
        utilsMock := &mock.FilesMock{}
        utilsMock.FileWriteErrors = map[string]error{
            filepath.Join(ReportsDirectory, "piper_detect_vulnerability.sarif"): fmt.Errorf("write error"),
        }

        _, err := WriteSarifFile(&sarif, utilsMock)
        assert.Contains(t, fmt.Sprint(err), "failed to write SARIF file")
    })
}
@ -41,7 +41,7 @@ func CreateCustomReport(data map[string]interface{}, insecure, neutral []string)
|
||||
deepLink := fmt.Sprintf(`<a href="%v" target="_blank">Link to scan in CX UI</a>`, data["DeepLink"])
|
||||
|
||||
scanReport := reporting.ScanReport{
|
||||
Title: "Checkmarx SAST Report",
|
||||
ReportTitle: "Checkmarx SAST Report",
|
||||
Subheaders: []reporting.Subheader{
|
||||
{Description: "Project name", Details: fmt.Sprint(data["ProjectName"])},
|
||||
{Description: "Project ID", Details: fmt.Sprint(data["ProjectID"])},
|
||||
|
@ -50,7 +50,7 @@ type SpotChecksAuditCount struct {
|
||||
func CreateCustomReport(data FortifyReportData, issueGroups []*models.ProjectVersionIssueGroup) reporting.ScanReport {
|
||||
|
||||
scanReport := reporting.ScanReport{
|
||||
Title: "Fortify SAST Report",
|
||||
ReportTitle: "Fortify SAST Report",
|
||||
Subheaders: []reporting.Subheader{
|
||||
{Description: "Fortify project name", Details: data.ProjectName},
|
||||
{Description: "Fortify project version", Details: data.ProjectVersion},
|
||||
|
@ -4,6 +4,7 @@
|
||||
package mock
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
@ -114,6 +115,17 @@ func (f *FilesMock) AddDirWithMode(path string, mode os.FileMode) {
|
||||
f.associateContent(path, &dirContent, mode)
|
||||
}
|
||||
|
||||
// SHA256 returns a random SHA256
|
||||
func (f *FilesMock) SHA256(path string) (string, error) {
|
||||
hash := sha256.New()
|
||||
return fmt.Sprintf("%x", string(hash.Sum(nil))), nil
|
||||
}
|
||||
|
||||
// CurrentTime returns the current time as a fixed value
|
||||
func (f *FilesMock) CurrentTime(format string) string {
|
||||
return "20220102-150405"
|
||||
}
|
||||
|
||||
func (f *FilesMock) associateContent(path string, content *[]byte, mode os.FileMode) {
|
||||
f.init()
|
||||
path = f.toAbsPath(path)
|
||||
|
@ -12,6 +12,7 @@ import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/bmatcuk/doublestar"
|
||||
)
|
||||
@ -35,6 +36,8 @@ type FileUtils interface {
|
||||
FileRename(string, string) error
|
||||
Getwd() (string, error)
|
||||
Symlink(oldname string, newname string) error
|
||||
SHA256(path string) (string, error)
|
||||
CurrentTime(format string) string
|
||||
}
|
||||
|
||||
// Files ...
|
||||
@ -414,7 +417,7 @@ func (f Files) Symlink(oldname, newname string) error {
|
||||
return os.Symlink(oldname, newname)
|
||||
}
|
||||
|
||||
// Computes a SHA256 for a given file
|
||||
// SHA256 computes a SHA256 for a given file
|
||||
func (f Files) SHA256(path string) (string, error) {
|
||||
file, err := os.Open(path)
|
||||
if err != nil {
|
||||
@ -430,3 +433,12 @@ func (f Files) SHA256(path string) (string, error) {
|
||||
|
||||
return fmt.Sprintf("%x", string(hash.Sum(nil))), nil
|
||||
}
|
||||
|
||||
// CurrentTime returns the current time in the specified format
|
||||
func (f Files) CurrentTime(format string) string {
|
||||
fString := format
|
||||
if len(format) == 0 {
|
||||
fString = "20060102-150405"
|
||||
}
|
||||
return fmt.Sprint(time.Now().Format(fString))
|
||||
}
|
||||
|
@ -1,6 +1,7 @@
|
||||
package piperutils
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"strings"
|
||||
)
|
||||
|
||||
@ -115,3 +116,25 @@ func UniqueStrings(values []string) []string {
|
||||
}
|
||||
return keys
|
||||
}
|
||||
|
||||
// CopyAtoB copies the contents of a into slice b given that they are of equal size and compatible type
|
||||
func CopyAtoB(a, b interface{}) {
|
||||
src := reflect.ValueOf(a)
|
||||
tgt := reflect.ValueOf(b)
|
||||
if src.Kind() != reflect.Slice || tgt.Kind() != reflect.Slice {
|
||||
panic("CopyAtoB() given a non-slice type")
|
||||
}
|
||||
|
||||
if src.Len() != tgt.Len() {
|
||||
panic("CopyAtoB() given non equal sized slices")
|
||||
}
|
||||
|
||||
// Keep the distinction between nil and empty slice input
|
||||
if src.IsNil() {
|
||||
return
|
||||
}
|
||||
|
||||
for i := 0; i < src.Len(); i++ {
|
||||
tgt.Index(i).Set(src.Index(i))
|
||||
}
|
||||
}
|
||||
|
@ -153,3 +153,12 @@ func TestUniqueStrings(t *testing.T) {
|
||||
assert.Subset(t, []string{"123", "abc", "xyz"}, unique)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCopyAtoB(t *testing.T) {
|
||||
src := []string{"abc", "xyz", "123", "abc"}
|
||||
target := make([]string, 4)
|
||||
CopyAtoB(src, target)
|
||||
if assert.Len(t, target, 4) {
|
||||
assert.EqualValues(t, src, target)
|
||||
}
|
||||
}
|
||||
|
@ -59,7 +59,7 @@ func writeJSON(path, name string, data interface{}, writeToFile func(f string, d
|
||||
|
||||
func CreateCustomReport(productName string, productID int, data map[string]int, vulns []Vuln) reporting.ScanReport {
|
||||
scanReport := reporting.ScanReport{
|
||||
Title: "Protecode Vulnerability Report",
|
||||
ReportTitle: "Protecode Vulnerability Report",
|
||||
Subheaders: []reporting.Subheader{
|
||||
{Description: "Product name", Details: productName},
|
||||
{Description: "Product ID", Details: fmt.Sprint(productID)},
|
||||
|
@ -4,13 +4,15 @@ import (
|
||||
"fmt"
|
||||
|
||||
piperGithub "github.com/SAP/jenkins-library/pkg/github"
|
||||
"github.com/SAP/jenkins-library/pkg/log"
|
||||
)
|
||||
|
||||
type Uploader interface {
|
||||
CreateIssue(ghCreateIssueOptions *piperGithub.CreateIssueOptions) error
|
||||
}
|
||||
|
||||
func UploadSingleReportToGithub(scanReport ScanReport, token, APIURL, owner, repository, title string, assignees []string, uploader Uploader) error {
|
||||
// UploadSingleReportToGithub uploads a single report to GitHub
|
||||
func UploadSingleReportToGithub(scanReport IssueDetail, token, APIURL, owner, repository string, assignees []string, uploader Uploader) error {
|
||||
// JSON reports are used by step pipelineCreateSummary in order to e.g. prepare an issue creation in GitHub
|
||||
// ignore JSON errors since structure is in our hands
|
||||
markdownReport, _ := scanReport.ToMarkdown()
|
||||
@ -19,14 +21,42 @@ func UploadSingleReportToGithub(scanReport ScanReport, token, APIURL, owner, rep
|
||||
APIURL: APIURL,
|
||||
Owner: owner,
|
||||
Repository: repository,
|
||||
Title: title,
|
||||
Title: scanReport.Title(),
|
||||
Body: markdownReport,
|
||||
Assignees: assignees,
|
||||
UpdateExisting: true,
|
||||
}
|
||||
err := uploader.CreateIssue(&options)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to upload results for '%v' into GitHub issue: %w", title, err)
|
||||
return fmt.Errorf("failed to upload results for '%v' into GitHub issue: %w", scanReport.Title(), err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// UploadMultipleReportsToGithub uploads a number of reports to GitHub, one per IssueDetail to create transparency
|
||||
func UploadMultipleReportsToGithub(scanReports *[]IssueDetail, token, APIURL, owner, repository string, assignees, trustedCerts []string, uploader Uploader) error {
|
||||
for i := 0; i < len(*scanReports); i++ {
|
||||
vuln := (*scanReports)[i]
|
||||
title := vuln.Title()
|
||||
markdownReport, _ := vuln.ToMarkdown()
|
||||
options := piperGithub.CreateIssueOptions{
|
||||
Token: token,
|
||||
APIURL: APIURL,
|
||||
Owner: owner,
|
||||
Repository: repository,
|
||||
Title: title,
|
||||
Body: markdownReport,
|
||||
Assignees: assignees,
|
||||
UpdateExisting: true,
|
||||
TrustedCerts: trustedCerts,
|
||||
}
|
||||
|
||||
log.Entry().Debugf("Creating/updating GitHub issue(s) with title %v in org %v and repo %v", title, owner, repository)
|
||||
err := uploader.CreateIssue(&options)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to upload results for '%v' into GitHub issue: %w", vuln.Title(), err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
@ -18,6 +18,38 @@ func (m *mockUploader) CreateIssue(ghCreateIssueOptions *piperGithub.CreateIssue
|
||||
return m.uploadError
|
||||
}
|
||||
|
||||
type issueDetailMock struct {
|
||||
vulnerabilityType string
|
||||
vulnerabilityName string
|
||||
libraryName string
|
||||
vulnerabilitySeverity string
|
||||
vulnerabilityScore float64
|
||||
vulnerabilityCVSS3Score float64
|
||||
}
|
||||
|
||||
func (idm issueDetailMock) Title() string {
|
||||
return fmt.Sprintf("%v/%v/%v", idm.vulnerabilityType, idm.vulnerabilityName, idm.libraryName)
|
||||
}
|
||||
|
||||
func (idm issueDetailMock) ToMarkdown() ([]byte, error) {
|
||||
return []byte(fmt.Sprintf(`**Vulnerability %v**
|
||||
| Severity | Package | Installed Version | Description | Fix Resolution | Link |
|
||||
| --- | --- | --- | --- | --- | --- |
|
||||
|%v|%v|%v|%v|%v|[%v](%v)|
|
||||
`, idm.vulnerabilityName, idm.vulnerabilitySeverity, idm.libraryName, "", "", "", "", "")), nil
|
||||
}
|
||||
|
||||
func (idm issueDetailMock) ToTxt() string {
|
||||
return fmt.Sprintf(`Vulnerability %v
|
||||
Severity: %v
|
||||
Package: %v
|
||||
Installed Version: %v
|
||||
Description: %v
|
||||
Fix Resolution: %v
|
||||
Link: %v
|
||||
`, idm.vulnerabilityName, idm.vulnerabilitySeverity, idm.libraryName, "", "", "", "")
|
||||
}
|
||||
|
||||
func TestUploadSingleReportToGithub(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
@ -30,21 +62,19 @@ func TestUploadSingleReportToGithub(t *testing.T) {
|
||||
apiurl string
|
||||
owner string
|
||||
repository string
|
||||
title string
|
||||
assignees []string
|
||||
uploader Uploader
|
||||
}{
|
||||
scanReport: ScanReport{Title: "testReportTitle"},
|
||||
scanReport: ScanReport{ReportTitle: "testReportTitle"},
|
||||
token: "testToken",
|
||||
apiurl: "testApiUrl",
|
||||
owner: "testOwner",
|
||||
repository: "testRepository",
|
||||
title: "testTitle",
|
||||
assignees: []string{"testAssignee1", "testAssignee2"},
|
||||
uploader: &testUploader,
|
||||
}
|
||||
|
||||
err := UploadSingleReportToGithub(testData.scanReport, testData.token, testData.apiurl, testData.owner, testData.repository, testData.title, testData.assignees, testData.uploader)
|
||||
err := UploadSingleReportToGithub(testData.scanReport, testData.token, testData.apiurl, testData.owner, testData.repository, testData.assignees, testData.uploader)
|
||||
|
||||
assert.NoError(t, err)
|
||||
|
||||
@ -52,7 +82,7 @@ func TestUploadSingleReportToGithub(t *testing.T) {
|
||||
assert.Equal(t, testData.apiurl, testUploader.issueOptions.APIURL)
|
||||
assert.Equal(t, testData.owner, testUploader.issueOptions.Owner)
|
||||
assert.Equal(t, testData.repository, testUploader.issueOptions.Repository)
|
||||
assert.Equal(t, testData.title, testUploader.issueOptions.Title)
|
||||
assert.Equal(t, testData.scanReport.ReportTitle, testUploader.issueOptions.Title)
|
||||
assert.Contains(t, string(testUploader.issueOptions.Body), "testReportTitle")
|
||||
assert.Equal(t, testData.assignees, testUploader.issueOptions.Assignees)
|
||||
assert.True(t, testUploader.issueOptions.UpdateExisting)
|
||||
@ -61,7 +91,57 @@ func TestUploadSingleReportToGithub(t *testing.T) {
|
||||
t.Run("error case", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
testUploader := mockUploader{uploadError: fmt.Errorf("upload failed")}
|
||||
err := UploadSingleReportToGithub(ScanReport{}, "", "", "", "", "", []string{}, &testUploader)
|
||||
var report IssueDetail
|
||||
report = ScanReport{}
|
||||
err := UploadSingleReportToGithub(report, "", "", "", "", []string{}, &testUploader)
|
||||
|
||||
assert.Contains(t, fmt.Sprint(err), "upload failed")
|
||||
})
|
||||
}
|
||||
|
||||
func TestUploadMultipleReportsToGithub(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
t.Run("success case", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
testUploader := mockUploader{}
|
||||
testData := struct {
|
||||
reports []IssueDetail
|
||||
token string
|
||||
apiurl string
|
||||
owner string
|
||||
repository string
|
||||
assignees []string
|
||||
uploader Uploader
|
||||
}{
|
||||
reports: []IssueDetail{issueDetailMock{vulnerabilityType: "SECURITY_VULNERABILITY", libraryName: "test-component", vulnerabilityName: "CVE-2022001", vulnerabilitySeverity: "MEDIUM", vulnerabilityScore: 5.3}},
|
||||
token: "testToken",
|
||||
apiurl: "testApiUrl",
|
||||
owner: "testOwner",
|
||||
repository: "testRepository",
|
||||
assignees: []string{"testAssignee1", "testAssignee2"},
|
||||
uploader: &testUploader,
|
||||
}
|
||||
|
||||
err := UploadMultipleReportsToGithub(&testData.reports, testData.token, testData.apiurl, testData.owner, testData.repository, testData.assignees, []string{}, testData.uploader)
|
||||
|
||||
assert.NoError(t, err)
|
||||
|
||||
assert.Equal(t, testData.token, testUploader.issueOptions.Token)
|
||||
assert.Equal(t, testData.apiurl, testUploader.issueOptions.APIURL)
|
||||
assert.Equal(t, testData.owner, testUploader.issueOptions.Owner)
|
||||
assert.Equal(t, testData.repository, testUploader.issueOptions.Repository)
|
||||
assert.Equal(t, testData.reports[0].Title(), testUploader.issueOptions.Title)
|
||||
assert.Contains(t, string(testUploader.issueOptions.Body), "CVE-2022001")
|
||||
assert.Equal(t, testData.assignees, testUploader.issueOptions.Assignees)
|
||||
assert.True(t, testUploader.issueOptions.UpdateExisting)
|
||||
})
|
||||
|
||||
t.Run("error case", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
testUploader := mockUploader{uploadError: fmt.Errorf("upload failed")}
|
||||
reports := []IssueDetail{issueDetailMock{vulnerabilityType: "SECURITY_VULNERABILITY", libraryName: "test-component", vulnerabilityName: "CVE-2022001", vulnerabilitySeverity: "MEDIUM", vulnerabilityScore: 5.3}}
|
||||
err := UploadMultipleReportsToGithub(&reports, "", "", "", "", []string{}, []string{}, &testUploader)
|
||||
|
||||
assert.Contains(t, fmt.Sprint(err), "upload failed")
|
||||
})
|
||||
|
@ -10,10 +10,17 @@ import (
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// IssueDetail represents any content that can be transformed into the body of a GitHub issue
|
||||
type IssueDetail interface {
|
||||
Title() string
|
||||
ToMarkdown() ([]byte, error)
|
||||
ToTxt() string
|
||||
}
|
||||
|
||||
// ScanReport defines the elements of a scan report used by various scan steps
|
||||
type ScanReport struct {
|
||||
StepName string `json:"stepName"`
|
||||
Title string `json:"title"`
|
||||
ReportTitle string `json:"title"`
|
||||
Subheaders []Subheader `json:"subheaders"`
|
||||
Overview []OverviewRow `json:"overview"`
|
||||
FurtherInfo string `json:"furtherInfo"`
|
||||
@ -93,6 +100,12 @@ func (s *ScanReport) ToJSON() ([]byte, error) {
|
||||
return json.Marshal(s)
|
||||
}
|
||||
|
||||
// ToTxt up to now returns the report in JSON format
|
||||
func (s ScanReport) ToTxt() string {
|
||||
txt, _ := s.ToJSON()
|
||||
return string(txt)
|
||||
}
|
||||
|
||||
const reportHTMLTemplate = `<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
@ -266,8 +279,13 @@ Snapshot taken: <i>{{reportTime .ReportTime}}</i>
|
||||
|
||||
`
|
||||
|
||||
// Title returns the title of the report
|
||||
func (s ScanReport) Title() string {
|
||||
return s.ReportTitle
|
||||
}
|
||||
|
||||
// ToMarkdown creates a markdown version of the report content
|
||||
func (s *ScanReport) ToMarkdown() ([]byte, error) {
|
||||
func (s ScanReport) ToMarkdown() ([]byte, error) {
|
||||
funcMap := template.FuncMap{
|
||||
"columnCount": tableColumnCount,
|
||||
"drawCell": drawCell,
|
||||
|
@ -10,8 +10,8 @@ import (
func TestToHTML(t *testing.T) {
    t.Run("empty table", func(t *testing.T) {
        report := ScanReport{
            Title:      "Report Test Title",
            Subheaders: []Subheader{{Description: "sub 1", Details: "1"}, {Description: "sub 2", Details: "2"}},
            ReportTitle: "Report Test Title",
            Subheaders:  []Subheader{{Description: "sub 1", Details: "1"}, {Description: "sub 2", Details: "2"}},
            Overview: []OverviewRow{
                {"overview 1", "1", Green},
                {"overview 2", "2", Green},
@ -52,8 +52,8 @@ func TestToHTML(t *testing.T) {

    t.Run("table with content", func(t *testing.T) {
        report := ScanReport{
            Title:      "Report Test Title",
            ReportTime: time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC),
            ReportTitle: "Report Test Title",
            ReportTime:  time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC),
            DetailTable: ScanDetailTable{
                Headers: []string{"column 1", "column 2"},
                Rows: []ScanRow{
@ -84,8 +84,8 @@ func TestToHTML(t *testing.T) {
func TestToMarkdown(t *testing.T) {
    t.Run("table with details", func(t *testing.T) {
        report := ScanReport{
            Title:      "Report Test Title",
            Subheaders: []Subheader{{Description: "sub 1", Details: "1"}, {Description: "sub 2", Details: "2"}},
            ReportTitle: "Report Test Title",
            Subheaders:  []Subheader{{Description: "sub 1", Details: "1"}, {Description: "sub 2", Details: "2"}},
            Overview: []OverviewRow{
                {"overview 1", "1", Green},
                {"overview 2", "2", Green},
@ -120,8 +120,8 @@ func TestToMarkdown(t *testing.T) {

    t.Run("table without details", func(t *testing.T) {
        report := ScanReport{
            Title:      "Report Test Title",
            Subheaders: []Subheader{{Description: "sub 1", Details: "1"}, {Description: "sub 2", Details: "2"}},
            ReportTitle: "Report Test Title",
            Subheaders:  []Subheader{{Description: "sub 1", Details: "1"}, {Description: "sub 2", Details: "2"}},
            Overview: []OverviewRow{
                {"overview 1", "1", Green},
                {"overview 2", "2", Green},

@ -10,7 +10,6 @@ import (
    "time"

    "github.com/SAP/jenkins-library/pkg/format"
    piperGithub "github.com/SAP/jenkins-library/pkg/github"
    "github.com/SAP/jenkins-library/pkg/log"
    "github.com/SAP/jenkins-library/pkg/piperutils"
    "github.com/SAP/jenkins-library/pkg/reporting"
@ -29,7 +28,7 @@ func CreateCustomVulnerabilityReport(productName string, scan *Scan, alerts *[]A
    projectNames := scan.ScannedProjectNames()

    scanReport := reporting.ScanReport{
        Title:       "WhiteSource Security Vulnerability Report",
        ReportTitle: "WhiteSource Security Vulnerability Report",
        Subheaders: []reporting.Subheader{
            {Description: "WhiteSource product name", Details: productName},
            {Description: "Filtered project names", Details: strings.Join(projectNames, ", ")},
@ -220,10 +219,10 @@ func CreateSarifResultFile(scan *Scan, alerts *[]Alert) *format.SARIF {
        defaultConfig.Level = alert.Level
        sarifRule.DefaultConfiguration = defaultConfig
        sarifRule.HelpURI = alert.Vulnerability.URL
        help := new(format.Help)
        help.Text = fmt.Sprintf("Vulnerability %v\nSeverity: %v\nPackage: %v\nInstalled Version: %v\nFix Resolution: %v\nLink: [%v](%v)", alert.Vulnerability.Name, alert.Vulnerability.Severity, alert.Library.ArtifactID, alert.Library.Version, alert.Vulnerability.TopFix.FixResolution, alert.Vulnerability.Name, alert.Vulnerability.URL)
        help.Markdown = alert.ToMarkdown()
        sarifRule.Help = help
        markdown, _ := alert.ToMarkdown()
        sarifRule.Help = new(format.Help)
        sarifRule.Help.Text = alert.ToTxt()
        sarifRule.Help.Markdown = string(markdown)

        // Avoid empty descriptions to respect standard
        if sarifRule.ShortDescription.Text == "" {
@ -250,7 +249,7 @@ func CreateSarifResultFile(scan *Scan, alerts *[]Alert) *format.SARIF {
    return &sarif
}

// WriteSarifFile write a JSON sarif format file for upload into Cumulus
// WriteSarifFile write a JSON sarif format file for upload into e.g. GCP
func WriteSarifFile(sarif *format.SARIF, utils piperutils.FileUtils) ([]piperutils.Path, error) {
    reportPaths := []piperutils.Path{}

@ -271,31 +270,3 @@ func WriteSarifFile(sarif *format.SARIF, utils piperutils.FileUtils) ([]piperuti

    return reportPaths, nil
}
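
CreateSarifResultFile and WriteSarifFile are typically chained: build the SARIF model from the scan and its alerts, then persist it so the step can publish the path. A minimal sketch, assuming it lives next to the functions above; the helper name is made up and error handling is trimmed to the essentials.

package whitesource

import (
    "fmt"

    "github.com/SAP/jenkins-library/pkg/piperutils"
)

// persistSarif is a hypothetical helper showing how the two functions are combined.
func persistSarif(scan *Scan, alerts []Alert, utils piperutils.FileUtils) ([]piperutils.Path, error) {
    sarif := CreateSarifResultFile(scan, &alerts)
    paths, err := WriteSarifFile(sarif, utils)
    if err != nil {
        return nil, fmt.Errorf("failed to write SARIF report: %w", err)
    }
    return paths, nil
}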

// CreateGithubResultIssues creates a number of GitHub issues, one per Alert to create transparency on the findings
func CreateGithubResultIssues(scan *Scan, alerts *[]Alert, token, APIURL, owner, repository string, assignees, trustedCerts []string) error {
    for i := 0; i < len(*alerts); i++ {
        alert := (*alerts)[i]
        title := fmt.Sprintf("%v/%v/%v", alert.Type, alert.Vulnerability.Name, alert.Library.ArtifactID)
        markdownReport := alert.ToMarkdown()
        options := piperGithub.CreateIssueOptions{
            Token:          token,
            APIURL:         APIURL,
            Owner:          owner,
            Repository:     repository,
            Title:          title,
            Body:           []byte(markdownReport),
            Assignees:      assignees,
            UpdateExisting: true,
            TrustedCerts:   trustedCerts,
        }

        log.Entry().Debugf("Creating/updating GitHub issue(s) with title %v in org %v and repo %v", title, owner, repository)
        err := piperGithub.CreateIssue(&options)
        if err != nil {
            return errors.Wrapf(err, "Failed to upload WhiteSource result for %v into GitHub issue", alert.Vulnerability.Name)
        }
    }

    return nil
}
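
This per-alert helper is removed by the commit; issue creation now runs through the generic IssueDetail methods instead of hand-built titles and markdown strings. A sketch of an equivalent loop on top of the new Alert methods, assuming the same piperGithub options as the removed code; the function name is illustrative and not what the commit actually wires in.

package whitesource

import (
    "github.com/pkg/errors"

    piperGithub "github.com/SAP/jenkins-library/pkg/github"
)

// createIssuesFromAlerts is a hypothetical sketch, not part of this commit.
func createIssuesFromAlerts(alerts []Alert, token, apiURL, owner, repository string, assignees, trustedCerts []string) error {
    for _, alert := range alerts {
        body, err := alert.ToMarkdown()
        if err != nil {
            return errors.Wrapf(err, "failed to render alert %v", alert.Title())
        }
        options := piperGithub.CreateIssueOptions{
            Token:          token,
            APIURL:         apiURL,
            Owner:          owner,
            Repository:     repository,
            Title:          alert.Title(),
            Body:           body,
            Assignees:      assignees,
            UpdateExisting: true,
            TrustedCerts:   trustedCerts,
        }
        if err := piperGithub.CreateIssue(&options); err != nil {
            return errors.Wrapf(err, "failed to create GitHub issue for %v", alert.Title())
        }
    }
    return nil
}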

@ -30,7 +30,7 @@ func TestCreateCustomVulnerabilityReport(t *testing.T) {

    scanReport := CreateCustomVulnerabilityReport(config.ProductName, scan, &alerts, 7.0)

    assert.Equal(t, "WhiteSource Security Vulnerability Report", scanReport.Title)
    assert.Equal(t, "WhiteSource Security Vulnerability Report", scanReport.Title())
    assert.Equal(t, 3, len(scanReport.DetailTable.Rows))

    // assert that library info is filled and sorting has been executed

@ -55,9 +55,48 @@ type Alert struct {
    Status string `json:"status,omitempty"`
}

// Title returns the issue title representation of the contents
func (a Alert) Title() string {
    return fmt.Sprintf("%v/%v/%v", a.Type, a.Vulnerability.Name, a.Library.ArtifactID)
}

// ToMarkdown returns the markdown representation of the contents
func (a *Alert) ToMarkdown() string {
    return fmt.Sprintf("**Vulnerability %v**\n| Severity | Package | Installed Version | Fix Resolution | Link |\n| --- | --- | --- | --- | --- |\n|%v|%v|%v|%v|[%v](%v)|\n", a.Vulnerability.Name, a.Vulnerability.Severity, a.Library.ArtifactID, a.Library.Version, a.Vulnerability.TopFix.FixResolution, a.Vulnerability.Name, a.Vulnerability.URL)
func (a Alert) ToMarkdown() ([]byte, error) {
    return []byte(fmt.Sprintf(
`**Vulnerability %v**
| Severity | Package | Installed Version | Description | Fix Resolution | Link |
| --- | --- | --- | --- | --- | --- |
|%v|%v|%v|%v|%v|[%v](%v)|
`,
        a.Vulnerability.Name,
        a.Vulnerability.Severity,
        a.Library.ArtifactID,
        a.Library.Version,
        a.Vulnerability.Description,
        a.Vulnerability.TopFix.FixResolution,
        a.Vulnerability.Name,
        a.Vulnerability.URL,
    )), nil
}

// ToTxt returns the textual representation of the contents
func (a Alert) ToTxt() string {
    return fmt.Sprintf(`Vulnerability %v
Severity: %v
Package: %v
Installed Version: %v
Description: %v
Fix Resolution: %v
Link: [%v](%v)`,
        a.Vulnerability.Name,
        a.Vulnerability.Severity,
        a.Library.ArtifactID,
        a.Library.Version,
        a.Vulnerability.Description,
        a.Vulnerability.TopFix.FixResolution,
        a.Vulnerability.Name,
        a.Vulnerability.URL,
    )
}
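
With Title, ToMarkdown, and ToTxt in place, Alert satisfies the IssueDetail interface introduced above; a short sketch of how that can be made explicit and consumed, assuming IssueDetail is exported from the reporting package (the compile-time assertion and the consumer function are illustrative, not part of this commit):

package whitesource

import (
    "fmt"

    "github.com/SAP/jenkins-library/pkg/reporting"
)

// Compile-time check that Alert implements reporting.IssueDetail.
var _ reporting.IssueDetail = Alert{}

// describeFinding is a hypothetical consumer that relies only on the interface.
func describeFinding(detail reporting.IssueDetail) {
    fmt.Println(detail.Title())
    fmt.Println(detail.ToTxt())
}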

// Library

@ -304,6 +304,86 @@ spec:
          - PARAMETERS
          - STAGES
          - STEPS
      - name: githubToken
        description: "GitHub personal access token as per
          https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line"
        scope:
          - GENERAL
          - PARAMETERS
          - STAGES
          - STEPS
        type: string
        secret: true
        aliases:
          - name: access_token
        resourceRef:
          - name: githubTokenCredentialsId
            type: secret
          - type: vaultSecret
            default: github
            name: githubVaultSecretName
      - name: createResultIssue
        type: bool
        description: "Whether the step creates a GitHub issue containing the scan results in the originating repo.
          Since optimized pipelines are headless the creation is implicitly activated for scheduled runs."
        resourceRef:
          - name: commonPipelineEnvironment
            param: custom/optimizedAndScheduled
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: false
      - name: githubApiUrl
        description: "Set the GitHub API URL."
        scope:
          - GENERAL
          - PARAMETERS
          - STAGES
          - STEPS
        type: string
        default: "https://api.github.com"
      - name: owner
        aliases:
          - name: githubOrg
        description: "Set the GitHub organization."
        resourceRef:
          - name: commonPipelineEnvironment
            param: github/owner
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        type: string
      - name: repository
        aliases:
          - name: githubRepo
        description: "Set the GitHub repository."
        resourceRef:
          - name: commonPipelineEnvironment
            param: github/repository
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        type: string
      - name: assignees
        description: Defines the assignees for the Github Issue created/updated with the results of the scan as a list of login names.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        type: "[]string"
        default: []
        mandatory: false
      - name: customTlsCertificateLinks
        type: "[]string"
        description: "List of download links to custom TLS certificates. This is required to ensure trusted connections to instances with repositories (like nexus) when publish flag is set to true."
        scope:
          - GENERAL
          - PARAMETERS
          - STAGES
          - STEPS
  outputs:
    resources:
      - name: influx

@ -11,7 +11,8 @@ void call(Map parameters = [:]) {
    final script = checkScript(this, parameters) ?: this
    parameters = DownloadCacheUtils.injectDownloadCacheInParameters(script, parameters, BuildTool.MAVEN)
    List credentials = [
        [type: 'token', id: 'detectTokenCredentialsId', env: ['PIPER_token']]
        [type: 'token', id: 'detectTokenCredentialsId', env: ['PIPER_token']],
        [type: 'token', id: 'githubTokenCredentialsId', env: ['PIPER_githubToken']]
    ]
    piperExecuteBin(parameters, STEP_NAME, METADATA_FILE, credentials)
}