Mirror of https://github.com/SAP/jenkins-library.git (synced 2024-12-12 10:55:20 +02:00)
feat(whitesource): consolidated reporting and versioning alignment (#2571)
* update reporting and add todo comments
* enhance reporting, allow directory creation for reports
* properly pass reports
* update templating and increase verbosity of errors
* add todo
* add detail table
* update sorting
* add test and improve error message
* fix error message in test
* extend tests
* enhance tests
* enhance versioning behavior according to #1846
* create markdown overview report
* small fix
* fix small issue
* make sure that report directory exists
* align reporting directory with default directory from UA
* add missing comments
* add policy check incl. tests
* enhance logging and tests
* update versioning to allow custom version usage properly
* fix report paths and golang image
* update styling of md
* update test
This commit is contained in:
parent e9a93ed384
commit d47a17c8fc
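In short, the versioning alignment in this diff makes whitesourceExecuteScan resolve the version used for reporting with a simple precedence: an explicitly configured customScanVersion is taken as-is, otherwise the version (configured or read from the build descriptor) is reduced according to the configured versioningModel. A minimal sketch of that rule, assuming resolveVersion as a hypothetical helper for illustration only (the actual logic lives in resolveProjectIdentifiers and versioning.DetermineProjectCoordinatesWithCustomVersion below):

package main

import (
	"fmt"
	"strings"
)

// resolveVersion is a hypothetical helper, not part of this commit. It only
// illustrates the precedence rule: an explicit customScanVersion wins,
// otherwise the artifact version is reduced according to the versioning model
// (e.g. model "major" keeps "1" from "1.2.3-20200101").
func resolveVersion(customScanVersion, artifactVersion, versioningModel string) string {
	if customScanVersion != "" {
		return customScanVersion
	}
	if versioningModel == "major" {
		return strings.SplitN(artifactVersion, ".", 2)[0]
	}
	return artifactVersion
}

func main() {
	fmt.Println(resolveVersion("", "1.2.3-20200101", "major")) // "1"
	fmt.Println(resolveVersion("2.3.4", "1.0.42", "major"))    // "2.3.4"
}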
@@ -2,10 +2,11 @@ package cmd

import (
"fmt"
"github.com/SAP/jenkins-library/pkg/versioning"
"testing"
"time"

"github.com/SAP/jenkins-library/pkg/versioning"

"github.com/SAP/jenkins-library/pkg/telemetry"
"github.com/stretchr/testify/assert"
@@ -46,7 +47,7 @@ func (a *artifactVersioningMock) SetVersion(version string) error {
}

func (a *artifactVersioningMock) GetCoordinates() (versioning.Coordinates, error) {
return nil, fmt.Errorf("not implemented")
return versioning.Coordinates{}, fmt.Errorf("not implemented")
}

type gitRepositoryMock struct {
@@ -127,9 +127,7 @@ func getDetectScript(config detectExecuteScanOptions, utils detectUtils) error {

func addDetectArgs(args []string, config detectExecuteScanOptions, utils detectUtils) ([]string, error) {

coordinates := struct {
Version string
}{
coordinates := versioning.Coordinates{
Version: config.Version,
}
@@ -5,7 +5,6 @@ import (
"context"
"errors"
"fmt"
"github.com/SAP/jenkins-library/pkg/mock"
"io"
"io/ioutil"
"net/http"
@@ -15,6 +14,8 @@ import (
"testing"
"time"

"github.com/SAP/jenkins-library/pkg/mock"

"github.com/SAP/jenkins-library/pkg/fortify"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/SAP/jenkins-library/pkg/versioning"
@@ -51,17 +52,11 @@ func newFortifyTestUtilsBundle() fortifyTestUtilsBundle {
}

type artifactMock struct {
Coordinates coordinatesMock
Coordinates versioning.Coordinates
}

type coordinatesMock struct {
GroupID string
ArtifactID string
Version string
}

func newCoordinatesMock() coordinatesMock {
return coordinatesMock{
func newCoordinatesMock() versioning.Coordinates {
return versioning.Coordinates{
GroupID: "a",
ArtifactID: "b",
Version: "1.0.0",
@@ -9,6 +9,8 @@ import (

"github.com/SAP/jenkins-library/pkg/config"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/SAP/jenkins-library/pkg/reporting"
ws "github.com/SAP/jenkins-library/pkg/whitesource"
"github.com/pkg/errors"
"github.com/spf13/cobra"
)
@@ -178,4 +180,19 @@ func prepareOutputEnvironment(outputResources []config.StepResources, envRootPat
}
}
}

// prepare additional output directories known to possibly create permission issues when created from within a container
// ToDo: evaluate if we can rather call this only in the correct step context (we know the step when calling getConfig!)
// Could this be part of the container definition in the step.yaml?
stepOutputDirectories := []string{
reporting.MarkdownReportDirectory, // standard directory to collect md reports for pipelineCreateScanSummary
ws.ReportsDirectory, // standard directory for reports created by whitesourceExecuteScan
}

for _, dir := range stepOutputDirectories {
if _, err := os.Stat(dir); os.IsNotExist(err) {
log.Entry().Debugf("Creating directory: %v", dir)
os.MkdirAll(dir, 0777)
}
}
}
@@ -37,11 +37,9 @@ func pipelineCreateScanSummary(config pipelineCreateScanSummaryOptions, telemetr
}
}

const reportDir = ".pipeline/stepReports"

func runPipelineCreateScanSummary(config *pipelineCreateScanSummaryOptions, telemetryData *telemetry.CustomData, utils pipelineCreateScanSummaryUtils) error {

pattern := reportDir + "/*.json"
pattern := reporting.MarkdownReportDirectory + "/*.json"
reports, _ := utils.Glob(pattern)

scanReports := []reporting.ScanReport{}
@@ -61,7 +59,8 @@ func runPipelineCreateScanSummary(config *pipelineCreateScanSummaryOptions, tele
output := []byte{}
for _, scanReport := range scanReports {
if (config.FailedOnly && !scanReport.SuccessfulScan) || !config.FailedOnly {
output = append(output, scanReport.ToMarkdown()...)
mdReport, _ := scanReport.ToMarkdown()
output = append(output, mdReport...)
}
}
@@ -1,6 +1,7 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
@@ -18,12 +19,13 @@ import (
|
||||
"github.com/SAP/jenkins-library/pkg/log"
|
||||
"github.com/SAP/jenkins-library/pkg/npm"
|
||||
"github.com/SAP/jenkins-library/pkg/piperutils"
|
||||
"github.com/SAP/jenkins-library/pkg/reporting"
|
||||
"github.com/SAP/jenkins-library/pkg/telemetry"
|
||||
"github.com/SAP/jenkins-library/pkg/versioning"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// just to make the lines less long
|
||||
// ScanOptions is just used to make the lines less long
|
||||
type ScanOptions = whitesourceExecuteScanOptions
|
||||
|
||||
// whitesource defines the functions that are expected by the step implementation to
|
||||
@@ -38,12 +40,13 @@ type whitesource interface {
|
||||
GetProjectRiskReport(projectToken string) ([]byte, error)
|
||||
GetProjectVulnerabilityReport(projectToken string, format string) ([]byte, error)
|
||||
GetProjectAlerts(projectToken string) ([]ws.Alert, error)
|
||||
GetProjectAlertsByType(projectToken, alertType string) ([]ws.Alert, error)
|
||||
GetProjectLibraryLocations(projectToken string) ([]ws.Library, error)
|
||||
}
|
||||
|
||||
type whitesourceUtils interface {
|
||||
ws.Utils
|
||||
|
||||
DirExists(path string) (bool, error)
|
||||
GetArtifactCoordinates(buildTool, buildDescriptorFile string,
|
||||
options *versioning.Options) (versioning.Coordinates, error)
|
||||
|
||||
@@ -61,11 +64,10 @@ func (w *whitesourceUtilsBundle) FileOpen(name string, flag int, perm os.FileMod
|
||||
return os.OpenFile(name, flag, perm)
|
||||
}
|
||||
|
||||
func (w *whitesourceUtilsBundle) GetArtifactCoordinates(buildTool, buildDescriptorFile string,
|
||||
options *versioning.Options) (versioning.Coordinates, error) {
|
||||
func (w *whitesourceUtilsBundle) GetArtifactCoordinates(buildTool, buildDescriptorFile string, options *versioning.Options) (versioning.Coordinates, error) {
|
||||
artifact, err := versioning.GetArtifact(buildTool, buildDescriptorFile, options, w)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return versioning.Coordinates{}, err
|
||||
}
|
||||
return artifact.GetCoordinates()
|
||||
}
|
||||
@@ -106,7 +108,7 @@ func newWhitesourceUtils(config *ScanOptions) *whitesourceUtilsBundle {
|
||||
func newWhitesourceScan(config *ScanOptions) *ws.Scan {
|
||||
return &ws.Scan{
|
||||
AggregateProjectName: config.ProjectName,
|
||||
ProductVersion: config.ProductVersion,
|
||||
ProductVersion: config.Version,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -179,46 +181,62 @@ func runWhitesourceScan(config *ScanOptions, scan *ws.Scan, utils whitesourceUti
|
||||
}
|
||||
|
||||
log.Entry().Info("-----------------------------------------------------")
|
||||
log.Entry().Infof("Product Version: '%s'", config.ProductVersion)
|
||||
log.Entry().Infof("Product Version: '%s'", config.Version)
|
||||
log.Entry().Info("Scanned projects:")
|
||||
for _, project := range scan.ScannedProjects() {
|
||||
log.Entry().Infof(" Name: '%s', token: %s", project.Name, project.Token)
|
||||
}
|
||||
log.Entry().Info("-----------------------------------------------------")
|
||||
|
||||
if err := checkAndReportScanResults(config, scan, utils, sys); err != nil {
|
||||
paths, err := checkAndReportScanResults(config, scan, utils, sys)
|
||||
piperutils.PersistReportsAndLinks("whitesourceExecuteScan", "", paths, nil)
|
||||
persistScannedProjects(config, scan, commonPipelineEnvironment)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
persistScannedProjects(config, scan, commonPipelineEnvironment)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func checkAndReportScanResults(config *ScanOptions, scan *ws.Scan, utils whitesourceUtils, sys whitesource) error {
|
||||
func checkAndReportScanResults(config *ScanOptions, scan *ws.Scan, utils whitesourceUtils, sys whitesource) ([]piperutils.Path, error) {
|
||||
reportPaths := []piperutils.Path{}
|
||||
if !config.Reporting && !config.SecurityVulnerabilities {
|
||||
return nil
|
||||
return reportPaths, nil
|
||||
}
|
||||
// Wait for WhiteSource backend to propagate the changes before downloading any reports.
|
||||
if err := scan.BlockUntilReportsAreReady(sys); err != nil {
|
||||
return err
|
||||
return reportPaths, err
|
||||
}
|
||||
|
||||
if config.Reporting {
|
||||
paths, err := scan.DownloadReports(ws.ReportOptions{
|
||||
ReportDirectory: config.ReportDirectoryName,
|
||||
var err error
|
||||
reportPaths, err = scan.DownloadReports(ws.ReportOptions{
|
||||
ReportDirectory: ws.ReportsDirectory,
|
||||
VulnerabilityReportFormat: config.VulnerabilityReportFormat,
|
||||
}, utils, sys)
|
||||
if err != nil {
|
||||
return err
|
||||
return reportPaths, err
|
||||
}
|
||||
piperutils.PersistReportsAndLinks("whitesourceExecuteScan", "", paths, nil)
|
||||
}
|
||||
|
||||
checkErrors := []string{}
|
||||
|
||||
rPath, err := checkPolicyViolations(config, scan, sys, utils, reportPaths)
|
||||
if err != nil {
|
||||
checkErrors = append(checkErrors, fmt.Sprint(err))
|
||||
}
|
||||
reportPaths = append(reportPaths, rPath)
|
||||
|
||||
if config.SecurityVulnerabilities {
|
||||
if err := checkSecurityViolations(config, scan, sys); err != nil {
|
||||
return err
|
||||
rPaths, err := checkSecurityViolations(config, scan, sys, utils)
|
||||
reportPaths = append(reportPaths, rPaths...)
|
||||
if err != nil {
|
||||
checkErrors = append(checkErrors, fmt.Sprint(err))
|
||||
}
|
||||
}
|
||||
return nil
|
||||
if len(checkErrors) > 0 {
|
||||
return reportPaths, fmt.Errorf(strings.Join(checkErrors, ": "))
|
||||
}
|
||||
return reportPaths, nil
|
||||
}
|
||||
|
||||
func createWhiteSourceProduct(config *ScanOptions, sys whitesource) (string, error) {
|
||||
@@ -242,7 +260,11 @@ func createWhiteSourceProduct(config *ScanOptions, sys whitesource) (string, err
|
||||
}
|
||||
|
||||
func resolveProjectIdentifiers(config *ScanOptions, scan *ws.Scan, utils whitesourceUtils, sys whitesource) error {
|
||||
if scan.AggregateProjectName == "" || config.ProductVersion == "" {
|
||||
if len(scan.AggregateProjectName) > 0 && (len(config.Version)+len(config.CustomScanVersion) > 0) {
|
||||
if config.Version == "" {
|
||||
config.Version = config.CustomScanVersion
|
||||
}
|
||||
} else {
|
||||
options := &versioning.Options{
|
||||
DockerImage: config.ScanImage,
|
||||
ProjectSettingsFile: config.ProjectSettingsFile,
|
||||
@@ -254,21 +276,23 @@ func resolveProjectIdentifiers(config *ScanOptions, scan *ws.Scan, utils whiteso
|
||||
return fmt.Errorf("failed to get build artifact description: %w", err)
|
||||
}
|
||||
|
||||
//ToDo: fill version in coordinates with version from pipeline environment
|
||||
if len(config.Version) > 0 {
|
||||
log.Entry().Infof("Resolving product version from default provided '%s' with versioning '%s'", config.Version, config.VersioningModel)
|
||||
coordinates.Version = config.Version
|
||||
}
|
||||
|
||||
nameTmpl := `{{list .GroupID .ArtifactID | join "-" | trimAll "-"}}`
|
||||
name, version := versioning.DetermineProjectCoordinates(nameTmpl, config.VersioningModel, coordinates)
|
||||
name, version := versioning.DetermineProjectCoordinatesWithCustomVersion(nameTmpl, config.VersioningModel, config.CustomScanVersion, coordinates)
|
||||
if scan.AggregateProjectName == "" {
|
||||
log.Entry().Infof("Resolved project name '%s' from descriptor file", name)
|
||||
scan.AggregateProjectName = name
|
||||
}
|
||||
if config.ProductVersion == "" {
|
||||
log.Entry().Infof("Resolved product version '%s' from descriptor file with versioning '%s'",
|
||||
version, config.VersioningModel)
|
||||
config.ProductVersion = version
|
||||
}
|
||||
|
||||
config.Version = version
|
||||
log.Entry().Infof("Resolved product version '%s'", version)
|
||||
}
|
||||
scan.ProductVersion = validateProductVersion(config.ProductVersion)
|
||||
|
||||
scan.ProductVersion = validateProductVersion(config.Version)
|
||||
|
||||
if err := resolveProductToken(config, sys); err != nil {
|
||||
return err
|
||||
@@ -333,7 +357,7 @@ func resolveAggregateProjectToken(config *ScanOptions, sys whitesource) error {
|
||||
return nil
|
||||
}
|
||||
log.Entry().Infof("Attempting to resolve project token for project '%s'..", config.ProjectName)
|
||||
fullProjName := fmt.Sprintf("%s - %s", config.ProjectName, config.ProductVersion)
|
||||
fullProjName := fmt.Sprintf("%s - %s", config.ProjectName, config.Version)
|
||||
projectToken, err := sys.GetProjectToken(config.ProductToken, fullProjName)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -367,7 +391,7 @@ func wsScanOptions(config *ScanOptions) *ws.ScanOptions {
|
||||
UserToken: config.UserToken,
|
||||
ProductName: config.ProductName,
|
||||
ProductToken: config.ProductToken,
|
||||
ProductVersion: config.ProductVersion,
|
||||
ProductVersion: config.Version,
|
||||
ProjectName: config.ProjectName,
|
||||
BuildDescriptorFile: config.BuildDescriptorFile,
|
||||
BuildDescriptorExcludeList: config.BuildDescriptorExcludeList,
|
||||
@@ -428,66 +452,106 @@ func executeScan(config *ScanOptions, scan *ws.Scan, utils whitesourceUtils) err
|
||||
return nil
|
||||
}
|
||||
|
||||
func checkSecurityViolations(config *ScanOptions, scan *ws.Scan, sys whitesource) error {
|
||||
func checkPolicyViolations(config *ScanOptions, scan *ws.Scan, sys whitesource, utils whitesourceUtils, reportPaths []piperutils.Path) (piperutils.Path, error) {
|
||||
|
||||
policyViolationCount := 0
|
||||
for _, project := range scan.ScannedProjects() {
|
||||
alerts, err := sys.GetProjectAlertsByType(project.Token, "REJECTED_BY_POLICY_RESOURCE")
|
||||
if err != nil {
|
||||
return piperutils.Path{}, fmt.Errorf("failed to retrieve project policy alerts from WhiteSource: %w", err)
|
||||
}
|
||||
policyViolationCount += len(alerts)
|
||||
}
|
||||
|
||||
violations := struct {
|
||||
PolicyViolations int `json:"policyViolations"`
|
||||
Reports []string `json:"reports"`
|
||||
}{
|
||||
PolicyViolations: policyViolationCount,
|
||||
Reports: []string{},
|
||||
}
|
||||
for _, report := range reportPaths {
|
||||
_, reportFile := filepath.Split(report.Target)
|
||||
violations.Reports = append(violations.Reports, reportFile)
|
||||
}
|
||||
|
||||
violationContent, err := json.Marshal(violations)
|
||||
if err != nil {
|
||||
return piperutils.Path{}, fmt.Errorf("failed to marshal policy violation data: %w", err)
|
||||
}
|
||||
|
||||
jsonViolationReportPath := filepath.Join(ws.ReportsDirectory, "whitesource-ip.json")
|
||||
err = utils.FileWrite(jsonViolationReportPath, violationContent, 0666)
|
||||
if err != nil {
|
||||
return piperutils.Path{}, fmt.Errorf("failed to write policy violation report: %w", err)
|
||||
}
|
||||
|
||||
policyReport := piperutils.Path{Name: "WhiteSource Policy Violation Report", Target: jsonViolationReportPath}
|
||||
|
||||
if policyViolationCount > 0 {
|
||||
log.SetErrorCategory(log.ErrorCompliance)
|
||||
return policyReport, fmt.Errorf("%v policy violation(s) found", policyViolationCount)
|
||||
}
|
||||
|
||||
return policyReport, nil
|
||||
}
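For reference, the whitesource-ip.json written by checkPolicyViolations above only carries the violation count and the base file names of the downloaded reports (the directory part is stripped via filepath.Split); with two PDF reports and no violations it would look roughly like this, using the sample values from the unit test further down:

{"policyViolations":0,"reports":["report1.pdf","report2.pdf"]}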
|
||||
|
||||
func checkSecurityViolations(config *ScanOptions, scan *ws.Scan, sys whitesource, utils whitesourceUtils) ([]piperutils.Path, error) {
|
||||
var reportPaths []piperutils.Path
|
||||
// Check for security vulnerabilities and fail the build if cvssSeverityLimit threshold is crossed
|
||||
// convert config.CvssSeverityLimit to float64
|
||||
cvssSeverityLimit, err := strconv.ParseFloat(config.CvssSeverityLimit, 64)
|
||||
if err != nil {
|
||||
log.SetErrorCategory(log.ErrorConfiguration)
|
||||
return fmt.Errorf("failed to parse parameter cvssSeverityLimit (%s) "+
|
||||
return reportPaths, fmt.Errorf("failed to parse parameter cvssSeverityLimit (%s) "+
|
||||
"as floating point number: %w", config.CvssSeverityLimit, err)
|
||||
}
|
||||
|
||||
if config.ProjectToken != "" {
|
||||
project := ws.Project{Name: config.ProjectName, Token: config.ProjectToken}
|
||||
if _, err := checkProjectSecurityViolations(cvssSeverityLimit, project, sys); err != nil {
|
||||
return err
|
||||
// ToDo: see if HTML report generation is really required here
|
||||
// we anyway need to do some refactoring here since config.ProjectToken != "" essentially indicates an aggregated project
|
||||
if _, _, err := checkProjectSecurityViolations(cvssSeverityLimit, project, sys); err != nil {
|
||||
return reportPaths, err
|
||||
}
|
||||
} else {
|
||||
vulnerabilitiesCount := 0
|
||||
var errorsOccured []string
|
||||
allAlerts := []ws.Alert{}
|
||||
for _, project := range scan.ScannedProjects() {
|
||||
// collect errors and aggregate vulnerabilities from all projects
|
||||
if vulCount, err := checkProjectSecurityViolations(cvssSeverityLimit, project, sys); err != nil {
|
||||
if vulCount, alerts, err := checkProjectSecurityViolations(cvssSeverityLimit, project, sys); err != nil {
|
||||
allAlerts = append(allAlerts, alerts...)
|
||||
vulnerabilitiesCount += vulCount
|
||||
errorsOccured = append(errorsOccured, fmt.Sprint(err))
|
||||
}
|
||||
}
|
||||
|
||||
scanReport := createCustomVulnerabilityReport(config, scan, allAlerts, cvssSeverityLimit, utils)
|
||||
reportPaths, err = writeCustomVulnerabilityReports(scanReport, utils)
|
||||
if err != nil {
|
||||
errorsOccured = append(errorsOccured, fmt.Sprint(err))
|
||||
}
|
||||
|
||||
if len(errorsOccured) > 0 {
|
||||
if vulnerabilitiesCount > 0 {
|
||||
log.SetErrorCategory(log.ErrorCompliance)
|
||||
}
|
||||
return fmt.Errorf(strings.Join(errorsOccured, ": "))
|
||||
return reportPaths, fmt.Errorf(strings.Join(errorsOccured, ": "))
|
||||
}
|
||||
}
|
||||
return nil
|
||||
return reportPaths, nil
|
||||
}
|
||||
|
||||
// checkSecurityViolations checks security violations and returns an error if the configured severity limit is crossed.
|
||||
func checkProjectSecurityViolations(cvssSeverityLimit float64, project ws.Project, sys whitesource) (int, error) {
|
||||
func checkProjectSecurityViolations(cvssSeverityLimit float64, project ws.Project, sys whitesource) (int, []ws.Alert, error) {
|
||||
// get project alerts (vulnerabilities)
|
||||
//ToDo: use getProjectAlertsByType with alertType : "SECURITY_VULNERABILITY"?
|
||||
//ToDo: also return reference to alerts in order to use it for reporting later
|
||||
alerts, err := sys.GetProjectAlerts(project.Token)
|
||||
alerts, err := sys.GetProjectAlertsByType(project.Token, "SECURITY_VULNERABILITY")
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("failed to retrieve project alerts from Whitesource: %w", err)
|
||||
return 0, alerts, fmt.Errorf("failed to retrieve project alerts from WhiteSource: %w", err)
|
||||
}
|
||||
|
||||
severeVulnerabilities := 0
|
||||
// https://github.com/SAP/jenkins-library/blob/master/vars/whitesourceExecuteScan.groovy#L537
|
||||
for _, alert := range alerts {
|
||||
vuln := alert.Vulnerability
|
||||
if (vuln.Score >= cvssSeverityLimit || vuln.CVSS3Score >= cvssSeverityLimit) && cvssSeverityLimit >= 0 {
|
||||
log.Entry().Infof("Vulnerability with Score %v / CVSS3Score %v treated as severe",
|
||||
vuln.Score, vuln.CVSS3Score)
|
||||
severeVulnerabilities++
|
||||
} else {
|
||||
log.Entry().Infof("Ignoring vulnerability with Score %v / CVSS3Score %v",
|
||||
vuln.Score, vuln.CVSS3Score)
|
||||
}
|
||||
}
|
||||
|
||||
//https://github.com/SAP/jenkins-library/blob/master/vars/whitesourceExecuteScan.groovy#L547
|
||||
nonSevereVulnerabilities := len(alerts) - severeVulnerabilities
|
||||
severeVulnerabilities, nonSevereVulnerabilities := countSecurityVulnerabilities(&alerts, cvssSeverityLimit)
|
||||
if nonSevereVulnerabilities > 0 {
|
||||
log.Entry().Warnf("WARNING: %v Open Source Software Security vulnerabilities with "+
|
||||
"CVSS score below threshold %.1f detected in project %s.", nonSevereVulnerabilities,
|
||||
@@ -499,15 +563,163 @@ func checkProjectSecurityViolations(cvssSeverityLimit float64, project ws.Projec
|
||||
|
||||
// https://github.com/SAP/jenkins-library/blob/master/vars/whitesourceExecuteScan.groovy#L558
|
||||
if severeVulnerabilities > 0 {
|
||||
return severeVulnerabilities, fmt.Errorf("%v Open Source Software Security vulnerabilities with CVSS score greater "+
|
||||
return severeVulnerabilities, alerts, fmt.Errorf("%v Open Source Software Security vulnerabilities with CVSS score greater "+
|
||||
"or equal to %.1f detected in project %s",
|
||||
severeVulnerabilities, cvssSeverityLimit, project.Name)
|
||||
}
|
||||
return 0, nil
|
||||
return 0, alerts, nil
|
||||
}
|
||||
|
||||
func countSecurityVulnerabilities(alerts *[]ws.Alert, cvssSeverityLimit float64) (int, int) {
|
||||
severeVulnerabilities := 0
|
||||
for _, alert := range *alerts {
|
||||
if isSevereVulnerability(alert, cvssSeverityLimit) {
|
||||
severeVulnerabilities++
|
||||
}
|
||||
}
|
||||
|
||||
nonSevereVulnerabilities := len(*alerts) - severeVulnerabilities
|
||||
return severeVulnerabilities, nonSevereVulnerabilities
|
||||
}
|
||||
|
||||
func isSevereVulnerability(alert ws.Alert, cvssSeverityLimit float64) bool {
|
||||
|
||||
if vulnerabilityScore(alert) >= cvssSeverityLimit && cvssSeverityLimit >= 0 {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func createCustomVulnerabilityReport(config *ScanOptions, scan *ws.Scan, alerts []ws.Alert, cvssSeverityLimit float64, utils whitesourceUtils) reporting.ScanReport {
|
||||
|
||||
severe, _ := countSecurityVulnerabilities(&alerts, cvssSeverityLimit)
|
||||
|
||||
// sort according to vulnerability severity
|
||||
sort.Slice(alerts, func(i, j int) bool {
|
||||
return vulnerabilityScore(alerts[i]) > vulnerabilityScore(alerts[j])
|
||||
})
|
||||
|
||||
projectNames := []string{}
|
||||
for _, project := range scan.ScannedProjects() {
|
||||
projectNames = append(projectNames, project.Name)
|
||||
}
|
||||
// Sorting helps the list become stable across pipeline runs (and in the unit tests)
|
||||
sort.Strings(projectNames)
|
||||
|
||||
scanReport := reporting.ScanReport{
|
||||
Title: "WhiteSource Security Vulnerability Report",
|
||||
Subheaders: []reporting.Subheader{
|
||||
{Description: "WhiteSource product name", Details: config.ProductName},
|
||||
{Description: "Filtered project names", Details: strings.Join(projectNames, ", ")},
|
||||
},
|
||||
Overview: []reporting.OverviewRow{
|
||||
{Description: "Total number of vulnerabilities", Details: fmt.Sprint(len(alerts))},
|
||||
{Description: "Total number of high/critical vulnerabilities with CVSS score >= 7.0", Details: fmt.Sprint(severe)},
|
||||
},
|
||||
ReportTime: utils.Now(),
|
||||
}
|
||||
|
||||
detailTable := reporting.ScanDetailTable{
|
||||
NoRowsMessage: "No publicly known vulnerabilities detected",
|
||||
Headers: []string{
|
||||
"Date",
|
||||
"CVE",
|
||||
"CVSS Score",
|
||||
"CVSS Version",
|
||||
"Project",
|
||||
"Library file name",
|
||||
"Library group ID",
|
||||
"Library artifact ID",
|
||||
"Library version",
|
||||
"Description",
|
||||
"Top fix",
|
||||
},
|
||||
WithCounter: true,
|
||||
CounterHeader: "Entry #",
|
||||
}
|
||||
|
||||
for _, alert := range alerts {
|
||||
var score float64
|
||||
var scoreStyle reporting.ColumnStyle = reporting.Yellow
|
||||
if isSevereVulnerability(alert, cvssSeverityLimit) {
|
||||
scoreStyle = reporting.Red
|
||||
}
|
||||
var cveVersion string
|
||||
if alert.Vulnerability.CVSS3Score > 0 {
|
||||
score = alert.Vulnerability.CVSS3Score
|
||||
cveVersion = "v3"
|
||||
} else {
|
||||
score = alert.Vulnerability.Score
|
||||
cveVersion = "v2"
|
||||
}
|
||||
|
||||
var topFix string
|
||||
emptyFix := ws.Fix{}
|
||||
if alert.Vulnerability.TopFix != emptyFix {
|
||||
topFix = fmt.Sprintf(`%v<br>%v<br><a href="%v">%v</a>`, alert.Vulnerability.TopFix.Message, alert.Vulnerability.TopFix.FixResolution, alert.Vulnerability.TopFix.URL, alert.Vulnerability.TopFix.URL)
|
||||
}
|
||||
|
||||
row := reporting.ScanRow{}
|
||||
row.AddColumn(alert.Vulnerability.PublishDate, 0)
|
||||
row.AddColumn(fmt.Sprintf(`<a href="%v">%v</a>`, alert.Vulnerability.URL, alert.Vulnerability.Name), 0)
|
||||
row.AddColumn(score, scoreStyle)
|
||||
row.AddColumn(cveVersion, 0)
|
||||
row.AddColumn(alert.Project, 0)
|
||||
row.AddColumn(alert.Library.Filename, 0)
|
||||
row.AddColumn(alert.Library.GroupID, 0)
|
||||
row.AddColumn(alert.Library.ArtifactID, 0)
|
||||
row.AddColumn(alert.Library.Version, 0)
|
||||
row.AddColumn(alert.Vulnerability.Description, 0)
|
||||
row.AddColumn(topFix, 0)
|
||||
|
||||
detailTable.Rows = append(detailTable.Rows, row)
|
||||
}
|
||||
scanReport.DetailTable = detailTable
|
||||
|
||||
return scanReport
|
||||
}
|
||||
|
||||
func writeCustomVulnerabilityReports(scanReport reporting.ScanReport, utils whitesourceUtils) ([]piperutils.Path, error) {
|
||||
reportPaths := []piperutils.Path{}
|
||||
|
||||
// ignore templating errors since template is in our hands and issues will be detected with the automated tests
|
||||
htmlReport, _ := scanReport.ToHTML()
|
||||
htmlReportPath := filepath.Join(ws.ReportsDirectory, "piper_whitesource_vulnerability_report.html")
|
||||
if err := utils.FileWrite(htmlReportPath, htmlReport, 0666); err != nil {
|
||||
log.SetErrorCategory(log.ErrorConfiguration)
|
||||
return reportPaths, errors.Wrapf(err, "failed to write html report")
|
||||
}
|
||||
reportPaths = append(reportPaths, piperutils.Path{Name: "WhiteSource Vulnerability Report", Target: htmlReportPath})
|
||||
|
||||
// markdown reports are used by step pipelineCreateSummary in order to e.g. prepare an issue creation in GitHub
|
||||
// ignore templating errors since template is in our hands and issues will be detected with the automated tests
|
||||
mdReport, _ := scanReport.ToMarkdown()
|
||||
if exists, _ := utils.DirExists(reporting.MarkdownReportDirectory); !exists {
|
||||
err := utils.MkdirAll(reporting.MarkdownReportDirectory, 0777)
|
||||
if err != nil {
|
||||
return reportPaths, errors.Wrap(err, "failed to create reporting directory")
|
||||
}
|
||||
}
|
||||
if err := utils.FileWrite(filepath.Join(reporting.MarkdownReportDirectory, fmt.Sprintf("whitesourceExecuteScan_%v.md", utils.Now().Format("20060102150405"))), mdReport, 0666); err != nil {
|
||||
log.SetErrorCategory(log.ErrorConfiguration)
|
||||
return reportPaths, errors.Wrapf(err, "failed to write markdown report")
|
||||
}
|
||||
// we do not add the markdown report to the overall list of reports for now,
|
||||
// since it is just an intermediary report used as input for later
|
||||
// and there does not seem to be real benefit in archiving it.
|
||||
|
||||
return reportPaths, nil
|
||||
}
|
||||
|
||||
func vulnerabilityScore(alert ws.Alert) float64 {
|
||||
if alert.Vulnerability.CVSS3Score > 0 {
|
||||
return alert.Vulnerability.CVSS3Score
|
||||
}
|
||||
return alert.Vulnerability.Score
|
||||
}
|
||||
|
||||
func aggregateVersionWideLibraries(config *ScanOptions, utils whitesourceUtils, sys whitesource) error {
|
||||
log.Entry().Infof("Aggregating list of libraries used for all projects with version: %s", config.ProductVersion)
|
||||
log.Entry().Infof("Aggregating list of libraries used for all projects with version: %s", config.Version)
|
||||
|
||||
projects, err := sys.GetProjectsMetaInfo(config.ProductToken)
|
||||
if err != nil {
|
||||
@@ -518,7 +730,7 @@ func aggregateVersionWideLibraries(config *ScanOptions, utils whitesourceUtils,
|
||||
for _, project := range projects {
|
||||
projectVersion := strings.Split(project.Name, " - ")[1]
|
||||
projectName := strings.Split(project.Name, " - ")[0]
|
||||
if projectVersion == config.ProductVersion {
|
||||
if projectVersion == config.Version {
|
||||
libs, err := sys.GetProjectLibraryLocations(project.Token)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -534,7 +746,7 @@ func aggregateVersionWideLibraries(config *ScanOptions, utils whitesourceU
|
||||
}
|
||||
|
||||
func aggregateVersionWideVulnerabilities(config *ScanOptions, utils whitesourceUtils, sys whitesource) error {
|
||||
log.Entry().Infof("Aggregating list of vulnerabilities for all projects with version: %s", config.ProductVersion)
|
||||
log.Entry().Infof("Aggregating list of vulnerabilities for all projects with version: %s", config.Version)
|
||||
|
||||
projects, err := sys.GetProjectsMetaInfo(config.ProductToken)
|
||||
if err != nil {
|
||||
@@ -545,7 +757,7 @@ func aggregateVersionWideVulnerabilities(config *ScanOptions, utils whitesourceU
|
||||
projectNames := `` // holds all project tokens considered a part of the report for debugging
|
||||
for _, project := range projects {
|
||||
projectVersion := strings.Split(project.Name, " - ")[1]
|
||||
if projectVersion == config.ProductVersion {
|
||||
if projectVersion == config.Version {
|
||||
projectNames += project.Name + "\n"
|
||||
alerts, err := sys.GetProjectAlerts(project.Token)
|
||||
if err != nil {
|
||||
@@ -556,7 +768,7 @@ func aggregateVersionWideVulnerabilities(config *ScanOptions, utils whitesourceU
|
||||
}
|
||||
}
|
||||
|
||||
reportPath := filepath.Join(config.ReportDirectoryName, "project-names-aggregated.txt")
|
||||
reportPath := filepath.Join(ws.ReportsDirectory, "project-names-aggregated.txt")
|
||||
if err := utils.FileWrite(reportPath, []byte(projectNames), 0666); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -586,11 +798,11 @@ func newVulnerabilityExcelReport(alerts []ws.Alert, config *ScanOptions, utils w
|
||||
return err
|
||||
}
|
||||
|
||||
if err := utils.MkdirAll(config.ReportDirectoryName, 0777); err != nil {
|
||||
if err := utils.MkdirAll(ws.ReportsDirectory, 0777); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
fileName := filepath.Join(config.ReportDirectoryName,
|
||||
fileName := filepath.Join(ws.ReportsDirectory,
|
||||
fmt.Sprintf("vulnerabilities-%s.xlsx", utils.Now().Format(wsReportTimeStampLayout)))
|
||||
stream, err := utils.FileOpen(fileName, os.O_WRONLY|os.O_TRUNC|os.O_CREATE, 0666)
|
||||
if err != nil {
|
||||
@@ -636,7 +848,7 @@ func fillVulnerabilityExcelReport(alerts []ws.Alert, streamWriter *excelize.Stre
|
||||
return nil
|
||||
}
|
||||
|
||||
// outputs a slice of libraries to an Excel file based on projects with version == config.ProductVersion
|
||||
// outputs a slice of libraries to an Excel file based on projects with version == config.Version
|
||||
func newLibraryCSVReport(libraries map[string][]ws.Library, config *ScanOptions, utils whitesourceUtils) error {
|
||||
output := "Library Name, Project Name\n"
|
||||
for projectName, libraries := range libraries {
|
||||
@@ -647,12 +859,12 @@ func newLibraryCSVReport(libraries map[string][]ws.Library, config *ScanOptions,
|
||||
}
|
||||
|
||||
// Ensure reporting directory exists
|
||||
if err := utils.MkdirAll(config.ReportDirectoryName, 0777); err != nil {
|
||||
if err := utils.MkdirAll(ws.ReportsDirectory, 0777); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Write result to file
|
||||
fileName := fmt.Sprintf("%s/libraries-%s.csv", config.ReportDirectoryName,
|
||||
fileName := fmt.Sprintf("%s/libraries-%s.csv", ws.ReportsDirectory,
|
||||
utils.Now().Format(wsReportTimeStampLayout))
|
||||
if err := utils.FileWrite(fileName, []byte(output), 0666); err != nil {
|
||||
return err
|
||||
@@ -660,12 +872,12 @@ func newLibraryCSVReport(libraries map[string][]ws.Library, config *ScanOptions,
|
||||
return nil
|
||||
}
|
||||
|
||||
// persistScannedProjects writes all actually scanned WhiteSource project names as comma separated
|
||||
// string into the Common Pipeline Environment, from where it can be used by subsequent steps.
|
||||
// persistScannedProjects writes all actually scanned WhiteSource project names as list
|
||||
// into the Common Pipeline Environment, from where it can be used by subsequent steps.
|
||||
func persistScannedProjects(config *ScanOptions, scan *ws.Scan, commonPipelineEnvironment *whitesourceExecuteScanCommonPipelineEnvironment) {
|
||||
projectNames := []string{}
|
||||
if config.ProjectName != "" {
|
||||
projectNames = []string{config.ProjectName + " - " + config.ProductVersion}
|
||||
projectNames = []string{config.ProjectName + " - " + config.Version}
|
||||
} else {
|
||||
for _, project := range scan.ScannedProjects() {
|
||||
projectNames = append(projectNames, project.Name)
|
||||
|
@@ -26,6 +26,7 @@ type whitesourceExecuteScanOptions struct {
|
||||
BuildTool string `json:"buildTool,omitempty"`
|
||||
ConfigFilePath string `json:"configFilePath,omitempty"`
|
||||
CreateProductFromPipeline bool `json:"createProductFromPipeline,omitempty"`
|
||||
CustomScanVersion string `json:"customScanVersion,omitempty"`
|
||||
CvssSeverityLimit string `json:"cvssSeverityLimit,omitempty"`
|
||||
EmailAddressesOfInitialProductAdmins []string `json:"emailAddressesOfInitialProductAdmins,omitempty"`
|
||||
Excludes []string `json:"excludes,omitempty"`
|
||||
@@ -37,10 +38,9 @@ type whitesourceExecuteScanOptions struct {
|
||||
ParallelLimit string `json:"parallelLimit,omitempty"`
|
||||
ProductName string `json:"productName,omitempty"`
|
||||
ProductToken string `json:"productToken,omitempty"`
|
||||
ProductVersion string `json:"productVersion,omitempty"`
|
||||
Version string `json:"version,omitempty"`
|
||||
ProjectName string `json:"projectName,omitempty"`
|
||||
ProjectToken string `json:"projectToken,omitempty"`
|
||||
ReportDirectoryName string `json:"reportDirectoryName,omitempty"`
|
||||
Reporting bool `json:"reporting,omitempty"`
|
||||
ScanImage string `json:"scanImage,omitempty"`
|
||||
ScanImageIncludeLayers bool `json:"scanImageIncludeLayers,omitempty"`
|
||||
@@ -168,6 +168,7 @@ func addWhitesourceExecuteScanFlags(cmd *cobra.Command, stepConfig *whitesourceE
|
||||
cmd.Flags().StringVar(&stepConfig.BuildTool, "buildTool", os.Getenv("PIPER_buildTool"), "Defines the tool which is used for building the artifact.")
|
||||
cmd.Flags().StringVar(&stepConfig.ConfigFilePath, "configFilePath", `./wss-unified-agent.config`, "Explicit path to the WhiteSource Unified Agent configuration file.")
|
||||
cmd.Flags().BoolVar(&stepConfig.CreateProductFromPipeline, "createProductFromPipeline", true, "Whether to create the related WhiteSource product on the fly based on the supplied pipeline configuration.")
|
||||
cmd.Flags().StringVar(&stepConfig.CustomScanVersion, "customScanVersion", os.Getenv("PIPER_customScanVersion"), "Custom version of the WhiteSource project used as source.")
|
||||
cmd.Flags().StringVar(&stepConfig.CvssSeverityLimit, "cvssSeverityLimit", `-1`, "Limit of tolerable CVSS v3 score upon assessment and in consequence fails the build, defaults to `-1`.")
|
||||
cmd.Flags().StringSliceVar(&stepConfig.EmailAddressesOfInitialProductAdmins, "emailAddressesOfInitialProductAdmins", []string{}, "The list of email addresses to assign as product admins for newly created WhiteSource products.")
|
||||
cmd.Flags().StringSliceVar(&stepConfig.Excludes, "excludes", []string{}, "List of file path patterns to exclude in the scan.")
|
||||
@@ -179,10 +180,9 @@ func addWhitesourceExecuteScanFlags(cmd *cobra.Command, stepConfig *whitesourceE
|
||||
cmd.Flags().StringVar(&stepConfig.ParallelLimit, "parallelLimit", `15`, "[NOT IMPLEMENTED] Limit of parallel jobs being run at once in case of `scanType: 'mta'` based scenarios, defaults to `15`.")
|
||||
cmd.Flags().StringVar(&stepConfig.ProductName, "productName", os.Getenv("PIPER_productName"), "Name of the WhiteSource product used for results aggregation. This parameter is mandatory if the parameter `createProductFromPipeline` is set to `true` and the WhiteSource product does not yet exist. It is also mandatory if the parameter `productToken` is not provided.")
|
||||
cmd.Flags().StringVar(&stepConfig.ProductToken, "productToken", os.Getenv("PIPER_productToken"), "Token of the WhiteSource product to be created and used for results aggregation, usually determined automatically. Can optionally be provided as an alternative to `productName`.")
|
||||
cmd.Flags().StringVar(&stepConfig.ProductVersion, "productVersion", os.Getenv("PIPER_productVersion"), "Version of the WhiteSource product to be created and used for results aggregation.")
|
||||
cmd.Flags().StringVar(&stepConfig.Version, "version", os.Getenv("PIPER_version"), "Version of the WhiteSource product to be created and used for results aggregation.")
|
||||
cmd.Flags().StringVar(&stepConfig.ProjectName, "projectName", os.Getenv("PIPER_projectName"), "The project name used for reporting results in WhiteSource. When provided, all source modules will be scanned into one aggregated WhiteSource project. For scan types `maven`, `mta`, `npm`, the default is to generate one WhiteSource project per module, whereas the project name is derived from the module's build descriptor. For NPM modules, project aggregation is not supported, the last scanned NPM module will override all previously aggregated scan results!")
|
||||
cmd.Flags().StringVar(&stepConfig.ProjectToken, "projectToken", os.Getenv("PIPER_projectToken"), "Project token to execute scan on. Ignored for scan types `maven`, `mta` and `npm`. Used for project aggregation when scanning with the Unified Agent and can be provided as an alternative to `projectName`.")
|
||||
cmd.Flags().StringVar(&stepConfig.ReportDirectoryName, "reportDirectoryName", `whitesource-reports`, "Name of the directory to save vulnerability/risk reports to")
|
||||
cmd.Flags().BoolVar(&stepConfig.Reporting, "reporting", true, "Whether assessment is being done at all, defaults to `true`")
|
||||
cmd.Flags().StringVar(&stepConfig.ScanImage, "scanImage", os.Getenv("PIPER_scanImage"), "For `buildTool: docker`: Defines the docker image which should be scanned.")
|
||||
cmd.Flags().BoolVar(&stepConfig.ScanImageIncludeLayers, "scanImageIncludeLayers", true, "For `buildTool: docker`: Defines if layers should be included.")
|
||||
@@ -191,7 +191,7 @@ func addWhitesourceExecuteScanFlags(cmd *cobra.Command, stepConfig *whitesourceE
|
||||
cmd.Flags().BoolVar(&stepConfig.SecurityVulnerabilities, "securityVulnerabilities", true, "Whether security compliance is considered and reported as part of the assessment.")
|
||||
cmd.Flags().StringVar(&stepConfig.ServiceURL, "serviceUrl", `https://saas.whitesourcesoftware.com/api`, "URL to the WhiteSource API endpoint.")
|
||||
cmd.Flags().IntVar(&stepConfig.Timeout, "timeout", 900, "Timeout in seconds until an HTTP call is forcefully terminated.")
|
||||
cmd.Flags().StringVar(&stepConfig.UserToken, "userToken", os.Getenv("PIPER_userToken"), "WhiteSource token identifying the user executing the scan.")
|
||||
cmd.Flags().StringVar(&stepConfig.UserToken, "userToken", os.Getenv("PIPER_userToken"), "User token to access WhiteSource. In Jenkins use case this is automatically filled through the credentials.")
|
||||
cmd.Flags().StringVar(&stepConfig.VersioningModel, "versioningModel", `major`, "The default project versioning model used in case `projectVersion` parameter is empty for creating the version based on the build descriptor version to report results in Whitesource, can be one of `'major'`, `'major-minor'`, `'semantic'`, `'full'`")
|
||||
cmd.Flags().StringVar(&stepConfig.VulnerabilityReportFormat, "vulnerabilityReportFormat", `xlsx`, "Format of the file the vulnerability report is written to.")
|
||||
cmd.Flags().StringVar(&stepConfig.VulnerabilityReportTitle, "vulnerabilityReportTitle", `WhiteSource Security Vulnerability Report`, "Title of vulnerability report written during the assessment phase.")
|
||||
@@ -302,6 +302,14 @@ func whitesourceExecuteScanMetadata() config.StepData {
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
},
|
||||
{
|
||||
Name: "customScanVersion",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
},
|
||||
{
|
||||
Name: "cvssSeverityLimit",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
@@ -396,12 +404,17 @@ func whitesourceExecuteScanMetadata() config.StepData {
|
||||
Aliases: []config.Alias{{Name: "whitesourceProductToken"}, {Name: "whitesource/productToken"}},
|
||||
},
|
||||
{
|
||||
Name: "productVersion",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{{Name: "whitesourceProductVersion"}, {Name: "whitesource/productVersion"}},
|
||||
Name: "version",
|
||||
ResourceRef: []config.ResourceReference{
|
||||
{
|
||||
Name: "commonPipelineEnvironment",
|
||||
Param: "artifactVersion",
|
||||
},
|
||||
},
|
||||
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{{Name: "productVersion"}, {Name: "whitesourceProductVersion"}, {Name: "whitesource/productVersion"}},
|
||||
},
|
||||
{
|
||||
Name: "projectName",
|
||||
@@ -419,14 +432,6 @@ func whitesourceExecuteScanMetadata() config.StepData {
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
},
|
||||
{
|
||||
Name: "reportDirectoryName",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
},
|
||||
{
|
||||
Name: "reporting",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
@@ -498,6 +503,12 @@ func whitesourceExecuteScanMetadata() config.StepData {
|
||||
Name: "userTokenCredentialsId",
|
||||
Type: "secret",
|
||||
},
|
||||
|
||||
{
|
||||
Name: "",
|
||||
Paths: []string{"$(vaultPath)/whitesource", "$(vaultBasePath)/$(vaultPipelineName)/whitesource", "$(vaultBasePath)/GROUP-SECRETS/whitesource"},
|
||||
Type: "vaultSecret",
|
||||
},
|
||||
},
|
||||
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
@@ -573,7 +584,7 @@ func whitesourceExecuteScanMetadata() config.StepData {
|
||||
Containers: []config.Container{
|
||||
{Image: "buildpack-deps:stretch-curl", WorkingDir: "/tmp", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "buildTool", Value: "dub"}, {Name: "buildTool", Value: "docker"}}}}},
|
||||
{Image: "devxci/mbtci:1.0.14", WorkingDir: "/home/mta", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "buildTool", Value: "mta"}}}, {ConditionRef: "strings-equal", Params: []config.Param{{Name: "scanType", Value: "mta"}}}}},
|
||||
{Image: "golang:1", WorkingDir: "/go", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "buildTool", Value: "go"}}}}},
|
||||
{Image: "golang:1", WorkingDir: "/go", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "buildTool", Value: "golang"}}}}},
|
||||
{Image: "hseeberger/scala-sbt:8u181_2.12.8_1.2.8", WorkingDir: "/tmp", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "buildTool", Value: "sbt"}}}}},
|
||||
{Image: "maven:3.5-jdk-8", WorkingDir: "/tmp", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "buildTool", Value: "maven"}}}, {ConditionRef: "strings-equal", Params: []config.Param{{Name: "scanType", Value: "maven"}}}}},
|
||||
{Image: "node:lts-stretch", WorkingDir: "/home/node", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "buildTool", Value: "npm"}}}, {ConditionRef: "strings-equal", Params: []config.Param{{Name: "scanType", Value: "npm"}}}}},
|
||||
|
@@ -1,25 +1,22 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/SAP/jenkins-library/pkg/mock"
|
||||
"github.com/SAP/jenkins-library/pkg/piperutils"
|
||||
"github.com/SAP/jenkins-library/pkg/reporting"
|
||||
"github.com/SAP/jenkins-library/pkg/versioning"
|
||||
ws "github.com/SAP/jenkins-library/pkg/whitesource"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
type whitesourceCoordinatesMock struct {
|
||||
GroupID string
|
||||
ArtifactID string
|
||||
Version string
|
||||
}
|
||||
|
||||
type whitesourceUtilsMock struct {
|
||||
*ws.ScanUtilsMock
|
||||
coordinates whitesourceCoordinatesMock
|
||||
coordinates versioning.Coordinates
|
||||
usedBuildTool string
|
||||
usedBuildDescriptorFile string
|
||||
usedOptions versioning.Options
|
||||
@@ -46,7 +43,7 @@ func newWhitesourceUtilsMock() *whitesourceUtilsMock {
|
||||
FilesMock: &mock.FilesMock{},
|
||||
ExecMockRunner: &mock.ExecMockRunner{},
|
||||
},
|
||||
coordinates: whitesourceCoordinatesMock{
|
||||
coordinates: versioning.Coordinates{
|
||||
GroupID: "mock-group-id",
|
||||
ArtifactID: "mock-artifact-id",
|
||||
Version: "1.0.42",
|
||||
@@ -54,9 +51,158 @@ func newWhitesourceUtilsMock() *whitesourceUtilsMock {
|
||||
}
|
||||
}
|
||||
|
||||
func TestNewWhitesourceUtils(t *testing.T) {
|
||||
t.Parallel()
|
||||
config := ScanOptions{}
|
||||
utils := newWhitesourceUtils(&config)
|
||||
|
||||
assert.NotNil(t, utils.Client)
|
||||
assert.NotNil(t, utils.Command)
|
||||
assert.NotNil(t, utils.Files)
|
||||
}
|
||||
|
||||
func TestRunWhitesourceExecuteScan(t *testing.T) {
|
||||
t.Parallel()
|
||||
t.Run("fails for invalid configured project token", func(t *testing.T) {
|
||||
// init
|
||||
config := ScanOptions{
|
||||
ScanType: "unified-agent",
|
||||
BuildDescriptorFile: "my-mta.yml",
|
||||
VersioningModel: "major",
|
||||
ProductName: "mock-product",
|
||||
ProjectToken: "no-such-project-token",
|
||||
AgentDownloadURL: "https://whitesource.com/agent.jar",
|
||||
AgentFileName: "ua.jar",
|
||||
}
|
||||
utilsMock := newWhitesourceUtilsMock()
|
||||
utilsMock.AddFile("wss-generated-file.config", []byte("key=value"))
|
||||
systemMock := ws.NewSystemMock("ignored")
|
||||
scan := newWhitesourceScan(&config)
|
||||
cpe := whitesourceExecuteScanCommonPipelineEnvironment{}
|
||||
// test
|
||||
err := runWhitesourceExecuteScan(&config, scan, utilsMock, systemMock, &cpe)
|
||||
// assert
|
||||
assert.EqualError(t, err, "no project with token 'no-such-project-token' found in Whitesource")
|
||||
assert.Equal(t, "", config.ProjectName)
|
||||
assert.Equal(t, "", scan.AggregateProjectName)
|
||||
})
|
||||
t.Run("retrieves aggregate project name by configured token", func(t *testing.T) {
|
||||
// init
|
||||
config := ScanOptions{
|
||||
BuildDescriptorFile: "my-mta.yml",
|
||||
VersioningModel: "major",
|
||||
AgentDownloadURL: "https://whitesource.com/agent.jar",
|
||||
VulnerabilityReportFormat: "pdf",
|
||||
Reporting: true,
|
||||
AgentFileName: "ua.jar",
|
||||
ProductName: "mock-product",
|
||||
ProjectToken: "mock-project-token",
|
||||
ScanType: "unified-agent",
|
||||
}
|
||||
utilsMock := newWhitesourceUtilsMock()
|
||||
utilsMock.AddFile("wss-generated-file.config", []byte("key=value"))
|
||||
lastUpdatedDate := time.Now().Format(ws.DateTimeLayout)
|
||||
systemMock := ws.NewSystemMock(lastUpdatedDate)
|
||||
systemMock.Alerts = []ws.Alert{}
|
||||
scan := newWhitesourceScan(&config)
|
||||
cpe := whitesourceExecuteScanCommonPipelineEnvironment{}
|
||||
// test
|
||||
err := runWhitesourceExecuteScan(&config, scan, utilsMock, systemMock, &cpe)
|
||||
// assert
|
||||
assert.NoError(t, err)
|
||||
// Retrieved project name is stored in scan.AggregateProjectName, but not in config.ProjectName
|
||||
// in order to differentiate between aggregate-project scanning and multi-project scanning.
|
||||
assert.Equal(t, "", config.ProjectName)
|
||||
assert.Equal(t, "mock-project", scan.AggregateProjectName)
|
||||
if assert.Len(t, utilsMock.DownloadedFiles, 1) {
|
||||
assert.Equal(t, ws.DownloadedFile{
|
||||
SourceURL: "https://whitesource.com/agent.jar",
|
||||
FilePath: "ua.jar",
|
||||
}, utilsMock.DownloadedFiles[0])
|
||||
}
|
||||
if assert.Len(t, cpe.custom.whitesourceProjectNames, 1) {
|
||||
assert.Equal(t, []string{"mock-project - 1"}, cpe.custom.whitesourceProjectNames)
|
||||
}
|
||||
assert.True(t, utilsMock.HasWrittenFile(filepath.Join(ws.ReportsDirectory, "mock-project - 1-vulnerability-report.pdf")))
|
||||
})
|
||||
}
|
||||
|
||||
func TestCheckAndReportScanResults(t *testing.T) {
|
||||
t.Parallel()
|
||||
t.Run("no reports requested", func(t *testing.T) {
|
||||
// init
|
||||
config := &ScanOptions{
|
||||
ProductToken: "mock-product-token",
|
||||
ProjectToken: "mock-project-token",
|
||||
Version: "1",
|
||||
}
|
||||
scan := newWhitesourceScan(config)
|
||||
utils := newWhitesourceUtilsMock()
|
||||
system := ws.NewSystemMock(time.Now().Format(ws.DateTimeLayout))
|
||||
// test
|
||||
_, err := checkAndReportScanResults(config, scan, utils, system)
|
||||
// assert
|
||||
assert.NoError(t, err)
|
||||
vPath := filepath.Join(ws.ReportsDirectory, "mock-project-vulnerability-report.txt")
|
||||
assert.False(t, utils.HasWrittenFile(vPath))
|
||||
rPath := filepath.Join(ws.ReportsDirectory, "mock-project-risk-report.pdf")
|
||||
assert.False(t, utils.HasWrittenFile(rPath))
|
||||
})
|
||||
t.Run("check vulnerabilities - invalid limit", func(t *testing.T) {
|
||||
// init
|
||||
config := &ScanOptions{
|
||||
SecurityVulnerabilities: true,
|
||||
CvssSeverityLimit: "invalid",
|
||||
}
|
||||
scan := newWhitesourceScan(config)
|
||||
utils := newWhitesourceUtilsMock()
|
||||
system := ws.NewSystemMock(time.Now().Format(ws.DateTimeLayout))
|
||||
// test
|
||||
_, err := checkAndReportScanResults(config, scan, utils, system)
|
||||
// assert
|
||||
assert.EqualError(t, err, "failed to parse parameter cvssSeverityLimit (invalid) as floating point number: strconv.ParseFloat: parsing \"invalid\": invalid syntax")
|
||||
})
|
||||
t.Run("check vulnerabilities - limit not hit", func(t *testing.T) {
|
||||
// init
|
||||
config := &ScanOptions{
|
||||
ProductToken: "mock-product-token",
|
||||
ProjectToken: "mock-project-token",
|
||||
Version: "1",
|
||||
SecurityVulnerabilities: true,
|
||||
CvssSeverityLimit: "6.0",
|
||||
}
|
||||
scan := newWhitesourceScan(config)
|
||||
utils := newWhitesourceUtilsMock()
|
||||
system := ws.NewSystemMock(time.Now().Format(ws.DateTimeLayout))
|
||||
// test
|
||||
_, err := checkAndReportScanResults(config, scan, utils, system)
|
||||
// assert
|
||||
assert.NoError(t, err)
|
||||
})
|
||||
t.Run("check vulnerabilities - limit exceeded", func(t *testing.T) {
|
||||
// init
|
||||
config := &ScanOptions{
|
||||
ProductToken: "mock-product-token",
|
||||
ProjectName: "mock-project - 1",
|
||||
ProjectToken: "mock-project-token",
|
||||
Version: "1",
|
||||
SecurityVulnerabilities: true,
|
||||
CvssSeverityLimit: "4",
|
||||
}
|
||||
scan := newWhitesourceScan(config)
|
||||
utils := newWhitesourceUtilsMock()
|
||||
system := ws.NewSystemMock(time.Now().Format(ws.DateTimeLayout))
|
||||
// test
|
||||
_, err := checkAndReportScanResults(config, scan, utils, system)
|
||||
// assert
|
||||
assert.EqualError(t, err, "1 Open Source Software Security vulnerabilities with CVSS score greater or equal to 4.0 detected in project mock-project - 1")
|
||||
})
|
||||
}
|
||||
|
||||
func TestResolveProjectIdentifiers(t *testing.T) {
|
||||
t.Parallel()
|
||||
t.Run("happy path", func(t *testing.T) {
|
||||
t.Run("success", func(t *testing.T) {
|
||||
// init
|
||||
config := ScanOptions{
|
||||
BuildTool: "mta",
|
||||
@@ -75,7 +221,65 @@ func TestResolveProjectIdentifiers(t *testing.T) {
|
||||
// assert
|
||||
if assert.NoError(t, err) {
|
||||
assert.Equal(t, "mock-group-id-mock-artifact-id", scan.AggregateProjectName)
|
||||
assert.Equal(t, "1", config.ProductVersion)
|
||||
assert.Equal(t, "1", config.Version)
|
||||
assert.Equal(t, "mock-product-token", config.ProductToken)
|
||||
assert.Equal(t, "mta", utilsMock.usedBuildTool)
|
||||
assert.Equal(t, "my-mta.yml", utilsMock.usedBuildDescriptorFile)
|
||||
assert.Equal(t, "project-settings.xml", utilsMock.usedOptions.ProjectSettingsFile)
|
||||
assert.Equal(t, "global-settings.xml", utilsMock.usedOptions.GlobalSettingsFile)
|
||||
assert.Equal(t, "m2/path", utilsMock.usedOptions.M2Path)
|
||||
}
|
||||
})
|
||||
t.Run("success - with version from default", func(t *testing.T) {
|
||||
// init
|
||||
config := ScanOptions{
|
||||
BuildTool: "mta",
|
||||
BuildDescriptorFile: "my-mta.yml",
|
||||
Version: "1.2.3-20200101",
|
||||
VersioningModel: "major",
|
||||
ProductName: "mock-product",
|
||||
M2Path: "m2/path",
|
||||
ProjectSettingsFile: "project-settings.xml",
|
||||
GlobalSettingsFile: "global-settings.xml",
|
||||
}
|
||||
utilsMock := newWhitesourceUtilsMock()
|
||||
systemMock := ws.NewSystemMock("ignored")
|
||||
scan := newWhitesourceScan(&config)
|
||||
// test
|
||||
err := resolveProjectIdentifiers(&config, scan, utilsMock, systemMock)
|
||||
// assert
|
||||
if assert.NoError(t, err) {
|
||||
assert.Equal(t, "mock-group-id-mock-artifact-id", scan.AggregateProjectName)
|
||||
assert.Equal(t, "1", config.Version)
|
||||
assert.Equal(t, "mock-product-token", config.ProductToken)
|
||||
assert.Equal(t, "mta", utilsMock.usedBuildTool)
|
||||
assert.Equal(t, "my-mta.yml", utilsMock.usedBuildDescriptorFile)
|
||||
assert.Equal(t, "project-settings.xml", utilsMock.usedOptions.ProjectSettingsFile)
|
||||
assert.Equal(t, "global-settings.xml", utilsMock.usedOptions.GlobalSettingsFile)
|
||||
assert.Equal(t, "m2/path", utilsMock.usedOptions.M2Path)
|
||||
}
|
||||
})
|
||||
t.Run("success - with custom scan version", func(t *testing.T) {
|
||||
// init
|
||||
config := ScanOptions{
|
||||
BuildTool: "mta",
|
||||
BuildDescriptorFile: "my-mta.yml",
|
||||
CustomScanVersion: "2.3.4",
|
||||
VersioningModel: "major",
|
||||
ProductName: "mock-product",
|
||||
M2Path: "m2/path",
|
||||
ProjectSettingsFile: "project-settings.xml",
|
||||
GlobalSettingsFile: "global-settings.xml",
|
||||
}
|
||||
utilsMock := newWhitesourceUtilsMock()
|
||||
systemMock := ws.NewSystemMock("ignored")
|
||||
scan := newWhitesourceScan(&config)
|
||||
// test
|
||||
err := resolveProjectIdentifiers(&config, scan, utilsMock, systemMock)
|
||||
// assert
|
||||
if assert.NoError(t, err) {
|
||||
assert.Equal(t, "mock-group-id-mock-artifact-id", scan.AggregateProjectName)
|
||||
assert.Equal(t, "2.3.4", config.Version)
|
||||
assert.Equal(t, "mock-product-token", config.ProductToken)
|
||||
assert.Equal(t, "mta", utilsMock.usedBuildTool)
|
||||
assert.Equal(t, "my-mta.yml", utilsMock.usedBuildDescriptorFile)
|
||||
@@ -101,7 +305,7 @@ func TestResolveProjectIdentifiers(t *testing.T) {
|
||||
// assert
|
||||
if assert.NoError(t, err) {
|
||||
assert.Equal(t, "mock-project", scan.AggregateProjectName)
|
||||
assert.Equal(t, "1", config.ProductVersion)
|
||||
assert.Equal(t, "1", config.Version)
|
||||
assert.Equal(t, "mock-product-token", config.ProductToken)
|
||||
assert.Equal(t, "mta", utilsMock.usedBuildTool)
|
||||
assert.Equal(t, "my-mta.yml", utilsMock.usedBuildDescriptorFile)
|
||||
@@ -145,118 +349,359 @@ func TestResolveProjectIdentifiers(t *testing.T) {
|
||||
})
|
||||
}
|
||||
|
||||
func TestRunWhitesourceExecuteScan(t *testing.T) {
func TestCheckPolicyViolations(t *testing.T) {
t.Parallel()
t.Run("fails for invalid configured project token", func(t *testing.T) {
// init
config := ScanOptions{
ScanType: "unified-agent",
BuildDescriptorFile: "my-mta.yml",
VersioningModel: "major",
ProductName: "mock-product",
ProjectToken: "no-such-project-token",
AgentDownloadURL: "https://whitesource.com/agent.jar",
AgentFileName: "ua.jar",
}
utilsMock := newWhitesourceUtilsMock()
utilsMock.AddFile("wss-generated-file.config", []byte("key=value"))

t.Run("success - no violations", func(t *testing.T) {
config := ScanOptions{}
scan := newWhitesourceScan(&config)
scan.AppendScannedProject("testProject1")
systemMock := ws.NewSystemMock("ignored")
scan := newWhitesourceScan(&config)
cpe := whitesourceExecuteScanCommonPipelineEnvironment{}
// test
err := runWhitesourceExecuteScan(&config, scan, utilsMock, systemMock, &cpe)
// assert
assert.EqualError(t, err, "no project with token 'no-such-project-token' found in Whitesource")
assert.Equal(t, "", config.ProjectName)
assert.Equal(t, "", scan.AggregateProjectName)
systemMock.Alerts = []ws.Alert{}
utilsMock := newWhitesourceUtilsMock()
reportPaths := []piperutils.Path{
{Target: filepath.Join("whitesource", "report1.pdf")},
{Target: filepath.Join("whitesource", "report2.pdf")},
}

path, err := checkPolicyViolations(&config, scan, systemMock, utilsMock, reportPaths)
assert.NoError(t, err)
assert.Equal(t, filepath.Join(ws.ReportsDirectory, "whitesource-ip.json"), path.Target)

fileContent, _ := utilsMock.FileRead(path.Target)
content := string(fileContent)
assert.Contains(t, content, `"policyViolations":0`)
assert.Contains(t, content, `"reports":["report1.pdf","report2.pdf"]`)
})
t.Run("retrieves aggregate project name by configured token", func(t *testing.T) {
|
||||
// init
|
||||
config := ScanOptions{
|
||||
BuildDescriptorFile: "my-mta.yml",
|
||||
VersioningModel: "major",
|
||||
AgentDownloadURL: "https://whitesource.com/agent.jar",
|
||||
ReportDirectoryName: "ws-reports",
|
||||
VulnerabilityReportFormat: "pdf",
|
||||
Reporting: true,
|
||||
AgentFileName: "ua.jar",
|
||||
ProductName: "mock-product",
|
||||
ProjectToken: "mock-project-token",
|
||||
ScanType: "unified-agent",
|
||||
|
||||
t.Run("success - no reports", func(t *testing.T) {
|
||||
config := ScanOptions{}
|
||||
scan := newWhitesourceScan(&config)
|
||||
scan.AppendScannedProject("testProject1")
|
||||
systemMock := ws.NewSystemMock("ignored")
|
||||
systemMock.Alerts = []ws.Alert{}
|
||||
utilsMock := newWhitesourceUtilsMock()
|
||||
reportPaths := []piperutils.Path{}
|
||||
|
||||
path, err := checkPolicyViolations(&config, scan, systemMock, utilsMock, reportPaths)
|
||||
assert.NoError(t, err)
|
||||
|
||||
fileContent, _ := utilsMock.FileRead(path.Target)
|
||||
content := string(fileContent)
|
||||
assert.Contains(t, content, `reports":[]`)
|
||||
})
|
||||
|
||||
t.Run("error - policy violations", func(t *testing.T) {
|
||||
config := ScanOptions{}
|
||||
scan := newWhitesourceScan(&config)
|
||||
scan.AppendScannedProject("testProject1")
|
||||
systemMock := ws.NewSystemMock("ignored")
|
||||
systemMock.Alerts = []ws.Alert{
|
||||
{Vulnerability: ws.Vulnerability{Name: "policyVul1"}},
|
||||
{Vulnerability: ws.Vulnerability{Name: "policyVul2"}},
|
||||
}
|
||||
utilsMock := newWhitesourceUtilsMock()
|
||||
utilsMock.AddFile("wss-generated-file.config", []byte("key=value"))
|
||||
lastUpdatedDate := time.Now().Format(ws.DateTimeLayout)
|
||||
systemMock := ws.NewSystemMock(lastUpdatedDate)
|
||||
reportPaths := []piperutils.Path{
|
||||
{Target: "report1.pdf"},
|
||||
{Target: "report2.pdf"},
|
||||
}
|
||||
|
||||
path, err := checkPolicyViolations(&config, scan, systemMock, utilsMock, reportPaths)
|
||||
assert.Contains(t, fmt.Sprint(err), "2 policy violation(s) found")
|
||||
|
||||
fileContent, _ := utilsMock.FileRead(path.Target)
|
||||
content := string(fileContent)
|
||||
assert.Contains(t, content, `"policyViolations":2`)
|
||||
assert.Contains(t, content, `"reports":["report1.pdf","report2.pdf"]`)
|
||||
})
|
||||
|
||||
t.Run("error - get alerts", func(t *testing.T) {
|
||||
config := ScanOptions{}
|
||||
scan := newWhitesourceScan(&config)
|
||||
cpe := whitesourceExecuteScanCommonPipelineEnvironment{}
|
||||
// test
|
||||
err := runWhitesourceExecuteScan(&config, scan, utilsMock, systemMock, &cpe)
|
||||
// assert
|
||||
assert.NoError(t, err)
|
||||
// Retrieved project name is stored in scan.AggregateProjectName, but not in config.ProjectName
|
||||
// in order to differentiate between aggregate-project scanning and multi-project scanning.
|
||||
assert.Equal(t, "", config.ProjectName)
|
||||
assert.Equal(t, "mock-project", scan.AggregateProjectName)
|
||||
if assert.Len(t, utilsMock.DownloadedFiles, 1) {
|
||||
assert.Equal(t, ws.DownloadedFile{
|
||||
SourceURL: "https://whitesource.com/agent.jar",
|
||||
FilePath: "ua.jar",
|
||||
}, utilsMock.DownloadedFiles[0])
|
||||
}
|
||||
if assert.Len(t, cpe.custom.whitesourceProjectNames, 1) {
|
||||
assert.Equal(t, []string{"mock-project - 1"}, cpe.custom.whitesourceProjectNames)
|
||||
}
|
||||
assert.True(t, utilsMock.HasWrittenFile("ws-reports/mock-project - 1-vulnerability-report.pdf"))
|
||||
assert.True(t, utilsMock.HasWrittenFile("ws-reports/mock-project - 1-risk-report.pdf"))
|
||||
scan.AppendScannedProject("testProject1")
|
||||
systemMock := ws.NewSystemMock("ignored")
|
||||
systemMock.AlertError = fmt.Errorf("failed to read alerts")
|
||||
utilsMock := newWhitesourceUtilsMock()
|
||||
reportPaths := []piperutils.Path{}
|
||||
|
||||
_, err := checkPolicyViolations(&config, scan, systemMock, utilsMock, reportPaths)
|
||||
assert.Contains(t, fmt.Sprint(err), "failed to retrieve project policy alerts from WhiteSource")
|
||||
})
|
||||
|
||||
t.Run("error - write file", func(t *testing.T) {
|
||||
config := ScanOptions{}
|
||||
scan := newWhitesourceScan(&config)
|
||||
scan.AppendScannedProject("testProject1")
|
||||
systemMock := ws.NewSystemMock("ignored")
|
||||
systemMock.Alerts = []ws.Alert{}
|
||||
utilsMock := newWhitesourceUtilsMock()
|
||||
utilsMock.FileWriteError = fmt.Errorf("failed to write file")
|
||||
reportPaths := []piperutils.Path{}
|
||||
|
||||
_, err := checkPolicyViolations(&config, scan, systemMock, utilsMock, reportPaths)
|
||||
assert.Contains(t, fmt.Sprint(err), "failed to write policy violation report:")
|
||||
})
|
||||
}
|
||||
|
||||
func TestPersistScannedProjects(t *testing.T) {
func TestCheckSecurityViolations(t *testing.T) {
t.Parallel()
t.Run("write 1 scanned projects", func(t *testing.T) {
// init
cpe := whitesourceExecuteScanCommonPipelineEnvironment{}
config := &ScanOptions{ProductVersion: "1"}
scan := newWhitesourceScan(config)
_ = scan.AppendScannedProject("project")
// test
persistScannedProjects(config, scan, &cpe)
// assert
assert.Equal(t, []string{"project - 1"}, cpe.custom.whitesourceProjectNames)

t.Run("success - non-aggregated", func(t *testing.T) {
config := ScanOptions{
CvssSeverityLimit: "7",
}
scan := newWhitesourceScan(&config)
scan.AppendScannedProject("testProject1")
systemMock := ws.NewSystemMock("ignored")
systemMock.Alerts = []ws.Alert{
{Vulnerability: ws.Vulnerability{Name: "vul1", CVSS3Score: 6.0}},
}
utilsMock := newWhitesourceUtilsMock()

reportPaths, err := checkSecurityViolations(&config, scan, systemMock, utilsMock)
assert.NoError(t, err)
fileContent, err := utilsMock.FileRead(reportPaths[0].Target)
assert.NoError(t, err)
assert.True(t, len(fileContent) > 0)
})
t.Run("write 2 scanned projects", func(t *testing.T) {
// init
cpe := whitesourceExecuteScanCommonPipelineEnvironment{}
config := &ScanOptions{ProductVersion: "1"}
scan := newWhitesourceScan(config)
_ = scan.AppendScannedProject("project-app")
_ = scan.AppendScannedProject("project-db")
// test
persistScannedProjects(config, scan, &cpe)
// assert
assert.Equal(t, []string{"project-app - 1", "project-db - 1"}, cpe.custom.whitesourceProjectNames)
t.Run("success - aggregated", func(t *testing.T) {
|
||||
config := ScanOptions{
|
||||
CvssSeverityLimit: "7",
|
||||
ProjectToken: "theProjectToken",
|
||||
}
|
||||
scan := newWhitesourceScan(&config)
|
||||
systemMock := ws.NewSystemMock("ignored")
|
||||
systemMock.Alerts = []ws.Alert{
|
||||
{Vulnerability: ws.Vulnerability{Name: "vul1", CVSS3Score: 6.0}},
|
||||
}
|
||||
utilsMock := newWhitesourceUtilsMock()
|
||||
|
||||
reportPaths, err := checkSecurityViolations(&config, scan, systemMock, utilsMock)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, 0, len(reportPaths))
|
||||
})
|
||||
t.Run("write no projects", func(t *testing.T) {
|
||||
// init
|
||||
cpe := whitesourceExecuteScanCommonPipelineEnvironment{}
|
||||
config := &ScanOptions{ProductVersion: "1"}
|
||||
scan := newWhitesourceScan(config)
|
||||
// test
|
||||
persistScannedProjects(config, scan, &cpe)
|
||||
// assert
|
||||
assert.Equal(t, []string{}, cpe.custom.whitesourceProjectNames)
|
||||
|
||||
t.Run("error - wrong limit", func(t *testing.T) {
|
||||
config := ScanOptions{CvssSeverityLimit: "x"}
|
||||
scan := newWhitesourceScan(&config)
|
||||
systemMock := ws.NewSystemMock("ignored")
|
||||
utilsMock := newWhitesourceUtilsMock()
|
||||
|
||||
_, err := checkSecurityViolations(&config, scan, systemMock, utilsMock)
|
||||
assert.Contains(t, fmt.Sprint(err), "failed to parse parameter cvssSeverityLimit")
|
||||
|
||||
})
|
||||
t.Run("write aggregated project", func(t *testing.T) {
|
||||
// init
|
||||
cpe := whitesourceExecuteScanCommonPipelineEnvironment{}
|
||||
config := &ScanOptions{ProjectName: "project", ProductVersion: "1"}
|
||||
scan := newWhitesourceScan(config)
|
||||
// test
|
||||
persistScannedProjects(config, scan, &cpe)
|
||||
// assert
|
||||
assert.Equal(t, []string{"project - 1"}, cpe.custom.whitesourceProjectNames)
|
||||
|
||||
t.Run("error - non-aggregated", func(t *testing.T) {
|
||||
config := ScanOptions{
|
||||
CvssSeverityLimit: "5",
|
||||
}
|
||||
scan := newWhitesourceScan(&config)
|
||||
scan.AppendScannedProject("testProject1")
|
||||
systemMock := ws.NewSystemMock("ignored")
|
||||
systemMock.Alerts = []ws.Alert{
|
||||
{Vulnerability: ws.Vulnerability{Name: "vul1", CVSS3Score: 6.0}},
|
||||
}
|
||||
utilsMock := newWhitesourceUtilsMock()
|
||||
|
||||
reportPaths, err := checkSecurityViolations(&config, scan, systemMock, utilsMock)
|
||||
assert.Contains(t, fmt.Sprint(err), "1 Open Source Software Security vulnerabilities")
|
||||
fileContent, err := utilsMock.FileRead(reportPaths[0].Target)
|
||||
assert.NoError(t, err)
|
||||
assert.True(t, len(fileContent) > 0)
|
||||
})
|
||||
|
||||
t.Run("error - aggregated", func(t *testing.T) {
|
||||
config := ScanOptions{
|
||||
CvssSeverityLimit: "5",
|
||||
ProjectToken: "theProjectToken",
|
||||
}
|
||||
scan := newWhitesourceScan(&config)
|
||||
systemMock := ws.NewSystemMock("ignored")
|
||||
systemMock.Alerts = []ws.Alert{
|
||||
{Vulnerability: ws.Vulnerability{Name: "vul1", CVSS3Score: 6.0}},
|
||||
}
|
||||
utilsMock := newWhitesourceUtilsMock()
|
||||
|
||||
reportPaths, err := checkSecurityViolations(&config, scan, systemMock, utilsMock)
|
||||
assert.Contains(t, fmt.Sprint(err), "1 Open Source Software Security vulnerabilities")
|
||||
assert.Equal(t, 0, len(reportPaths))
|
||||
})
|
||||
}
|
||||
|
||||
func TestCheckProjectSecurityViolations(t *testing.T) {
project := ws.Project{Name: "testProject - 1", Token: "testToken"}

t.Run("success - no alerts", func(t *testing.T) {
systemMock := ws.NewSystemMock("ignored")
systemMock.Alerts = []ws.Alert{}

severeVulnerabilities, alerts, err := checkProjectSecurityViolations(7.0, project, systemMock)
assert.NoError(t, err)
assert.Equal(t, 0, severeVulnerabilities)
assert.Equal(t, 0, len(alerts))
})

t.Run("error - some vulnerabilities", func(t *testing.T) {
systemMock := ws.NewSystemMock("ignored")
systemMock.Alerts = []ws.Alert{
{Vulnerability: ws.Vulnerability{CVSS3Score: 7}},
{Vulnerability: ws.Vulnerability{CVSS3Score: 6}},
}

severeVulnerabilities, alerts, err := checkProjectSecurityViolations(7.0, project, systemMock)
assert.Contains(t, fmt.Sprint(err), "1 Open Source Software Security vulnerabilities")
assert.Equal(t, 1, severeVulnerabilities)
assert.Equal(t, 2, len(alerts))
})

t.Run("error - WhiteSource failure", func(t *testing.T) {
systemMock := ws.NewSystemMock("ignored")
systemMock.AlertError = fmt.Errorf("failed to read alerts")
_, _, err := checkProjectSecurityViolations(7.0, project, systemMock)
assert.Contains(t, fmt.Sprint(err), "failed to retrieve project alerts from WhiteSource")
})

}

func TestCountSecurityVulnerabilities(t *testing.T) {
t.Parallel()

alerts := []ws.Alert{
{Vulnerability: ws.Vulnerability{CVSS3Score: 7.1}},
{Vulnerability: ws.Vulnerability{CVSS3Score: 7}},
{Vulnerability: ws.Vulnerability{CVSS3Score: 6}},
}

severe, nonSevere := countSecurityVulnerabilities(&alerts, 7.0)
assert.Equal(t, 2, severe)
assert.Equal(t, 1, nonSevere)
}
func TestIsSevereVulnerability(t *testing.T) {
tt := []struct {
alert ws.Alert
limit float64
expected bool
}{
{alert: ws.Alert{Vulnerability: ws.Vulnerability{CVSS3Score: 0}}, limit: 0, expected: true},
{alert: ws.Alert{Vulnerability: ws.Vulnerability{CVSS3Score: 6.9, Score: 6}}, limit: 7.0, expected: false},
{alert: ws.Alert{Vulnerability: ws.Vulnerability{CVSS3Score: 7.0, Score: 6}}, limit: 7.0, expected: true},
{alert: ws.Alert{Vulnerability: ws.Vulnerability{CVSS3Score: 7.1, Score: 6}}, limit: 7.0, expected: true},
{alert: ws.Alert{Vulnerability: ws.Vulnerability{CVSS3Score: 6, Score: 6.9}}, limit: 7.0, expected: false},
{alert: ws.Alert{Vulnerability: ws.Vulnerability{CVSS3Score: 6, Score: 7.0}}, limit: 7.0, expected: false},
{alert: ws.Alert{Vulnerability: ws.Vulnerability{CVSS3Score: 6, Score: 7.1}}, limit: 7.0, expected: false},
{alert: ws.Alert{Vulnerability: ws.Vulnerability{Score: 6.9}}, limit: 7.0, expected: false},
{alert: ws.Alert{Vulnerability: ws.Vulnerability{Score: 7.0}}, limit: 7.0, expected: true},
{alert: ws.Alert{Vulnerability: ws.Vulnerability{Score: 7.1}}, limit: 7.0, expected: true},
}

for i, test := range tt {
assert.Equalf(t, test.expected, isSevereVulnerability(test.alert, test.limit), "run %v failed", i)
}
}
func TestCreateCustomVulnerabilityReport(t *testing.T) {
t.Parallel()

t.Run("success case", func(t *testing.T) {
config := &ScanOptions{}
scan := newWhitesourceScan(config)
scan.AppendScannedProject("testProject")
alerts := []ws.Alert{
{Library: ws.Library{Filename: "vul1"}, Vulnerability: ws.Vulnerability{CVSS3Score: 7.0, Score: 6}},
{Library: ws.Library{Filename: "vul2"}, Vulnerability: ws.Vulnerability{CVSS3Score: 8.0, TopFix: ws.Fix{Message: "this is the top fix"}}},
{Library: ws.Library{Filename: "vul3"}, Vulnerability: ws.Vulnerability{Score: 6}},
}
utilsMock := newWhitesourceUtilsMock()

scanReport := createCustomVulnerabilityReport(config, scan, alerts, 7.0, utilsMock)

assert.Equal(t, "WhiteSource Security Vulnerability Report", scanReport.Title)
assert.Equal(t, 3, len(scanReport.DetailTable.Rows))

// assert that library info is filled and sorting has been executed
assert.Equal(t, "vul2", scanReport.DetailTable.Rows[0].Columns[5].Content)
assert.Equal(t, "vul1", scanReport.DetailTable.Rows[1].Columns[5].Content)
assert.Equal(t, "vul3", scanReport.DetailTable.Rows[2].Columns[5].Content)

// assert that CVSS version identification has been done
assert.Equal(t, "v3", scanReport.DetailTable.Rows[0].Columns[3].Content)
assert.Equal(t, "v3", scanReport.DetailTable.Rows[1].Columns[3].Content)
assert.Equal(t, "v2", scanReport.DetailTable.Rows[2].Columns[3].Content)

// assert proper rating and styling of high prio issues
assert.Equal(t, "8", scanReport.DetailTable.Rows[0].Columns[2].Content)
assert.Equal(t, "7", scanReport.DetailTable.Rows[1].Columns[2].Content)
assert.Equal(t, "6", scanReport.DetailTable.Rows[2].Columns[2].Content)
assert.Equal(t, "red-cell", scanReport.DetailTable.Rows[0].Columns[2].Style.String())
assert.Equal(t, "red-cell", scanReport.DetailTable.Rows[1].Columns[2].Style.String())
assert.Equal(t, "yellow-cell", scanReport.DetailTable.Rows[2].Columns[2].Style.String())

assert.Contains(t, scanReport.DetailTable.Rows[0].Columns[10].Content, "this is the top fix")

})
}
func TestWriteCustomVulnerabilityReports(t *testing.T) {

t.Run("success", func(t *testing.T) {
scanReport := reporting.ScanReport{}
utilsMock := newWhitesourceUtilsMock()

reportPaths, err := writeCustomVulnerabilityReports(scanReport, utilsMock)

assert.NoError(t, err)
assert.Equal(t, 1, len(reportPaths))

exists, err := utilsMock.FileExists(reportPaths[0].Target)
assert.NoError(t, err)
assert.True(t, exists)

exists, err = utilsMock.FileExists(filepath.Join(reporting.MarkdownReportDirectory, "whitesourceExecuteScan_20100510001542.md"))
assert.NoError(t, err)
assert.True(t, exists)
})

t.Run("failed to write HTML report", func(t *testing.T) {
scanReport := reporting.ScanReport{}
utilsMock := newWhitesourceUtilsMock()
utilsMock.FileWriteErrors = map[string]error{
filepath.Join(ws.ReportsDirectory, "piper_whitesource_vulnerability_report.html"): fmt.Errorf("write error"),
}

_, err := writeCustomVulnerabilityReports(scanReport, utilsMock)
assert.Contains(t, fmt.Sprint(err), "failed to write html report")
})

t.Run("failed to write markdown report", func(t *testing.T) {
scanReport := reporting.ScanReport{}
utilsMock := newWhitesourceUtilsMock()
utilsMock.FileWriteErrors = map[string]error{
filepath.Join(reporting.MarkdownReportDirectory, "whitesourceExecuteScan_20100510001542.md"): fmt.Errorf("write error"),
}

_, err := writeCustomVulnerabilityReports(scanReport, utilsMock)
assert.Contains(t, fmt.Sprint(err), "failed to write markdown report")
})

}
func TestVulnerabilityScore(t *testing.T) {
t.Parallel()

tt := []struct {
alert ws.Alert
expected float64
}{
{alert: ws.Alert{Vulnerability: ws.Vulnerability{CVSS3Score: 7.0, Score: 6}}, expected: 7.0},
{alert: ws.Alert{Vulnerability: ws.Vulnerability{CVSS3Score: 7.0}}, expected: 7.0},
{alert: ws.Alert{Vulnerability: ws.Vulnerability{Score: 6}}, expected: 6},
}
for i, test := range tt {
assert.Equalf(t, test.expected, vulnerabilityScore(test.alert), "run %v failed", i)
}
}

func TestAggregateVersionWideLibraries(t *testing.T) {
@ -264,16 +709,15 @@ func TestAggregateVersionWideLibraries(t *testing.T) {
t.Run("happy path", func(t *testing.T) {
// init
config := &ScanOptions{
ProductToken: "mock-product-token",
ProductVersion: "1",
ReportDirectoryName: "mock-reports",
ProductToken: "mock-product-token",
Version: "1",
}
utils := newWhitesourceUtilsMock()
system := ws.NewSystemMock("2010-05-30 00:15:00 +0100")
// test
err := aggregateVersionWideLibraries(config, utils, system)
// assert
resource := filepath.Join("mock-reports", "libraries-20100510-001542.csv")
resource := filepath.Join(ws.ReportsDirectory, "libraries-20100510-001542.csv")
if assert.NoError(t, err) && assert.True(t, utils.HasWrittenFile(resource)) {
contents, _ := utils.FileRead(resource)
asString := string(contents)
@ -287,100 +731,71 @@ func TestAggregateVersionWideVulnerabilities(t *testing.T) {
t.Run("happy path", func(t *testing.T) {
// init
config := &ScanOptions{
ProductToken: "mock-product-token",
ProductVersion: "1",
ReportDirectoryName: "mock-reports",
ProductToken: "mock-product-token",
Version: "1",
}
utils := newWhitesourceUtilsMock()
system := ws.NewSystemMock("2010-05-30 00:15:00 +0100")
// test
err := aggregateVersionWideVulnerabilities(config, utils, system)
// assert
resource := filepath.Join("mock-reports", "project-names-aggregated.txt")
resource := filepath.Join(ws.ReportsDirectory, "project-names-aggregated.txt")
assert.NoError(t, err)
if assert.True(t, utils.HasWrittenFile(resource)) {
contents, _ := utils.FileRead(resource)
asString := string(contents)
assert.Equal(t, "mock-project - 1\n", asString)
}
reportSheet := filepath.Join("mock-reports", "vulnerabilities-20100510-001542.xlsx")
reportSheet := filepath.Join(ws.ReportsDirectory, "vulnerabilities-20100510-001542.xlsx")
sheetContents, err := utils.FileRead(reportSheet)
assert.NoError(t, err)
assert.NotEmpty(t, sheetContents)
})
}
func TestCheckAndReportScanResults(t *testing.T) {
func TestPersistScannedProjects(t *testing.T) {
t.Parallel()
t.Run("no reports requested", func(t *testing.T) {
t.Run("write 1 scanned projects", func(t *testing.T) {
// init
config := &ScanOptions{
ProductToken: "mock-product-token",
ProjectToken: "mock-project-token",
ProductVersion: "1",
ReportDirectoryName: "mock-reports",
}
cpe := whitesourceExecuteScanCommonPipelineEnvironment{}
config := &ScanOptions{Version: "1"}
scan := newWhitesourceScan(config)
utils := newWhitesourceUtilsMock()
system := ws.NewSystemMock(time.Now().Format(ws.DateTimeLayout))
_ = scan.AppendScannedProject("project")
// test
err := checkAndReportScanResults(config, scan, utils, system)
persistScannedProjects(config, scan, &cpe)
// assert
assert.NoError(t, err)
vPath := filepath.Join("mock-reports", "mock-project-vulnerability-report.txt")
assert.False(t, utils.HasWrittenFile(vPath))
rPath := filepath.Join("mock-reports", "mock-project-risk-report.pdf")
assert.False(t, utils.HasWrittenFile(rPath))
assert.Equal(t, []string{"project - 1"}, cpe.custom.whitesourceProjectNames)
})
t.Run("check vulnerabilities - invalid limit", func(t *testing.T) {
t.Run("write 2 scanned projects", func(t *testing.T) {
// init
config := &ScanOptions{
SecurityVulnerabilities: true,
CvssSeverityLimit: "invalid",
}
cpe := whitesourceExecuteScanCommonPipelineEnvironment{}
config := &ScanOptions{Version: "1"}
scan := newWhitesourceScan(config)
utils := newWhitesourceUtilsMock()
system := ws.NewSystemMock(time.Now().Format(ws.DateTimeLayout))
_ = scan.AppendScannedProject("project-app")
_ = scan.AppendScannedProject("project-db")
// test
err := checkAndReportScanResults(config, scan, utils, system)
persistScannedProjects(config, scan, &cpe)
// assert
assert.EqualError(t, err, "failed to parse parameter cvssSeverityLimit (invalid) as floating point number: strconv.ParseFloat: parsing \"invalid\": invalid syntax")
assert.Equal(t, []string{"project-app - 1", "project-db - 1"}, cpe.custom.whitesourceProjectNames)
})
t.Run("check vulnerabilities - limit not hit", func(t *testing.T) {
|
||||
t.Run("write no projects", func(t *testing.T) {
|
||||
// init
|
||||
config := &ScanOptions{
|
||||
ProductToken: "mock-product-token",
|
||||
ProjectToken: "mock-project-token",
|
||||
ProductVersion: "1",
|
||||
ReportDirectoryName: "mock-reports",
|
||||
SecurityVulnerabilities: true,
|
||||
CvssSeverityLimit: "6.0",
|
||||
}
|
||||
cpe := whitesourceExecuteScanCommonPipelineEnvironment{}
|
||||
config := &ScanOptions{Version: "1"}
|
||||
scan := newWhitesourceScan(config)
|
||||
utils := newWhitesourceUtilsMock()
|
||||
system := ws.NewSystemMock(time.Now().Format(ws.DateTimeLayout))
|
||||
// test
|
||||
err := checkAndReportScanResults(config, scan, utils, system)
|
||||
persistScannedProjects(config, scan, &cpe)
|
||||
// assert
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, []string{}, cpe.custom.whitesourceProjectNames)
|
||||
})
|
||||
t.Run("check vulnerabilities - limit exceeded", func(t *testing.T) {
|
||||
t.Run("write aggregated project", func(t *testing.T) {
|
||||
// init
|
||||
config := &ScanOptions{
|
||||
ProductToken: "mock-product-token",
|
||||
ProjectName: "mock-project - 1",
|
||||
ProjectToken: "mock-project-token",
|
||||
ProductVersion: "1",
|
||||
ReportDirectoryName: "mock-reports",
|
||||
SecurityVulnerabilities: true,
|
||||
CvssSeverityLimit: "4",
|
||||
}
|
||||
cpe := whitesourceExecuteScanCommonPipelineEnvironment{}
|
||||
config := &ScanOptions{ProjectName: "project", Version: "1"}
|
||||
scan := newWhitesourceScan(config)
|
||||
utils := newWhitesourceUtilsMock()
|
||||
system := ws.NewSystemMock(time.Now().Format(ws.DateTimeLayout))
|
||||
// test
|
||||
err := checkAndReportScanResults(config, scan, utils, system)
|
||||
persistScannedProjects(config, scan, &cpe)
|
||||
// assert
|
||||
assert.EqualError(t, err, "1 Open Source Software Security vulnerabilities with CVSS score greater or equal to 4.0 detected in project mock-project - 1")
|
||||
assert.Equal(t, []string{"project - 1"}, cpe.custom.whitesourceProjectNames)
|
||||
})
|
||||
}
|
||||
|
@ -13,8 +13,8 @@ import (
|
||||
type ScanReport struct {
|
||||
StepName string `json:"stepName"`
|
||||
Title string `json:"title"`
|
||||
Subheaders []string `json:"subheaders"`
|
||||
Overview []string `json:"overview"`
|
||||
Subheaders []Subheader `json:"subheaders"`
|
||||
Overview []OverviewRow `json:"overview"`
|
||||
FurtherInfo string `json:"furtherInfo"`
|
||||
ReportTime time.Time `json:"reportTime"`
|
||||
DetailTable ScanDetailTable `json:"detailTable"`
|
||||
@ -35,6 +35,14 @@ type ScanRow struct {
|
||||
Columns []ScanCell `json:"columns"`
|
||||
}
|
||||
|
||||
// AddColumn adds a column to a dedicated ScanRow
|
||||
func (s *ScanRow) AddColumn(content interface{}, style ColumnStyle) {
|
||||
if s.Columns == nil {
|
||||
s.Columns = []ScanCell{}
|
||||
}
|
||||
s.Columns = append(s.Columns, ScanCell{Content: fmt.Sprint(content), Style: style})
|
||||
}
|
||||
|
||||
// ScanCell defines one column of a scan result table
|
||||
type ScanCell struct {
|
||||
Content string `json:"content"`
|
||||
@ -57,6 +65,28 @@ func (c ColumnStyle) String() string {
|
||||
return [...]string{"", "green-cell", "yellow-cell", "red-cell", "grey-cell", "black-cell"}[c]
|
||||
}
|
||||
|
||||
// OverviewRow defines a row in the report's overview section
|
||||
// it can consist of a description and some details where the details can have a style attached
|
||||
type OverviewRow struct {
|
||||
Description string `json:"description"`
|
||||
Details string `json:"details,omitempty"`
|
||||
Style ColumnStyle `json:"style,omitempty"`
|
||||
}
|
||||
|
||||
// Subheader defines a dedicated sub header in a report
|
||||
type Subheader struct {
|
||||
Description string `json:"text"`
|
||||
Details string `json:"details,omitempty"`
|
||||
}
|
||||
|
||||
// AddSubHeader adds a sub header to the report containing of a text/title plus optional details
|
||||
func (s *ScanReport) AddSubHeader(header, details string) {
|
||||
s.Subheaders = append(s.Subheaders, Subheader{Description: header, Details: details})
|
||||
}
|
||||
|
||||
// MarkdownReportDirectory specifies the default directory for markdown reports which can later be collected by step pipelineCreateSummary
|
||||
const MarkdownReportDirectory = ".pipeline/stepReports"
|
||||
|
||||
const reportHTMLTemplate = `<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
@ -125,14 +155,14 @@ const reportHTMLTemplate = `<!DOCTYPE html>
|
||||
<h2>
|
||||
<span>
|
||||
{{range $s := .Subheaders}}
|
||||
{{- $s}}<br />
|
||||
{{- $s.Description}}: {{$s.Details}}<br />
|
||||
{{end -}}
|
||||
</span>
|
||||
</h2>
|
||||
<div>
|
||||
<h3>
|
||||
{{range $o := .Overview}}
|
||||
{{- $o}}<br />
|
||||
{{- drawOverviewRow $o}}<br />
|
||||
{{end -}}
|
||||
</h3>
|
||||
<span>{{.FurtherInfo}}</span>
|
||||
@ -169,8 +199,9 @@ func (s *ScanReport) ToHTML() ([]byte, error) {
|
||||
"reportTime": func(currentTime time.Time) string {
|
||||
return currentTime.Format("Jan 02, 2006 - 15:04:05 MST")
|
||||
},
|
||||
"columnCount": tableColumnCount,
|
||||
"drawCell": drawCell,
|
||||
"columnCount": tableColumnCount,
|
||||
"drawCell": drawCell,
|
||||
"drawOverviewRow": drawOverviewRow,
|
||||
}
|
||||
report := []byte{}
|
||||
tmpl, err := template.New("report").Funcs(funcMap).Parse(reportHTMLTemplate)
|
||||
@ -185,26 +216,42 @@ func (s *ScanReport) ToHTML() ([]byte, error) {
|
||||
return buf.Bytes(), nil
|
||||
}
|
||||
|
||||
const reportMdTemplate = `<details><summary>{{.Title}}</summary>
|
||||
<p>
|
||||
|
||||
{{range $s := .Subheaders}}
|
||||
**{{- $s.Description}}**: {{$s.Details}}
|
||||
{{end}}
|
||||
|
||||
{{range $o := .Overview}}
|
||||
{{- drawOverviewRow $o}}
|
||||
{{end}}
|
||||
|
||||
{{.FurtherInfo}}
|
||||
|
||||
Snapshot taken: _{{reportTime .ReportTime}}_
|
||||
</p>
|
||||
</details>`
|
||||
|
||||
// ToMarkdown creates a markdown version of the report content
|
||||
func (s *ScanReport) ToMarkdown() []byte {
|
||||
//ToDo: create collapsible markdown?
|
||||
/*
|
||||
## collapsible markdown?
|
||||
|
||||
<details><summary>CLICK ME</summary>
|
||||
<p>
|
||||
|
||||
#### yes, even hidden code blocks!
|
||||
|
||||
```python
|
||||
print("hello world!")
|
||||
```
|
||||
|
||||
</p>
|
||||
</details>
|
||||
*/
|
||||
|
||||
return []byte(fmt.Sprintf("<summary>%v</summary>", s.Title))
|
||||
func (s *ScanReport) ToMarkdown() ([]byte, error) {
|
||||
funcMap := template.FuncMap{
|
||||
"reportTime": func(currentTime time.Time) string {
|
||||
return currentTime.Format("Jan 02, 2006 - 15:04:05 MST")
|
||||
},
|
||||
"drawOverviewRow": drawOverviewRowMarkdown,
|
||||
}
|
||||
report := []byte{}
|
||||
tmpl, err := template.New("report").Funcs(funcMap).Parse(reportMdTemplate)
|
||||
if err != nil {
|
||||
return report, errors.Wrap(err, "failed to create Markdown report template")
|
||||
}
|
||||
buf := new(bytes.Buffer)
|
||||
err = tmpl.Execute(buf, s)
|
||||
if err != nil {
|
||||
return report, errors.Wrap(err, "failed to execute Markdown report template")
|
||||
}
|
||||
return buf.Bytes(), nil
|
||||
}
|
||||
|
||||
func tableColumnCount(scanDetails ScanDetailTable) int {
|
||||
@ -221,3 +268,21 @@ func drawCell(cell ScanCell) string {
|
||||
}
|
||||
return fmt.Sprintf(`<td>%v</td>`, cell.Content)
|
||||
}
|
||||
|
||||
func drawOverviewRow(row OverviewRow) string {
|
||||
// so far accept only accept max. two columns for overview table: description and content
|
||||
if len(row.Details) == 0 {
|
||||
return row.Description
|
||||
}
|
||||
// ToDo: allow styling of details
|
||||
return fmt.Sprintf("%v: %v", row.Description, row.Details)
|
||||
}
|
||||
|
||||
func drawOverviewRowMarkdown(row OverviewRow) string {
|
||||
// so far accept only accept max. two columns for overview table: description and content
|
||||
if len(row.Details) == 0 {
|
||||
return row.Description
|
||||
}
|
||||
// ToDo: allow styling of details
|
||||
return fmt.Sprintf("**%v**: %v", row.Description, row.Details)
|
||||
}
|
||||
|
@ -10,9 +10,12 @@ import (
|
||||
func TestToHTML(t *testing.T) {
|
||||
t.Run("empty table", func(t *testing.T) {
|
||||
report := ScanReport{
|
||||
Title: "Report Test Title",
|
||||
Subheaders: []string{"sub 1", "sub 2"},
|
||||
Overview: []string{"overview 1", "overview 2"},
|
||||
Title: "Report Test Title",
|
||||
Subheaders: []Subheader{{Description: "sub 1", Details: "1"}, {Description: "sub 2", Details: "2"}},
|
||||
Overview: []OverviewRow{
|
||||
{"overview 1", "1", Green},
|
||||
{"overview 2", "2", Green},
|
||||
},
|
||||
FurtherInfo: "this is further information",
|
||||
ReportTime: time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC),
|
||||
DetailTable: ScanDetailTable{
|
||||
@ -24,13 +27,13 @@ func TestToHTML(t *testing.T) {
|
||||
},
|
||||
}
|
||||
expectedSub := `<span>
|
||||
sub 1<br />
|
||||
sub 2<br />
|
||||
sub 1: 1<br />
|
||||
sub 2: 2<br />
|
||||
</span>
|
||||
</h2>`
|
||||
expectedOverview := `<h3>
|
||||
overview 1<br />
|
||||
overview 2<br />
|
||||
overview 1: 1<br />
|
||||
overview 2: 2<br />
|
||||
</h3>`
|
||||
|
||||
res, err := report.ToHTML()
|
||||
@ -78,6 +81,30 @@ func TestToHTML(t *testing.T) {
|
||||
})
|
||||
}
|
||||
|
||||
func TestToMarkdown(t *testing.T) {
|
||||
report := ScanReport{
|
||||
Title: "Report Test Title",
|
||||
Subheaders: []Subheader{{Description: "sub 1", Details: "1"}, {Description: "sub 2", Details: "2"}},
|
||||
Overview: []OverviewRow{
|
||||
{"overview 1", "1", Green},
|
||||
{"overview 2", "2", Green},
|
||||
},
|
||||
FurtherInfo: "this is further information",
|
||||
ReportTime: time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC),
|
||||
}
|
||||
|
||||
res, err := report.ToMarkdown()
|
||||
result := string(res)
|
||||
assert.NoError(t, err)
|
||||
assert.Contains(t, result, `<details><summary>Report Test Title</summary>`)
|
||||
assert.Contains(t, result, `**sub 1**: 1`)
|
||||
assert.Contains(t, result, `**sub 2**: 2`)
|
||||
assert.Contains(t, result, `**overview 1**: 1`)
|
||||
assert.Contains(t, result, `**overview 2**: 2`)
|
||||
assert.Contains(t, result, `this is further information`)
|
||||
assert.Contains(t, result, "Snapshot taken: _Jan 01, 2021 - 00:00:00 UTC_")
|
||||
}
|
||||
|
||||
func TestTableColumnCount(t *testing.T) {
|
||||
t.Run("table without counter", func(t *testing.T) {
|
||||
details := ScanDetailTable{
|
||||
|
@ -23,6 +23,7 @@ const (
|
||||
func DetermineProjectCoordinatesWithCustomVersion(nameTemplate, versionScheme, customVersion string, gav Coordinates) (string, string) {
|
||||
name, version := DetermineProjectCoordinates(nameTemplate, versionScheme, gav)
|
||||
if len(customVersion) > 0 {
|
||||
log.Entry().Infof("Using custom version: %v", customVersion)
|
||||
return name, customVersion
|
||||
}
|
||||
return name, version
|
||||
|
@ -24,7 +24,7 @@ func (m *mavenMock) SetVersion(v string) error {
|
||||
return nil
|
||||
}
|
||||
func (m *mavenMock) GetCoordinates() (Coordinates, error) {
|
||||
return &MavenDescriptor{GroupID: m.groupID, ArtifactID: m.artifactID, Version: m.version, Packaging: m.packaging}, nil
|
||||
return Coordinates{GroupID: m.groupID, ArtifactID: m.artifactID, Version: m.version, Packaging: m.packaging}, nil
|
||||
}
|
||||
|
||||
type pipMock struct {
|
||||
@ -43,7 +43,7 @@ func (p *pipMock) SetVersion(v string) error {
|
||||
return nil
|
||||
}
|
||||
func (p *pipMock) GetCoordinates() (Coordinates, error) {
|
||||
return &PipDescriptor{ArtifactID: p.artifactID, Version: p.version}, nil
|
||||
return Coordinates{ArtifactID: p.artifactID, Version: p.version}, nil
|
||||
}
|
||||
|
||||
func TestDetermineProjectCoordinatesWithCustomVersion(t *testing.T) {
|
||||
|
@ -10,13 +10,6 @@ import (
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// DockerDescriptor holds the unique identifier combination for a Docker artifact
|
||||
type DockerDescriptor struct {
|
||||
GroupID string
|
||||
ArtifactID string
|
||||
Version string
|
||||
}
|
||||
|
||||
// Docker defines an artifact based on a Dockerfile
|
||||
type Docker struct {
|
||||
artifact Artifact
|
||||
@ -151,7 +144,7 @@ func (d *Docker) versionFromBaseImageTag() string {
|
||||
|
||||
// GetCoordinates returns the coordinates
|
||||
func (d *Docker) GetCoordinates() (Coordinates, error) {
|
||||
result := DockerDescriptor{}
|
||||
result := Coordinates{}
|
||||
|
||||
result.GroupID = ""
|
||||
result.ArtifactID, _ = d.GetArtifactID()
|
||||
|
@ -177,6 +177,5 @@ func TestGetCoordinates(t *testing.T) {

coordinates, err := docker.GetCoordinates()
assert.NoError(t, err)
assert.Equal(t, DockerDescriptor{GroupID: "", ArtifactID: "my_test_image_tag", Version: ""}, coordinates)
//assert.Equal(t, DockerDescriptor{GroupID: "", ArtifactID: "my_test_image_tag", Version: "1.2.3"}, coordinates)
assert.Equal(t, Coordinates{GroupID: "", ArtifactID: "my_test_image_tag", Version: ""}, coordinates)
}
@ -10,14 +10,6 @@ import (
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// GoModDescriptor holds the unique identifier combination for Go modules
|
||||
type GoModDescriptor struct {
|
||||
GroupID string
|
||||
ArtifactID string
|
||||
Version string
|
||||
Packaging string
|
||||
}
|
||||
|
||||
// GoMod utility to interact with Go Modules specific versioning
|
||||
type GoMod struct {
|
||||
path string
|
||||
@ -86,28 +78,28 @@ func (m *GoMod) VersioningScheme() string {
|
||||
|
||||
// GetCoordinates returns the go.mod build descriptor coordinates
|
||||
func (m *GoMod) GetCoordinates() (Coordinates, error) {
|
||||
result := Coordinates{}
|
||||
err := m.init()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return result, err
|
||||
}
|
||||
|
||||
parsed, err := modfile.Parse(m.path, []byte(m.buildDescriptorContent), nil)
|
||||
if err != nil {
|
||||
return "", errors.Wrap(err, "failed to parse go.mod file")
|
||||
return result, errors.Wrap(err, "failed to parse go.mod file")
|
||||
}
|
||||
|
||||
descriptor := &GoModDescriptor{}
|
||||
if parsed.Module == nil {
|
||||
return "", errors.Wrap(err, "failed to parse go.mod file")
|
||||
return result, errors.Wrap(err, "failed to parse go.mod file")
|
||||
}
|
||||
if parsed.Module.Mod.Path != "" {
|
||||
artifactSplit := strings.Split(parsed.Module.Mod.Path, "/")
|
||||
artifactID := artifactSplit[len(artifactSplit)-1]
|
||||
descriptor.ArtifactID = artifactID
|
||||
result.ArtifactID = artifactID
|
||||
}
|
||||
descriptor.Version = parsed.Module.Mod.Version
|
||||
if descriptor.Version == "" {
|
||||
descriptor.Version = "unspecified"
|
||||
result.Version = parsed.Module.Mod.Version
|
||||
if result.Version == "" {
|
||||
result.Version = "unspecified"
|
||||
}
|
||||
return descriptor, nil
|
||||
return result, nil
|
||||
}
|
||||
|
@ -18,14 +18,6 @@ type gradleExecRunner interface {
|
||||
RunExecutable(e string, p ...string) error
|
||||
}
|
||||
|
||||
// GradleDescriptor holds the unique identifier combination for Gradle built Java artifacts
|
||||
type GradleDescriptor struct {
|
||||
GroupID string
|
||||
ArtifactID string
|
||||
Version string
|
||||
Packaging string
|
||||
}
|
||||
|
||||
// Gradle defines a maven artifact used for versioning
|
||||
type Gradle struct {
|
||||
execRunner gradleExecRunner
|
||||
@ -81,7 +73,7 @@ func (g *Gradle) VersioningScheme() string {
|
||||
|
||||
// GetCoordinates reads the coordinates from the maven pom.xml descriptor file
|
||||
func (g *Gradle) GetCoordinates() (Coordinates, error) {
|
||||
result := &GradleDescriptor{}
|
||||
result := Coordinates{}
|
||||
var err error
|
||||
// result.GroupID, err = g.GetGroupID()
|
||||
// if err != nil {
|
||||
@ -89,11 +81,11 @@ func (g *Gradle) GetCoordinates() (Coordinates, error) {
|
||||
// }
|
||||
result.ArtifactID, err = g.GetArtifactID()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return result, err
|
||||
}
|
||||
result.Version, err = g.GetVersion()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return result, err
|
||||
}
|
||||
// result.Packaging, err = g.GetPackaging()
|
||||
// if err != nil {
|
||||
|
@ -88,5 +88,5 @@ func (i *INIfile) SetVersion(version string) error {

// GetCoordinates returns the coordinates
func (i *INIfile) GetCoordinates() (Coordinates, error) {
return nil, nil
return Coordinates{}, nil
}
@ -9,14 +9,6 @@ import (
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// JSONDescriptor holds the unique identifier combination for json artifacts
|
||||
type JSONDescriptor struct {
|
||||
GroupID string
|
||||
ArtifactID string
|
||||
Version string
|
||||
Packaging string
|
||||
}
|
||||
|
||||
// JSONfile defines an artifact using a json file for versioning
|
||||
type JSONfile struct {
|
||||
path string
|
||||
@ -87,15 +79,15 @@ func (j *JSONfile) SetVersion(version string) error {
|
||||
|
||||
// GetCoordinates returns the coordinates
|
||||
func (j *JSONfile) GetCoordinates() (Coordinates, error) {
|
||||
result := Coordinates{}
|
||||
projectVersion, err := j.GetVersion()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return result, err
|
||||
}
|
||||
projectName := j.content["name"].(string)
|
||||
|
||||
artifact := &JSONDescriptor{
|
||||
ArtifactID: projectName,
|
||||
Version: projectVersion,
|
||||
}
|
||||
return artifact, nil
|
||||
result.ArtifactID = projectName
|
||||
result.Version = projectVersion
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
@ -20,14 +20,6 @@ type mavenRunner interface {
|
||||
Evaluate(*maven.EvaluateOptions, string, maven.Utils) (string, error)
|
||||
}
|
||||
|
||||
// MavenDescriptor holds the unique identifier combination for Maven built Java artifacts
|
||||
type MavenDescriptor struct {
|
||||
GroupID string
|
||||
ArtifactID string
|
||||
Version string
|
||||
Packaging string
|
||||
}
|
||||
|
||||
// Maven defines a maven artifact used for versioning
|
||||
type Maven struct {
|
||||
options maven.EvaluateOptions
|
||||
@ -52,23 +44,23 @@ func (m *Maven) VersioningScheme() string {
|
||||
|
||||
// GetCoordinates reads the coordinates from the maven pom.xml descriptor file
|
||||
func (m *Maven) GetCoordinates() (Coordinates, error) {
|
||||
result := &MavenDescriptor{}
|
||||
result := Coordinates{}
|
||||
var err error
|
||||
result.GroupID, err = m.GetGroupID()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return result, err
|
||||
}
|
||||
result.ArtifactID, err = m.GetArtifactID()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return result, err
|
||||
}
|
||||
result.Version, err = m.GetVersion()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return result, err
|
||||
}
|
||||
result.Packaging, err = m.GetPackaging()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return result, err
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
@ -17,14 +17,6 @@ const (
|
||||
VersionRegex = "(?s)(.*)version=['\"](.*?)['\"](.*)"
|
||||
)
|
||||
|
||||
// PipDescriptor holds the unique identifier combination for pip built Python artifacts
|
||||
type PipDescriptor struct {
|
||||
GroupID string
|
||||
ArtifactID string
|
||||
Version string
|
||||
Packaging string
|
||||
}
|
||||
|
||||
// Pip utility to interact with Python specific versioning
|
||||
type Pip struct {
|
||||
path string
|
||||
@ -117,26 +109,26 @@ func (p *Pip) VersioningScheme() string {
|
||||
|
||||
// GetCoordinates returns the pip build descriptor coordinates
|
||||
func (p *Pip) GetCoordinates() (Coordinates, error) {
|
||||
result := Coordinates{}
|
||||
err := p.init()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return result, err
|
||||
}
|
||||
|
||||
descriptor := &PipDescriptor{}
|
||||
if evaluateResult(p.buildDescriptorContent, NameRegex) {
|
||||
compile := regexp.MustCompile(NameRegex)
|
||||
values := compile.FindStringSubmatch(p.buildDescriptorContent)
|
||||
descriptor.ArtifactID = values[2]
|
||||
result.ArtifactID = values[2]
|
||||
} else {
|
||||
descriptor.ArtifactID = ""
|
||||
result.ArtifactID = ""
|
||||
}
|
||||
|
||||
descriptor.Version, err = p.GetVersion()
|
||||
result.Version, err = p.GetVersion()
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to retrieve coordinates")
|
||||
return result, errors.Wrap(err, "failed to retrieve coordinates")
|
||||
}
|
||||
|
||||
return descriptor, nil
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func evaluateResult(value, regex string) bool {
|
||||
|
@ -188,10 +188,9 @@ func TestPipGetCoordinates(t *testing.T) {
|
||||
}
|
||||
|
||||
coordinates, err := pip.GetCoordinates()
|
||||
pipDescriptor := coordinates.(*PipDescriptor)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "simple-python", pipDescriptor.ArtifactID)
|
||||
assert.Equal(t, "1.2.3", pipDescriptor.Version)
|
||||
assert.Equal(t, "simple-python", coordinates.ArtifactID)
|
||||
assert.Equal(t, "1.2.3", coordinates.Version)
|
||||
|
||||
})
|
||||
|
||||
@ -207,10 +206,9 @@ func TestPipGetCoordinates(t *testing.T) {
|
||||
}
|
||||
|
||||
coordinates, err := pip.GetCoordinates()
|
||||
pipDescriptor := coordinates.(*PipDescriptor)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "", pipDescriptor.ArtifactID)
|
||||
assert.Equal(t, "1.2.3", pipDescriptor.Version)
|
||||
assert.Equal(t, "", coordinates.ArtifactID)
|
||||
assert.Equal(t, "1.2.3", coordinates.Version)
|
||||
|
||||
})
|
||||
|
||||
|
@ -82,5 +82,6 @@ func (p *PropertiesFile) SetVersion(version string) error {

// GetCoordinates returns the coordinates
func (p *PropertiesFile) GetCoordinates() (Coordinates, error) {
return nil, nil
result := Coordinates{}
return result, nil
}
@ -63,5 +63,6 @@ func (v *Versionfile) SetVersion(version string) error {

// GetCoordinates returns the coordinates
func (v *Versionfile) GetCoordinates() (Coordinates, error) {
return nil, nil
result := Coordinates{}
return result, nil
}
@ -9,8 +9,13 @@ import (
"github.com/SAP/jenkins-library/pkg/maven"
)

// Coordinates to address the artifact
type Coordinates interface{}
// Coordinates to address the artifact coordinates like groupId, artifactId, version and packaging
type Coordinates struct {
GroupID string
ArtifactID string
Version string
Packaging string
}

// Artifact defines the versioning operations for various build tools
type Artifact interface {
@ -106,15 +106,15 @@ func (y *YAMLfile) SetVersion(version string) error {
|
||||
|
||||
// GetCoordinates returns the coordinates
|
||||
func (y *YAMLfile) GetCoordinates() (Coordinates, error) {
|
||||
result := &YAMLDescriptor{}
|
||||
result := Coordinates{}
|
||||
var err error
|
||||
result.ArtifactID, err = y.GetArtifactID()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return result, err
|
||||
}
|
||||
result.Version, err = y.GetVersion()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return result, err
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
@ -2,10 +2,11 @@ package whitesource
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/SAP/jenkins-library/pkg/log"
|
||||
"github.com/SAP/jenkins-library/pkg/piperutils"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/SAP/jenkins-library/pkg/log"
|
||||
"github.com/SAP/jenkins-library/pkg/piperutils"
|
||||
)
|
||||
|
||||
// ReportOptions defines options for downloading reports after scanning.
|
||||
|
@ -14,6 +14,8 @@ type SystemMock struct {
|
||||
Products []Product
|
||||
Projects []Project
|
||||
Alerts []Alert
|
||||
AlertType string
|
||||
AlertError error
|
||||
Libraries []Library
|
||||
RiskReport []byte
|
||||
VulnerabilityReport []byte
|
||||
@ -107,6 +109,15 @@ func (m *SystemMock) GetProjectAlerts(projectToken string) ([]Alert, error) {
|
||||
return m.Alerts, nil
|
||||
}
|
||||
|
||||
// GetProjectAlertsByType returns the alerts stored in the SystemMock and records the type.
|
||||
func (m *SystemMock) GetProjectAlertsByType(projectToken, alertType string) ([]Alert, error) {
|
||||
if m.AlertError != nil {
|
||||
return m.Alerts, m.AlertError
|
||||
}
|
||||
m.AlertType = alertType
|
||||
return m.Alerts, nil
|
||||
}
|
||||
|
||||
// GetProjectLibraryLocations returns the libraries stored in the SystemMock.
|
||||
func (m *SystemMock) GetProjectLibraryLocations(projectToken string) ([]Library, error) {
|
||||
return m.Libraries, nil
|
||||
|
@ -13,6 +13,9 @@ import (
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// ReportsDirectory defines the subfolder for the WhiteSource reports which are generated
|
||||
const ReportsDirectory = "whitesource"
|
||||
|
||||
// Product defines a WhiteSource product with name and token
|
||||
type Product struct {
|
||||
Name string `json:"name"`
|
||||
@ -48,24 +51,41 @@ type Alert struct {
|
||||
|
||||
// Library
|
||||
type Library struct {
|
||||
Name string `json:"name,omitempty"`
|
||||
Filename string `json:"filename,omitempty"`
|
||||
Version string `json:"version,omitempty"`
|
||||
Project string `json:"project,omitempty"`
|
||||
Name string `json:"name,omitempty"`
|
||||
Filename string `json:"filename,omitempty"`
|
||||
ArtifactID string `json:"artifactId,omitempty"`
|
||||
GroupID string `json:"groupId,omitempty"`
|
||||
Version string `json:"version,omitempty"`
|
||||
Project string `json:"project,omitempty"`
|
||||
}
|
||||
|
||||
// Vulnerability
|
||||
// Vulnerability defines a vulnerability as returned by WhiteSource
|
||||
type Vulnerability struct {
|
||||
Name string `json:"name,omitempty"`
|
||||
Type string `json:"type,omitempty"`
|
||||
Level string `json:"level,omitempty"`
|
||||
Description string `json:"description,omitempty"`
|
||||
Severity string `json:"severity,omitempty"`
|
||||
Score float64 `json:"score,omitempty"`
|
||||
CVSS3Severity string `json:"cvss3_severity,omitempty"`
|
||||
CVSS3Score float64 `json:"cvss3_score,omitempty"`
|
||||
Score float64 `json:"score,omitempty"`
|
||||
FixResolutionText string `json:"fixResolutionText,omitempty"`
|
||||
PublishDate string `json:"publishDate,omitempty"`
|
||||
URL string `json:"url,omitempty"`
|
||||
Description string `json:"description,omitempty"`
|
||||
TopFix Fix `json:"topFix,omitempty"`
|
||||
AllFixes []Fix `json:"allFixes,omitempty"`
|
||||
Level string `json:"level,omitempty"`
|
||||
FixResolutionText string `json:"fixResolutionText,omitempty"`
|
||||
}
|
||||
|
||||
// Fix defines a Fix as returned by WhiteSource
|
||||
type Fix struct {
|
||||
Vulnerability string `json:"vulnerability,omitempty"`
|
||||
Type string `json:"type,omitempty"`
|
||||
Origin string `json:"origin,omitempty"`
|
||||
URL string `json:"url,omitempty"`
|
||||
FixResolution string `json:"fixResolution,omitempty"`
|
||||
Date string `json:"date,omitempty"`
|
||||
Message string `json:"message,omitempty"`
|
||||
ExtraData string `json:"extraData,omitempty"`
|
||||
}
|
||||
|
||||
// Project defines a WhiteSource project with name and token
|
||||
@ -88,6 +108,7 @@ type Request struct {
|
||||
ProjectToken string `json:"projectToken,omitempty"`
|
||||
OrgToken string `json:"orgToken,omitempty"`
|
||||
Format string `json:"format,omitempty"`
|
||||
AlertType string `json:"alertType,omitempty"`
|
||||
ProductAdmins *Assignment `json:"productAdmins,omitempty"`
|
||||
ProductMembership *Assignment `json:"productMembership,omitempty"`
|
||||
AlertsEmailReceivers *Assignment `json:"alertsEmailReceivers,omitempty"`
|
||||
@ -132,7 +153,7 @@ func (s *System) GetProductsMetaInfo() ([]Product, error) {
|
||||
|
||||
err := s.sendRequestAndDecodeJSON(req, &wsResponse)
|
||||
if err != nil {
|
||||
return wsResponse.ProductVitals, errors.Wrap(err, "WhiteSource request failed")
|
||||
return wsResponse.ProductVitals, err
|
||||
}
|
||||
|
||||
return wsResponse.ProductVitals, nil
|
||||
@ -169,7 +190,7 @@ func (s *System) CreateProduct(productName string) (string, error) {
|
||||
|
||||
err := s.sendRequestAndDecodeJSON(req, &wsResponse)
|
||||
if err != nil {
|
||||
return "", errors.Wrap(err, "WhiteSource request failed")
|
||||
return "", err
|
||||
}
|
||||
|
||||
return wsResponse.ProductToken, nil
|
||||
@ -187,7 +208,7 @@ func (s *System) SetProductAssignments(productToken string, membership, admins,
|
||||
|
||||
err := s.sendRequestAndDecodeJSON(req, nil)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "WhiteSource request failed")
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
@ -208,7 +229,7 @@ func (s *System) GetProjectsMetaInfo(productToken string) ([]Project, error) {
|
||||
|
||||
err := s.sendRequestAndDecodeJSON(req, &wsResponse)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "WhiteSource request failed")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return wsResponse.ProjectVitals, nil
|
||||
@ -238,7 +259,7 @@ func (s *System) GetProjectByToken(projectToken string) (Project, error) {
|
||||
|
||||
err := s.sendRequestAndDecodeJSON(req, &wsResponse)
|
||||
if err != nil {
|
||||
return Project{}, errors.Wrap(err, "WhiteSource request failed")
|
||||
return Project{}, err
|
||||
}
|
||||
|
||||
if len(wsResponse.ProjectVitals) == 0 {
|
||||
@ -318,7 +339,7 @@ func (s *System) GetProductName(productToken string) (string, error) {
|
||||
|
||||
err := s.sendRequestAndDecodeJSON(req, &wsResponse)
|
||||
if err != nil {
|
||||
return "", errors.Wrap(err, "WhiteSource request failed")
|
||||
return "", err
|
||||
}
|
||||
|
||||
if len(wsResponse.ProductTags) == 0 {
|
||||
@ -374,7 +395,29 @@ func (s *System) GetProjectAlerts(projectToken string) ([]Alert, error) {
|
||||
|
||||
err := s.sendRequestAndDecodeJSON(req, &wsResponse)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "WhiteSource request failed")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return wsResponse.Alerts, nil
|
||||
}
|
||||
|
||||
// GetProjectAlertsByType returns all alerts of a certain type for a given project
|
||||
func (s *System) GetProjectAlertsByType(projectToken, alertType string) ([]Alert, error) {
|
||||
wsResponse := struct {
|
||||
Alerts []Alert `json:"alerts"`
|
||||
}{
|
||||
Alerts: []Alert{},
|
||||
}
|
||||
|
||||
req := Request{
|
||||
RequestType: "getProjectAlertsByType",
|
||||
ProjectToken: projectToken,
|
||||
AlertType: alertType,
|
||||
}
|
||||
|
||||
err := s.sendRequestAndDecodeJSON(req, &wsResponse)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return wsResponse.Alerts, nil
|
||||
@ -395,7 +438,7 @@ func (s *System) GetProjectLibraryLocations(projectToken string) ([]Library, err
|
||||
|
||||
err := s.sendRequestAndDecodeJSON(req, &wsResponse)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "WhiteSource request failed")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return wsResponse.Libraries, nil
|
||||
@ -404,7 +447,7 @@ func (s *System) GetProjectLibraryLocations(projectToken string) ([]Library, err
|
||||
func (s *System) sendRequestAndDecodeJSON(req Request, result interface{}) error {
|
||||
respBody, err := s.sendRequest(req)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "WhiteSource request failed")
|
||||
return errors.Wrap(err, "sending whiteSource request failed")
|
||||
}
|
||||
|
||||
log.Entry().Debugf("response: %v", string(respBody))
|
||||
|
@ -2,13 +2,15 @@ package whitesource
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
piperhttp "github.com/SAP/jenkins-library/pkg/http"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"testing"
|
||||
|
||||
piperhttp "github.com/SAP/jenkins-library/pkg/http"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
type whitesourceMockClient struct {
|
||||
@ -17,6 +19,7 @@ type whitesourceMockClient struct {
|
||||
urlsCalled string
|
||||
requestBody io.Reader
|
||||
responseBody string
|
||||
requestError error
|
||||
}
|
||||
|
||||
func (c *whitesourceMockClient) SetOptions(opts piperhttp.ClientOptions) {
|
||||
@ -27,6 +30,9 @@ func (c *whitesourceMockClient) SendRequest(method, url string, body io.Reader,
	c.httpMethod = method
	c.urlsCalled = url
	c.requestBody = body
	if c.requestError != nil {
		return &http.Response{}, c.requestError
	}
	return &http.Response{StatusCode: c.httpStatusCode, Body: ioutil.NopCloser(bytes.NewReader([]byte(c.responseBody)))}, nil
}
@ -69,7 +75,7 @@ func TestCreateProduct(t *testing.T) {
	// test
	productToken, err := sys.CreateProduct("test_product_name")
	// assert
	assert.EqualError(t, err, "WhiteSource request failed: invalid request, error code 5001, message 'User is not allowed to perform this action'")
	assert.EqualError(t, err, "invalid request, error code 5001, message 'User is not allowed to perform this action'")
	requestBody, err := ioutil.ReadAll(myTestClient.requestBody)
	require.NoError(t, err)
	assert.Equal(t, "", productToken)
@ -303,3 +309,30 @@ func TestGetProjectsByIDs(t *testing.T) {
		assert.Equal(t, []Project(nil), projects)
	})
}

func TestGetProjectAlertsByType(t *testing.T) {
	t.Parallel()

	t.Run("success case", func(t *testing.T) {
		responseBody := `{"alerts":[{"type":"SECURITY_VULNERABILITY", "vulnerability":{"name":"testVulnerability1"}}]}`
		myTestClient := whitesourceMockClient{responseBody: responseBody}
		sys := System{serverURL: "https://my.test.server", httpClient: &myTestClient, orgToken: "test_org_token", userToken: "test_user_token"}

		alerts, err := sys.GetProjectAlertsByType("test_project_token", "SECURITY_VULNERABILITY")

		assert.NoError(t, err)
		requestBody, err := ioutil.ReadAll(myTestClient.requestBody)
		assert.NoError(t, err)
		assert.Contains(t, string(requestBody), `"requestType":"getProjectAlertsByType"`)
		assert.Equal(t, []Alert{{Vulnerability: Vulnerability{Name: "testVulnerability1"}}}, alerts)
	})

	t.Run("error case", func(t *testing.T) {
		myTestClient := whitesourceMockClient{requestError: fmt.Errorf("request failed")}
		sys := System{serverURL: "https://my.test.server", httpClient: &myTestClient, orgToken: "test_org_token", userToken: "test_user_token"}

		_, err := sys.GetProjectAlertsByType("test_project_token", "SECURITY_VULNERABILITY")
		assert.EqualError(t, err, "sending whiteSource request failed: failed to send request to WhiteSource: request failed")
	})
}
@ -116,6 +116,18 @@ spec:
          - STAGES
          - STEPS
        default: true
      - name: customScanVersion
        type: string
        description: Custom version of the WhiteSource project used as source.
        longDescription: |-
          Defines a custom version for the WhiteSource scan which deviates from the typical versioning pattern using [`version`](#version) and [`versioningModel`](#versioningModel)
          It allows to set non-numeric versions as well and supersedes the value of [`version`](#version) which is calculated automatically.
          The parameter is also used by other scan steps (e.g. BlackDuck Detect) and thus allows a common custom version across scan tools.
        scope:
          - GENERAL
          - PARAMETERS
          - STAGES
          - STEPS
      - name: cvssSeverityLimit
        type: string
        description: "Limit of tolerable CVSS v3 score upon assessment and in consequence fails the build,
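Illustrative only, not taken from this commit: a sketch of the precedence the customScanVersion parameter above implies — a non-empty customScanVersion supersedes the automatically calculated version, which otherwise is derived from the artifact version according to the versioning model. The function name and the model names used here are assumptions for illustration.

package main

import (
	"fmt"
	"strings"
)

// resolveScanVersion is a hypothetical helper: customScanVersion wins when set,
// otherwise the scan version is cut down from the artifact version according to
// the versioning model.
func resolveScanVersion(customScanVersion, artifactVersion, versioningModel string) string {
	if customScanVersion != "" {
		return customScanVersion
	}
	parts := strings.Split(artifactVersion, ".")
	switch versioningModel {
	case "major":
		return parts[0]
	case "major-minor":
		if len(parts) > 1 {
			return parts[0] + "." + parts[1]
		}
		return parts[0]
	default: // e.g. "full"
		return artifactVersion
	}
}

func main() {
	fmt.Println(resolveScanVersion("", "1.2.3", "major"))       // "1"
	fmt.Println(resolveScanVersion("v2-rc1", "1.2.3", "major")) // "v2-rc1" supersedes the derived value
}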
@ -228,8 +240,9 @@ spec:
          - PARAMETERS
          - STAGES
          - STEPS
      - name: productVersion
      - name: version
        aliases:
          - name: productVersion
          - name: whitesourceProductVersion
          - name: whitesource/productVersion
            deprecated: true
@ -243,6 +256,9 @@ spec:
          - PARAMETERS
          - STAGES
          - STEPS
        resourceRef:
          - name: commonPipelineEnvironment
            param: artifactVersion
      - name: projectName
        aliases:
          - name: whitesourceProjectName
@ -267,14 +283,6 @@ spec:
          - PARAMETERS
          - STAGES
          - STEPS
      - name: reportDirectoryName
        type: string
        description: "Name of the directory to save vulnerability/risk reports to"
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: "whitesource-reports"
      - name: reporting
        type: bool
        description: "Whether assessment is being done at all, defaults to `true`"
@ -349,7 +357,7 @@ spec:
        default: 900
      - name: userToken
        type: string
        description: "WhiteSource token identifying the user executing the scan."
        description: User token to access WhiteSource. In Jenkins use case this is automatically filled through the credentials.
        scope:
          - GENERAL
          - PARAMETERS
@ -360,6 +368,11 @@ spec:
        resourceRef:
          - name: userTokenCredentialsId
            type: secret
          - type: vaultSecret
            paths:
              - $(vaultPath)/whitesource
              - $(vaultBasePath)/$(vaultPipelineName)/whitesource
              - $(vaultBasePath)/GROUP-SECRETS/whitesource
      - name: versioningModel
        type: string
        description: "The default project versioning model used in case `projectVersion` parameter is
@ -486,7 +499,7 @@ spec:
        - conditionRef: strings-equal
          params:
            - name: buildTool
              value: go
              value: golang
    - image: hseeberger/scala-sbt:8u181_2.12.8_1.2.8
      workingDir: /tmp
      env: []