
feat(whitesourceExecuteScan): UA for all build tools, e.g. maven & npm (#2501)

* feat(whitesource): add config helper

this helps to ease & enforce config settings

* fix accidental change of class

* add todos wrt java download

* use existing scanOptions, add option to download jre

* update generation

* fix generation

* allow running UA via go library

* correct image, improve logging

* add removal of downloaded JVM

* update java creation and deletion

* refactor and add log output

* remove obsolete ToDo

* increase test coverage

* increase test coverage

* adding aliases and tests

* make go modules the default

* maven: update behavior of projectNaming

* add Docker capabilities

* correct parameter name

* retrieve Docker coordinates

* docker coordinates only to provide artifact

* add ToDos

* add mta capability

* add aliases, mvn arguments for settings

* clean up groovy part

* update defaults

* add container for pip

* add defaults, add maven specifics, ...

* properly download settings

* maven: check existence of excluded files

* fix reporting

* Update CommonStepsTest.groovy

* update comment

* fix CodeClimate finding

* add tests for pip & fix minor issues

* fix order of pip build descriptors

* update pip container options

* fix pip virtualEnv parameter

* update report permissions

* fix test

* update container options

* use fileUtils to load properties file

* update parameter description

* adding Docker scanning defaults

* clean up configHelper

* consider also npm tool cache

* add todos
Oliver Nocon
2021-02-03 14:52:48 +01:00
committed by GitHub
parent 2b56e8594e
commit a104b2a06d
31 changed files with 2066 additions and 4139 deletions

View File

@@ -9,16 +9,18 @@ import (
"strings"
"time"
"github.com/SAP/jenkins-library/pkg/npm"
piperDocker "github.com/SAP/jenkins-library/pkg/docker"
piperhttp "github.com/SAP/jenkins-library/pkg/http"
ws "github.com/SAP/jenkins-library/pkg/whitesource"
"github.com/360EntSecGroup-Skylar/excelize/v2"
"github.com/SAP/jenkins-library/pkg/command"
piperhttp "github.com/SAP/jenkins-library/pkg/http"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/SAP/jenkins-library/pkg/npm"
"github.com/SAP/jenkins-library/pkg/piperutils"
"github.com/SAP/jenkins-library/pkg/telemetry"
"github.com/SAP/jenkins-library/pkg/versioning"
ws "github.com/SAP/jenkins-library/pkg/whitesource"
"github.com/pkg/errors"
)
// just to make the lines less long
@@ -147,11 +149,30 @@ func runWhitesourceExecuteScan(config *ScanOptions, scan *ws.Scan, utils whiteso
}
func runWhitesourceScan(config *ScanOptions, scan *ws.Scan, utils whitesourceUtils, sys whitesource, commonPipelineEnvironment *whitesourceExecuteScanCommonPipelineEnvironment) error {
// Download Docker image for container scan
// ToDo: move it to improve testability
if config.BuildTool == "docker" {
saveImageOptions := containerSaveImageOptions{
ContainerImage: config.ScanImage,
ContainerRegistryURL: config.ScanImageRegistryURL,
IncludeLayers: config.ScanImageIncludeLayers,
}
dClientOptions := piperDocker.ClientOptions{ImageName: saveImageOptions.ContainerImage, RegistryURL: saveImageOptions.ContainerRegistryURL, LocalPath: "", IncludeLayers: saveImageOptions.IncludeLayers}
dClient := &piperDocker.Client{}
dClient.SetOptions(dClientOptions)
if err := runContainerSaveImage(&saveImageOptions, &telemetry.CustomData{}, "./cache", "", dClient); err != nil {
return errors.Wrapf(err, "failed to download Docker image %v", config.ScanImage)
}
}
// Start the scan
if err := executeScan(config, scan, utils); err != nil {
return err
}
// ToDo: Check this:
// Why is this required at all, resolveProjectIdentifiers() is already called before the scan in runWhitesourceExecuteScan()
// Could perhaps use scan.updateProjects(sys) directly... have not investigated what could break
if err := resolveProjectIdentifiers(config, scan, utils, sys); err != nil {
return err
@@ -190,7 +211,7 @@ func checkAndReportScanResults(config *ScanOptions, scan *ws.Scan, utils whiteso
if err != nil {
return err
}
piperutils.PersistReportsAndLinks("whitesourceExecuteScan", "", nil, paths)
piperutils.PersistReportsAndLinks("whitesourceExecuteScan", "", paths, nil)
}
if config.SecurityVulnerabilities {
if err := checkSecurityViolations(config, scan, sys); err != nil {
@@ -223,6 +244,7 @@ func createWhiteSourceProduct(config *ScanOptions, sys whitesource) (string, err
func resolveProjectIdentifiers(config *ScanOptions, scan *ws.Scan, utils whitesourceUtils, sys whitesource) error {
if scan.AggregateProjectName == "" || config.ProductVersion == "" {
options := &versioning.Options{
DockerImage: config.ScanImage,
ProjectSettingsFile: config.ProjectSettingsFile,
GlobalSettingsFile: config.GlobalSettingsFile,
M2Path: config.M2Path,
@@ -232,6 +254,8 @@ func resolveProjectIdentifiers(config *ScanOptions, scan *ws.Scan, utils whiteso
return fmt.Errorf("failed to get build artifact description: %w", err)
}
//ToDo: fill version in coordinates with version from pipeline environment
nameTmpl := `{{list .GroupID .ArtifactID | join "-" | trimAll "-"}}`
name, version := versioning.DetermineProjectCoordinates(nameTmpl, config.VersioningModel, coordinates)
if scan.AggregateProjectName == "" {
@@ -337,12 +361,15 @@ func validateProductVersion(version string) string {
func wsScanOptions(config *ScanOptions) *ws.ScanOptions {
return &ws.ScanOptions{
BuildTool: config.BuildTool,
ScanType: config.ScanType,
OrgToken: config.OrgToken,
UserToken: config.UserToken,
ProductName: config.ProductName,
ProductToken: config.ProductToken,
ProductVersion: config.ProductVersion,
ProjectName: config.ProjectName,
BuildDescriptorFile: config.BuildDescriptorFile,
BuildDescriptorExcludeList: config.BuildDescriptorExcludeList,
PomPath: config.BuildDescriptorFile,
M2Path: config.M2Path,
@@ -355,6 +382,10 @@ func wsScanOptions(config *ScanOptions) *ws.ScanOptions {
ConfigFilePath: config.ConfigFilePath,
Includes: config.Includes,
Excludes: config.Excludes,
JreDownloadURL: config.JreDownloadURL,
AgentURL: config.AgentURL,
ServiceURL: config.ServiceURL,
Verbose: GeneralConfig.Verbose,
}
}
@@ -408,25 +439,37 @@ func checkSecurityViolations(config *ScanOptions, scan *ws.Scan, sys whitesource
}
if config.ProjectToken != "" {
project := ws.Project{Name: config.ProjectName, Token: config.ProjectToken}
if err := checkProjectSecurityViolations(cvssSeverityLimit, project, sys); err != nil {
if _, err := checkProjectSecurityViolations(cvssSeverityLimit, project, sys); err != nil {
return err
}
} else {
vulnerabilitiesCount := 0
var errorsOccured []string
for _, project := range scan.ScannedProjects() {
if err := checkProjectSecurityViolations(cvssSeverityLimit, project, sys); err != nil {
return err
// collect errors and aggregate vulnerabilities from all projects
if vulCount, err := checkProjectSecurityViolations(cvssSeverityLimit, project, sys); err != nil {
vulnerabilitiesCount += vulCount
errorsOccured = append(errorsOccured, fmt.Sprint(err))
}
}
if len(errorsOccured) > 0 {
if vulnerabilitiesCount > 0 {
log.SetErrorCategory(log.ErrorCompliance)
}
return fmt.Errorf(strings.Join(errorsOccured, ": "))
}
}
return nil
}
// checkProjectSecurityViolations checks the security violations of a single project and returns the number of severe vulnerabilities together with an error if the configured severity limit is crossed.
func checkProjectSecurityViolations(cvssSeverityLimit float64, project ws.Project, sys whitesource) error {
func checkProjectSecurityViolations(cvssSeverityLimit float64, project ws.Project, sys whitesource) (int, error) {
// get project alerts (vulnerabilities)
//ToDo: use getProjectAlertsByType with alertType : "SECURITY_VULNERABILITY"?
//ToDo: also return reference to alerts in order to use it for reporting later
alerts, err := sys.GetProjectAlerts(project.Token)
if err != nil {
return fmt.Errorf("failed to retrieve project alerts from Whitesource: %w", err)
return 0, fmt.Errorf("failed to retrieve project alerts from Whitesource: %w", err)
}
severeVulnerabilities := 0
@@ -456,11 +499,11 @@ func checkProjectSecurityViolations(cvssSeverityLimit float64, project ws.Projec
// https://github.com/SAP/jenkins-library/blob/master/vars/whitesourceExecuteScan.groovy#L558
if severeVulnerabilities > 0 {
return fmt.Errorf("%v Open Source Software Security vulnerabilities with CVSS score greater "+
return severeVulnerabilities, fmt.Errorf("%v Open Source Software Security vulnerabilities with CVSS score greater "+
"or equal to %.1f detected in project %s",
severeVulnerabilities, cvssSeverityLimit, project.Name)
}
return nil
return 0, nil
}
func aggregateVersionWideLibraries(config *ScanOptions, utils whitesourceUtils, sys whitesource) error {
@@ -514,7 +557,7 @@ func aggregateVersionWideVulnerabilities(config *ScanOptions, utils whitesourceU
}
reportPath := filepath.Join(config.ReportDirectoryName, "project-names-aggregated.txt")
if err := utils.FileWrite(reportPath, []byte(projectNames), 0644); err != nil {
if err := utils.FileWrite(reportPath, []byte(projectNames), 0666); err != nil {
return err
}
if err := newVulnerabilityExcelReport(versionWideAlerts, config, utils); err != nil {
@@ -611,7 +654,7 @@ func newLibraryCSVReport(libraries map[string][]ws.Library, config *ScanOptions,
// Write result to file
fileName := fmt.Sprintf("%s/libraries-%s.csv", config.ReportDirectoryName,
utils.Now().Format(wsReportTimeStampLayout))
if err := utils.FileWrite(fileName, []byte(output), 0777); err != nil {
if err := utils.FileWrite(fileName, []byte(output), 0666); err != nil {
return err
}
return nil
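
The project name used for reporting is derived in resolveProjectIdentifiers above from the sprig-based template `{{list .GroupID .ArtifactID | join "-" | trimAll "-"}}` passed to versioning.DetermineProjectCoordinates. A minimal, self-contained sketch of how such a template evaluates, assuming the sprig function map is available; the import path, the coordinates struct, and the sample values below are illustrative and not taken from the diff:

package main

import (
	"os"
	"text/template"

	"github.com/Masterminds/sprig/v3"
)

// coordinates mimics the fields the naming template expects.
type coordinates struct {
	GroupID    string
	ArtifactID string
}

func main() {
	const nameTmpl = `{{list .GroupID .ArtifactID | join "-" | trimAll "-"}}`
	tmpl := template.Must(template.New("name").Funcs(sprig.TxtFuncMap()).Parse(nameTmpl))

	// Maven-style coordinates -> "com.example-my-app"
	_ = tmpl.Execute(os.Stdout, coordinates{GroupID: "com.example", ArtifactID: "my-app"})
	os.Stdout.WriteString("\n")

	// Docker coordinates come with an empty GroupID; trimAll removes the leading "-" -> "my_test_image_tag"
	_ = tmpl.Execute(os.Stdout, coordinates{GroupID: "", ArtifactID: "my_test_image_tag"})
	os.Stdout.WriteString("\n")
}

This is why the Docker case, where only the artifact ID can be resolved, still yields a usable project name.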

View File

@@ -16,39 +16,43 @@ import (
)
type whitesourceExecuteScanOptions struct {
BuildTool string `json:"buildTool,omitempty"`
BuildDescriptorFile string `json:"buildDescriptorFile,omitempty"`
VersioningModel string `json:"versioningModel,omitempty"`
CreateProductFromPipeline bool `json:"createProductFromPipeline,omitempty"`
SecurityVulnerabilities bool `json:"securityVulnerabilities,omitempty"`
Timeout int `json:"timeout,omitempty"`
AgentDownloadURL string `json:"agentDownloadUrl,omitempty"`
ConfigFilePath string `json:"configFilePath,omitempty"`
ReportDirectoryName string `json:"reportDirectoryName,omitempty"`
AggregateVersionWideReport bool `json:"aggregateVersionWideReport,omitempty"`
VulnerabilityReportFormat string `json:"vulnerabilityReportFormat,omitempty"`
ParallelLimit string `json:"parallelLimit,omitempty"`
Reporting bool `json:"reporting,omitempty"`
ServiceURL string `json:"serviceUrl,omitempty"`
BuildDescriptorExcludeList []string `json:"buildDescriptorExcludeList,omitempty"`
OrgToken string `json:"orgToken,omitempty"`
UserToken string `json:"userToken,omitempty"`
LicensingVulnerabilities bool `json:"licensingVulnerabilities,omitempty"`
AgentFileName string `json:"agentFileName,omitempty"`
AgentParameters []string `json:"agentParameters,omitempty"`
AgentURL string `json:"agentUrl,omitempty"`
AggregateVersionWideReport bool `json:"aggregateVersionWideReport,omitempty"`
BuildDescriptorExcludeList []string `json:"buildDescriptorExcludeList,omitempty"`
BuildDescriptorFile string `json:"buildDescriptorFile,omitempty"`
BuildTool string `json:"buildTool,omitempty"`
ConfigFilePath string `json:"configFilePath,omitempty"`
CreateProductFromPipeline bool `json:"createProductFromPipeline,omitempty"`
CvssSeverityLimit string `json:"cvssSeverityLimit,omitempty"`
EmailAddressesOfInitialProductAdmins []string `json:"emailAddressesOfInitialProductAdmins,omitempty"`
ProductVersion string `json:"productVersion,omitempty"`
Excludes []string `json:"excludes,omitempty"`
Includes []string `json:"includes,omitempty"`
InstallCommand string `json:"installCommand,omitempty"`
JreDownloadURL string `json:"jreDownloadUrl,omitempty"`
LicensingVulnerabilities bool `json:"licensingVulnerabilities,omitempty"`
OrgToken string `json:"orgToken,omitempty"`
ParallelLimit string `json:"parallelLimit,omitempty"`
ProductName string `json:"productName,omitempty"`
ProductToken string `json:"productToken,omitempty"`
ProductVersion string `json:"productVersion,omitempty"`
ProjectName string `json:"projectName,omitempty"`
ProjectToken string `json:"projectToken,omitempty"`
VulnerabilityReportTitle string `json:"vulnerabilityReportTitle,omitempty"`
InstallCommand string `json:"installCommand,omitempty"`
ReportDirectoryName string `json:"reportDirectoryName,omitempty"`
Reporting bool `json:"reporting,omitempty"`
ScanImage string `json:"scanImage,omitempty"`
ScanImageIncludeLayers bool `json:"scanImageIncludeLayers,omitempty"`
ScanImageRegistryURL string `json:"scanImageRegistryUrl,omitempty"`
ScanType string `json:"scanType,omitempty"`
CvssSeverityLimit string `json:"cvssSeverityLimit,omitempty"`
Includes string `json:"includes,omitempty"`
Excludes string `json:"excludes,omitempty"`
ProductToken string `json:"productToken,omitempty"`
AgentParameters string `json:"agentParameters,omitempty"`
SecurityVulnerabilities bool `json:"securityVulnerabilities,omitempty"`
ServiceURL string `json:"serviceUrl,omitempty"`
Timeout int `json:"timeout,omitempty"`
UserToken string `json:"userToken,omitempty"`
VersioningModel string `json:"versioningModel,omitempty"`
VulnerabilityReportFormat string `json:"vulnerabilityReportFormat,omitempty"`
VulnerabilityReportTitle string `json:"vulnerabilityReportTitle,omitempty"`
ProjectSettingsFile string `json:"projectSettingsFile,omitempty"`
GlobalSettingsFile string `json:"globalSettingsFile,omitempty"`
M2Path string `json:"m2Path,omitempty"`
@@ -84,7 +88,7 @@ func (p *whitesourceExecuteScanCommonPipelineEnvironment) persist(path, resource
}
}
// WhitesourceExecuteScanCommand BETA
// WhitesourceExecuteScanCommand Execute a WhiteSource scan
func WhitesourceExecuteScanCommand() *cobra.Command {
const STEP_NAME = "whitesourceExecuteScan"
@@ -95,22 +99,17 @@ func WhitesourceExecuteScanCommand() *cobra.Command {
var createWhitesourceExecuteScanCmd = &cobra.Command{
Use: STEP_NAME,
Short: "BETA",
Long: `BETA
With this step [WhiteSource](https://www.whitesourcesoftware.com) security and license compliance scans can be executed and assessed.
Short: "Execute a WhiteSource scan",
Long: `With this step [WhiteSource](https://www.whitesourcesoftware.com) security and license compliance scans can be executed and assessed.
WhiteSource is a Software as a Service offering based on a so called unified agent that locally determines the dependency
tree of a node.js, Java, Python, Ruby, or Scala based solution and sends it to the WhiteSource server for a policy based license compliance
check and additional Free and Open Source Software Publicly Known Vulnerabilities detection.
The step uses the so-called WhiteSource Unified Agent. For details please refer to the [WhiteSource Unified Agent Documentation](https://whitesource.atlassian.net/wiki/spaces/WD/pages/33718339/Unified+Agent).
!!! note "Docker Images"
The underlying Docker images are public and specific to the solution's programming language(s) and therefore may have to be exchanged
to fit to and support the relevant scenario. The default Python environment used is, for example, Python 3 based.
!!! warn "Restrictions"
Currently the step does contain hardened scan configurations for ` + "`" + `scanType` + "`" + ` ` + "`" + `'pip'` + "`" + ` and ` + "`" + `'go'` + "`" + `. Other environments are still being elaborated,
so please thoroughly check your results and do not take them for granted by default.
Also not all environments have been thoroughly tested already therefore you might need to tweak around with the default containers used or
create your own ones to adequately support your scenario. To do so please modify ` + "`" + `dockerImage` + "`" + ` and ` + "`" + `dockerWorkspace` + "`" + ` parameters.
The step expects an environment containing the programming language related compiler/interpreter as well as the related build tool. For a list
of the supported build tools per environment please refer to the [WhiteSource Unified Agent Documentation](https://whitesource.atlassian.net/wiki/spaces/WD/pages/33718339/Unified+Agent).`,
to fit to and support the relevant scenario. The default Python environment used is, for example, Python 3 based.`,
PreRunE: func(cmd *cobra.Command, _ []string) error {
startTime = time.Now()
log.SetStepName(STEP_NAME)
@@ -159,39 +158,43 @@ check and additional Free and Open Source Software Publicly Known Vulnerabilitie
}
func addWhitesourceExecuteScanFlags(cmd *cobra.Command, stepConfig *whitesourceExecuteScanOptions) {
cmd.Flags().StringVar(&stepConfig.BuildTool, "buildTool", os.Getenv("PIPER_buildTool"), "Defines the tool which is used for building the artifact.")
cmd.Flags().StringVar(&stepConfig.BuildDescriptorFile, "buildDescriptorFile", os.Getenv("PIPER_buildDescriptorFile"), "Explicit path to the build descriptor file.")
cmd.Flags().StringVar(&stepConfig.VersioningModel, "versioningModel", `major`, "The default project versioning model used in case `projectVersion` parameter is empty for creating the version based on the build descriptor version to report results in Whitesource, can be one of `'major'`, `'major-minor'`, `'semantic'`, `'full'`")
cmd.Flags().BoolVar(&stepConfig.CreateProductFromPipeline, "createProductFromPipeline", true, "Whether to create the related WhiteSource product on the fly based on the supplied pipeline configuration.")
cmd.Flags().BoolVar(&stepConfig.SecurityVulnerabilities, "securityVulnerabilities", true, "Whether security compliance is considered and reported as part of the assessment.")
cmd.Flags().IntVar(&stepConfig.Timeout, "timeout", 900, "Timeout in seconds until an HTTP call is forcefully terminated.")
cmd.Flags().StringVar(&stepConfig.AgentDownloadURL, "agentDownloadUrl", `https://github.com/whitesource/unified-agent-distribution/releases/latest/download/wss-unified-agent.jar`, "URL used to download the latest version of the WhiteSource Unified Agent.")
cmd.Flags().StringVar(&stepConfig.ConfigFilePath, "configFilePath", `./wss-generated-file.config`, "Explicit path to the WhiteSource Unified Agent configuration file.")
cmd.Flags().StringVar(&stepConfig.ReportDirectoryName, "reportDirectoryName", `whitesource-reports`, "Name of the directory to save vulnerability/risk reports to")
cmd.Flags().BoolVar(&stepConfig.AggregateVersionWideReport, "aggregateVersionWideReport", false, "This does not run a scan, instead it just generates a report for all projects with projectVersion = config.ProductVersion")
cmd.Flags().StringVar(&stepConfig.VulnerabilityReportFormat, "vulnerabilityReportFormat", `xlsx`, "Format of the file the vulnerability report is written to.")
cmd.Flags().StringVar(&stepConfig.ParallelLimit, "parallelLimit", `15`, "[NOT IMPLEMENTED] Limit of parallel jobs being run at once in case of `scanType: 'mta'` based scenarios, defaults to `15`.")
cmd.Flags().BoolVar(&stepConfig.Reporting, "reporting", true, "Whether assessment is being done at all, defaults to `true`")
cmd.Flags().StringVar(&stepConfig.ServiceURL, "serviceUrl", `https://saas.whitesourcesoftware.com/api`, "URL to the WhiteSource server API used for communication.")
cmd.Flags().StringSliceVar(&stepConfig.BuildDescriptorExcludeList, "buildDescriptorExcludeList", []string{`unit-tests/pom.xml`, `integration-tests/pom.xml`}, "List of build descriptors and therefore modules to exclude from the scan and assessment activities.")
cmd.Flags().StringVar(&stepConfig.OrgToken, "orgToken", os.Getenv("PIPER_orgToken"), "WhiteSource token identifying your organization.")
cmd.Flags().StringVar(&stepConfig.UserToken, "userToken", os.Getenv("PIPER_userToken"), "WhiteSource token identifying the user executing the scan.")
cmd.Flags().BoolVar(&stepConfig.LicensingVulnerabilities, "licensingVulnerabilities", true, "[NOT IMPLEMENTED] Whether license compliance is considered and reported as part of the assessment.")
cmd.Flags().StringVar(&stepConfig.AgentFileName, "agentFileName", `wss-unified-agent.jar`, "Locally used name for the Unified Agent jar file after download.")
cmd.Flags().StringSliceVar(&stepConfig.AgentParameters, "agentParameters", []string{}, "[NOT IMPLEMENTED] List of additional parameters passed to the Unified Agent command line.")
cmd.Flags().StringVar(&stepConfig.AgentURL, "agentUrl", `https://saas.whitesourcesoftware.com/agent`, "URL to the WhiteSource agent endpoint.")
cmd.Flags().BoolVar(&stepConfig.AggregateVersionWideReport, "aggregateVersionWideReport", false, "This does not run a scan, instead it just generates a report for all projects with projectVersion = config.ProductVersion")
cmd.Flags().StringSliceVar(&stepConfig.BuildDescriptorExcludeList, "buildDescriptorExcludeList", []string{`unit-tests/pom.xml`, `integration-tests/pom.xml`}, "List of build descriptors and therefore modules to exclude from the scan and assessment activities.")
cmd.Flags().StringVar(&stepConfig.BuildDescriptorFile, "buildDescriptorFile", os.Getenv("PIPER_buildDescriptorFile"), "Explicit path to the build descriptor file.")
cmd.Flags().StringVar(&stepConfig.BuildTool, "buildTool", os.Getenv("PIPER_buildTool"), "Defines the tool which is used for building the artifact.")
cmd.Flags().StringVar(&stepConfig.ConfigFilePath, "configFilePath", `./wss-unified-agent.config`, "Explicit path to the WhiteSource Unified Agent configuration file.")
cmd.Flags().BoolVar(&stepConfig.CreateProductFromPipeline, "createProductFromPipeline", true, "Whether to create the related WhiteSource product on the fly based on the supplied pipeline configuration.")
cmd.Flags().StringVar(&stepConfig.CvssSeverityLimit, "cvssSeverityLimit", `-1`, "Limit of tolerable CVSS v3 score upon assessment and in consequence fails the build, defaults to `-1`.")
cmd.Flags().StringSliceVar(&stepConfig.EmailAddressesOfInitialProductAdmins, "emailAddressesOfInitialProductAdmins", []string{}, "The list of email addresses to assign as product admins for newly created WhiteSource products.")
cmd.Flags().StringVar(&stepConfig.ProductVersion, "productVersion", os.Getenv("PIPER_productVersion"), "Version of the WhiteSource product to be created and used for results aggregation, usually determined automatically.")
cmd.Flags().StringVar(&stepConfig.JreDownloadURL, "jreDownloadUrl", os.Getenv("PIPER_jreDownloadUrl"), "[NOT IMPLEMENTED] URL used for downloading the Java Runtime Environment (JRE) required to run the WhiteSource Unified Agent.")
cmd.Flags().StringSliceVar(&stepConfig.Excludes, "excludes", []string{}, "List of file path patterns to exclude in the scan.")
cmd.Flags().StringSliceVar(&stepConfig.Includes, "includes", []string{}, "List of file path patterns to include in the scan.")
cmd.Flags().StringVar(&stepConfig.InstallCommand, "installCommand", os.Getenv("PIPER_installCommand"), "[NOT IMPLEMENTED] Install command that can be used to populate the default docker image for some scenarios.")
cmd.Flags().StringVar(&stepConfig.JreDownloadURL, "jreDownloadUrl", `https://github.com/SAP/SapMachine/releases/download/sapmachine-11.0.2/sapmachine-jre-11.0.2_linux-x64_bin.tar.gz`, "URL used for downloading the Java Runtime Environment (JRE) required to run the WhiteSource Unified Agent.")
cmd.Flags().BoolVar(&stepConfig.LicensingVulnerabilities, "licensingVulnerabilities", true, "[NOT IMPLEMENTED] Whether license compliance is considered and reported as part of the assessment.")
cmd.Flags().StringVar(&stepConfig.OrgToken, "orgToken", os.Getenv("PIPER_orgToken"), "WhiteSource token identifying your organization.")
cmd.Flags().StringVar(&stepConfig.ParallelLimit, "parallelLimit", `15`, "[NOT IMPLEMENTED] Limit of parallel jobs being run at once in case of `scanType: 'mta'` based scenarios, defaults to `15`.")
cmd.Flags().StringVar(&stepConfig.ProductName, "productName", os.Getenv("PIPER_productName"), "Name of the WhiteSource product used for results aggregation. This parameter is mandatory if the parameter `createProductFromPipeline` is set to `true` and the WhiteSource product does not yet exist. It is also mandatory if the parameter `productToken` is not provided.")
cmd.Flags().StringVar(&stepConfig.ProductToken, "productToken", os.Getenv("PIPER_productToken"), "Token of the WhiteSource product to be created and used for results aggregation, usually determined automatically. Can optionally be provided as an alternative to `productName`.")
cmd.Flags().StringVar(&stepConfig.ProductVersion, "productVersion", os.Getenv("PIPER_productVersion"), "Version of the WhiteSource product to be created and used for results aggregation.")
cmd.Flags().StringVar(&stepConfig.ProjectName, "projectName", os.Getenv("PIPER_projectName"), "The project name used for reporting results in WhiteSource. When provided, all source modules will be scanned into one aggregated WhiteSource project. For scan types `maven`, `mta`, `npm`, the default is to generate one WhiteSource project per module, whereas the project name is derived from the module's build descriptor. For NPM modules, project aggregation is not supported, the last scanned NPM module will override all previously aggregated scan results!")
cmd.Flags().StringVar(&stepConfig.ProjectToken, "projectToken", os.Getenv("PIPER_projectToken"), "Project token to execute scan on. Ignored for scan types `maven`, `mta` and `npm`. Used for project aggregation when scanning with the Unified Agent and can be provided as an alternative to `projectName`.")
cmd.Flags().StringVar(&stepConfig.VulnerabilityReportTitle, "vulnerabilityReportTitle", `WhiteSource Security Vulnerability Report`, "Title of vulnerability report written during the assessment phase.")
cmd.Flags().StringVar(&stepConfig.InstallCommand, "installCommand", os.Getenv("PIPER_installCommand"), "[NOT IMPLEMENTED] Install command that can be used to populate the default docker image for some scenarios.")
cmd.Flags().StringVar(&stepConfig.ReportDirectoryName, "reportDirectoryName", `whitesource-reports`, "Name of the directory to save vulnerability/risk reports to")
cmd.Flags().BoolVar(&stepConfig.Reporting, "reporting", true, "Whether assessment is being done at all, defaults to `true`")
cmd.Flags().StringVar(&stepConfig.ScanImage, "scanImage", os.Getenv("PIPER_scanImage"), "For `buildTool: docker`: Defines the docker image which should be scanned.")
cmd.Flags().BoolVar(&stepConfig.ScanImageIncludeLayers, "scanImageIncludeLayers", true, "For `buildTool: docker`: Defines if layers should be included.")
cmd.Flags().StringVar(&stepConfig.ScanImageRegistryURL, "scanImageRegistryUrl", os.Getenv("PIPER_scanImageRegistryUrl"), "For `buildTool: docker`: Defines the registry where the scanImage is located.")
cmd.Flags().StringVar(&stepConfig.ScanType, "scanType", os.Getenv("PIPER_scanType"), "Type of development stack used to implement the solution. For scan types other than `mta`, `maven`, and `npm`, the WhiteSource Unified Agent is downloaded and used to perform the scan. If the parameter is not provided, it is derived from the parameter `buildTool`, which is usually configured in the general section of the pipeline config file.")
cmd.Flags().StringVar(&stepConfig.CvssSeverityLimit, "cvssSeverityLimit", `-1`, "Limit of tolerable CVSS v3 score upon assessment and in consequence fails the build, defaults to `-1`.")
cmd.Flags().StringVar(&stepConfig.Includes, "includes", `**\/src\/main\/**\/*.java **\/*.py **\/*.go **\/*.js **\/*.ts`, "Space separated list of file path patterns to include in the scan, slashes must be escaped for sed.")
cmd.Flags().StringVar(&stepConfig.Excludes, "excludes", `tests/**/*.py **/src/test/**/*.java`, "Space separated list of file path patterns to exclude in the scan")
cmd.Flags().StringVar(&stepConfig.ProductToken, "productToken", os.Getenv("PIPER_productToken"), "Token of the WhiteSource product to be created and used for results aggregation, usually determined automatically. Can optionally be provided as an alternative to `productName`.")
cmd.Flags().StringVar(&stepConfig.AgentParameters, "agentParameters", os.Getenv("PIPER_agentParameters"), "[NOT IMPLEMENTED] Additional parameters passed to the Unified Agent command line.")
cmd.Flags().BoolVar(&stepConfig.SecurityVulnerabilities, "securityVulnerabilities", true, "Whether security compliance is considered and reported as part of the assessment.")
cmd.Flags().StringVar(&stepConfig.ServiceURL, "serviceUrl", `https://saas.whitesourcesoftware.com/api`, "URL to the WhiteSource API endpoint.")
cmd.Flags().IntVar(&stepConfig.Timeout, "timeout", 900, "Timeout in seconds until an HTTP call is forcefully terminated.")
cmd.Flags().StringVar(&stepConfig.UserToken, "userToken", os.Getenv("PIPER_userToken"), "WhiteSource token identifying the user executing the scan.")
cmd.Flags().StringVar(&stepConfig.VersioningModel, "versioningModel", `major`, "The default project versioning model used in case `projectVersion` parameter is empty for creating the version based on the build descriptor version to report results in Whitesource, can be one of `'major'`, `'major-minor'`, `'semantic'`, `'full'`")
cmd.Flags().StringVar(&stepConfig.VulnerabilityReportFormat, "vulnerabilityReportFormat", `xlsx`, "Format of the file the vulnerability report is written to.")
cmd.Flags().StringVar(&stepConfig.VulnerabilityReportTitle, "vulnerabilityReportTitle", `WhiteSource Security Vulnerability Report`, "Title of vulnerability report written during the assessment phase.")
cmd.Flags().StringVar(&stepConfig.ProjectSettingsFile, "projectSettingsFile", os.Getenv("PIPER_projectSettingsFile"), "Path to the mvn settings file that should be used as project settings file.")
cmd.Flags().StringVar(&stepConfig.GlobalSettingsFile, "globalSettingsFile", os.Getenv("PIPER_globalSettingsFile"), "Path to the mvn settings file that should be used as global settings file.")
cmd.Flags().StringVar(&stepConfig.M2Path, "m2Path", os.Getenv("PIPER_m2Path"), "Path to the location of the local repository that should be used.")
@@ -209,11 +212,67 @@ func whitesourceExecuteScanMetadata() config.StepData {
Metadata: config.StepMetadata{
Name: "whitesourceExecuteScan",
Aliases: []config.Alias{},
Description: "BETA",
Description: "Execute a WhiteSource scan",
},
Spec: config.StepSpec{
Inputs: config.StepInputs{
Parameters: []config.StepParameters{
{
Name: "agentDownloadUrl",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "agentFileName",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "agentParameters",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "[]string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "agentUrl",
ResourceRef: []config.ResourceReference{},
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "aggregateVersionWideReport",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "bool",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "buildDescriptorExcludeList",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "[]string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "buildDescriptorFile",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "buildTool",
ResourceRef: []config.ResourceReference{
@@ -228,21 +287,13 @@ func whitesourceExecuteScanMetadata() config.StepData {
Aliases: []config.Alias{},
},
{
Name: "buildDescriptorFile",
Name: "configFilePath",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "versioningModel",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS", "GENERAL"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{{Name: "defaultVersioningModel"}},
},
{
Name: "createProductFromPipeline",
ResourceRef: []config.ResourceReference{},
@@ -252,129 +303,7 @@ func whitesourceExecuteScanMetadata() config.StepData {
Aliases: []config.Alias{},
},
{
Name: "securityVulnerabilities",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "bool",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "timeout",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "int",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "agentDownloadUrl",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "configFilePath",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "reportDirectoryName",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "aggregateVersionWideReport",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "bool",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "vulnerabilityReportFormat",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "parallelLimit",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "reporting",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "bool",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "serviceUrl",
ResourceRef: []config.ResourceReference{},
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "buildDescriptorExcludeList",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "[]string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "orgToken",
ResourceRef: []config.ResourceReference{
{
Name: "orgAdminUserTokenCredentialsId",
Type: "secret",
},
},
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: true,
Aliases: []config.Alias{},
},
{
Name: "userToken",
ResourceRef: []config.ResourceReference{
{
Name: "userTokenCredentialsId",
Type: "secret",
},
},
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: true,
Aliases: []config.Alias{},
},
{
Name: "licensingVulnerabilities",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "bool",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "agentFileName",
Name: "cvssSeverityLimit",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
@@ -390,9 +319,25 @@ func whitesourceExecuteScanMetadata() config.StepData {
Aliases: []config.Alias{},
},
{
Name: "productVersion",
Name: "excludes",
ResourceRef: []config.ResourceReference{},
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "[]string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "includes",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "[]string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "installCommand",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{},
@@ -403,6 +348,35 @@ func whitesourceExecuteScanMetadata() config.StepData {
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{{Name: "whitesource/jreDownloadUrl"}},
},
{
Name: "licensingVulnerabilities",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "bool",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "orgToken",
ResourceRef: []config.ResourceReference{
{
Name: "orgAdminUserTokenCredentialsId",
Type: "secret",
},
},
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: true,
Aliases: []config.Alias{{Name: "whitesourceOrgToken"}, {Name: "whitesource/orgToken"}},
},
{
Name: "parallelLimit",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
@@ -411,7 +385,23 @@ func whitesourceExecuteScanMetadata() config.StepData {
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{{Name: "whitesourceProductName"}},
Aliases: []config.Alias{{Name: "whitesourceProductName"}, {Name: "whitesource/productName"}},
},
{
Name: "productToken",
ResourceRef: []config.ResourceReference{},
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{{Name: "whitesourceProductToken"}, {Name: "whitesource/productToken"}},
},
{
Name: "productVersion",
ResourceRef: []config.ResourceReference{},
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{{Name: "whitesourceProductVersion"}, {Name: "whitesource/productVersion"}},
},
{
Name: "projectName",
@@ -430,7 +420,7 @@ func whitesourceExecuteScanMetadata() config.StepData {
Aliases: []config.Alias{},
},
{
Name: "vulnerabilityReportTitle",
Name: "reportDirectoryName",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
@@ -438,7 +428,31 @@ func whitesourceExecuteScanMetadata() config.StepData {
Aliases: []config.Alias{},
},
{
Name: "installCommand",
Name: "reporting",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "bool",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "scanImage",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "scanImageIncludeLayers",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "bool",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "scanImageRegistryUrl",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
@@ -454,39 +468,60 @@ func whitesourceExecuteScanMetadata() config.StepData {
Aliases: []config.Alias{},
},
{
Name: "cvssSeverityLimit",
Name: "securityVulnerabilities",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Type: "bool",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "includes",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "excludes",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "productToken",
Name: "serviceUrl",
ResourceRef: []config.ResourceReference{},
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{{Name: "whitesourceProductToken"}},
Aliases: []config.Alias{{Name: "whitesourceServiceUrl"}, {Name: "whitesource/serviceUrl"}},
},
{
Name: "agentParameters",
Name: "timeout",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "int",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "userToken",
ResourceRef: []config.ResourceReference{
{
Name: "userTokenCredentialsId",
Type: "secret",
},
},
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: true,
Aliases: []config.Alias{},
},
{
Name: "versioningModel",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS", "GENERAL"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{{Name: "defaultVersioningModel"}},
},
{
Name: "vulnerabilityReportFormat",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
Mandatory: false,
Aliases: []config.Alias{},
},
{
Name: "vulnerabilityReportTitle",
ResourceRef: []config.ResourceReference{},
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
Type: "string",
@@ -536,11 +571,13 @@ func whitesourceExecuteScanMetadata() config.StepData {
},
},
Containers: []config.Container{
{Image: "devxci/mbtci:1.0.14", WorkingDir: "/home/mta", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "scanType", Value: "mta"}}}}},
{Image: "maven:3.5-jdk-8", WorkingDir: "/home/java", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "scanType", Value: "maven"}}}}},
{Image: "node:lts-stretch", WorkingDir: "/home/node", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "scanType", Value: "npm"}}}}},
{Image: "hseeberger/scala-sbt:8u181_2.12.8_1.2.8", WorkingDir: "/home/scala", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "scanType", Value: "sbt"}}}}},
{Image: "buildpack-deps:stretch-curl", WorkingDir: "/home/dub", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "scanType", Value: "dub"}}}}},
{Image: "buildpack-deps:stretch-curl", WorkingDir: "/tmp", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "buildTool", Value: "dub"}, {Name: "buildTool", Value: "docker"}}}}},
{Image: "devxci/mbtci:1.0.14", WorkingDir: "/home/mta", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "buildTool", Value: "mta"}}}, {ConditionRef: "strings-equal", Params: []config.Param{{Name: "scanType", Value: "mta"}}}}},
{Image: "golang:1", WorkingDir: "/go", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "buildTool", Value: "go"}}}}},
{Image: "hseeberger/scala-sbt:8u181_2.12.8_1.2.8", WorkingDir: "/tmp", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "buildTool", Value: "sbt"}}}}},
{Image: "maven:3.5-jdk-8", WorkingDir: "/tmp", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "buildTool", Value: "maven"}}}, {ConditionRef: "strings-equal", Params: []config.Param{{Name: "scanType", Value: "maven"}}}}},
{Image: "node:lts-stretch", WorkingDir: "/home/node", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "buildTool", Value: "npm"}}}, {ConditionRef: "strings-equal", Params: []config.Param{{Name: "scanType", Value: "npm"}}}}},
{Image: "python:3.6-stretch", WorkingDir: "/tmp", Conditions: []config.Condition{{ConditionRef: "strings-equal", Params: []config.Param{{Name: "buildTool", Value: "pip"}}}}},
},
Outputs: config.StepOutputs{
Resources: []config.StepResources{

View File

@@ -47,12 +47,15 @@ func (p *fileProperties) isDir() bool {
//FilesMock implements the functions from piperutils.Files with an in-memory file system.
type FilesMock struct {
files map[string]*fileProperties
writtenFiles []string
copiedFiles map[string]string
removedFiles []string
CurrentDir string
Separator string
files map[string]*fileProperties
writtenFiles []string
copiedFiles map[string]string
removedFiles []string
CurrentDir string
Separator string
FileExistsErrors map[string]error
FileWriteError error
FileWriteErrors map[string]error
}
func (f *FilesMock) init() {
@@ -147,6 +150,9 @@ func (f *FilesMock) HasCopiedFile(src string, dest string) bool {
// FileExists returns true if file content has been associated with the given path, false otherwise.
// Only relative paths are supported.
func (f *FilesMock) FileExists(path string) (bool, error) {
if f.FileExistsErrors[path] != nil {
return false, f.FileExistsErrors[path]
}
if f.files == nil {
return false, nil
}
@@ -218,10 +224,13 @@ func (f *FilesMock) FileRead(path string) ([]byte, error) {
// FileWrite just forwards to AddFile(), i.e. the content is associated with the given path.
func (f *FilesMock) FileWrite(path string, content []byte, mode os.FileMode) error {
if f.FileWriteError != nil {
return f.FileWriteError
}
if f.FileWriteErrors[path] != nil {
return f.FileWriteErrors[path]
}
f.init()
// NOTE: FilesMock could be extended to have a set of paths for which FileWrite should fail.
// This is why AddFile() exists separately, to differentiate the notion of setting up the mocking
// versus implementing the methods from Files.
f.writtenFiles = append(f.writtenFiles, f.toAbsPath(path))
f.AddFileWithMode(path, content, mode)
return nil
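
The new FileExistsErrors and FileWriteErrors maps allow a test to fail file operations for selected paths only, while all other paths keep the normal in-memory behavior. A hedged usage sketch; the test name, paths, and error texts are made up for illustration and not part of the commit:

package mock_test

import (
	"errors"
	"testing"

	"github.com/SAP/jenkins-library/pkg/mock"
	"github.com/stretchr/testify/assert"
)

func TestSimulatedFileSystemFailures(t *testing.T) {
	utils := mock.FilesMock{
		// FileExists fails only for this path, all other paths behave normally
		FileExistsErrors: map[string]error{"pom.xml": errors.New("access denied")},
		// FileWrite fails only for the report file
		FileWriteErrors: map[string]error{"whitesource-reports/report.txt": errors.New("disk full")},
	}
	utils.AddFile("setup.py", []byte("setup()"))

	_, err := utils.FileExists("pom.xml")
	assert.EqualError(t, err, "access denied")

	exists, err := utils.FileExists("setup.py")
	assert.NoError(t, err)
	assert.True(t, exists)

	err = utils.FileWrite("whitesource-reports/report.txt", []byte("content"), 0666)
	assert.EqualError(t, err, "disk full")
}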

View File

@@ -10,6 +10,13 @@ import (
"github.com/pkg/errors"
)
// DockerDescriptor holds the unique identifier combination for a Docker artifact
type DockerDescriptor struct {
GroupID string
ArtifactID string
Version string
}
// Docker defines an artifact based on a Dockerfile
type Docker struct {
artifact Artifact
@@ -144,5 +151,25 @@ func (d *Docker) versionFromBaseImageTag() string {
// GetCoordinates returns the coordinates
func (d *Docker) GetCoordinates() (Coordinates, error) {
return nil, nil
result := DockerDescriptor{}
result.GroupID = ""
result.ArtifactID, _ = d.GetArtifactID()
result.Version = ""
// cannot properly resolve version unless all options are provided. Can we ensure proper parameterization?
// result.Version, err = d.GetVersion()
// if err != nil {
// return nil, err
// }
return result, nil
}
// GetArtifactID returns the current ID of the artifact
func (d *Docker) GetArtifactID() (string, error) {
d.init()
artifactID := strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(d.options.DockerImage, "/", "_"), ":", "_"), ".", "_")
return artifactID, nil
}

View File

@@ -167,3 +167,16 @@ func TestVersionFromBaseImageTag(t *testing.T) {
assert.Equal(t, test.expected, test.docker.versionFromBaseImageTag())
}
}
func TestGetCoordinates(t *testing.T) {
docker := Docker{
readFile: func(filename string) ([]byte, error) { return []byte("FROM test:1.2.3"), nil },
versionSource: "FROM",
options: &Options{DockerImage: "my/test/image:tag"},
}
coordinates, err := docker.GetCoordinates()
assert.NoError(t, err)
assert.Equal(t, DockerDescriptor{GroupID: "", ArtifactID: "my_test_image_tag", Version: ""}, coordinates)
//assert.Equal(t, DockerDescriptor{GroupID: "", ArtifactID: "my_test_image_tag", Version: "1.2.3"}, coordinates)
}

View File

@@ -60,9 +60,9 @@ func (p *Pip) GetVersion() (string, error) {
if strings.Contains(p.path, "setup.py") {
buildDescriptorFilePath, err = searchDescriptor([]string{"version.txt", "VERSION"}, p.fileExists)
if err != nil {
err = p.init()
if err != nil {
return "", errors.Wrapf(err, "failed to read file '%v'", p.path)
initErr := p.init()
if initErr != nil {
return "", errors.Wrapf(initErr, "failed to read file '%v'", p.path)
}
if evaluateResult(p.buildDescriptorContent, VersionRegex) {
compile := regexp.MustCompile(VersionRegex)
@@ -75,6 +75,7 @@ func (p *Pip) GetVersion() (string, error) {
artifact := &Versionfile{
path: buildDescriptorFilePath,
versioningScheme: p.VersioningScheme(),
readFile: p.readFile,
}
return artifact.GetVersion()
}
@@ -86,9 +87,9 @@ func (p *Pip) SetVersion(v string) error {
if strings.Contains(p.path, "setup.py") {
buildDescriptorFilePath, err = searchDescriptor([]string{"version.txt", "VERSION"}, p.fileExists)
if err != nil {
err = p.init()
if err != nil {
return errors.Wrapf(err, "failed to read file '%v'", p.path)
initErr := p.init()
if initErr != nil {
return errors.Wrapf(initErr, "failed to read file '%v'", p.path)
}
if evaluateResult(p.buildDescriptorContent, VersionRegex) {
compile := regexp.MustCompile(VersionRegex)
@@ -104,6 +105,7 @@ func (p *Pip) SetVersion(v string) error {
artifact := &Versionfile{
path: buildDescriptorFilePath,
versioningScheme: p.VersioningScheme(),
writeFile: p.writeFile,
}
return artifact.SetVersion(v)
}

View File

@@ -1 +1,245 @@
package versioning
import (
"fmt"
"testing"
"github.com/SAP/jenkins-library/pkg/mock"
"github.com/stretchr/testify/assert"
)
func TestPipGetVersion(t *testing.T) {
t.Parallel()
t.Run("success case - setup.py", func(t *testing.T) {
fileUtils := mock.FilesMock{}
fileUtils.AddFile("setup.py", []byte(`setup(name="simple-python",version="1.2.3"`))
pip := Pip{
path: "setup.py",
fileExists: fileUtils.FileExists,
readFile: fileUtils.FileRead,
writeFile: fileUtils.FileWrite,
}
version, err := pip.GetVersion()
assert.NoError(t, err)
assert.Equal(t, "1.2.3", version)
})
t.Run("success case - setup.py & version.txt", func(t *testing.T) {
fileUtils := mock.FilesMock{}
fileUtils.AddFile("setup.py", []byte(`setup(name="simple-python",`))
fileUtils.AddFile("version.txt", []byte(`1.2.4`))
pip := Pip{
path: "setup.py",
fileExists: fileUtils.FileExists,
readFile: fileUtils.FileRead,
writeFile: fileUtils.FileWrite,
}
version, err := pip.GetVersion()
assert.NoError(t, err)
assert.Equal(t, "1.2.4", version)
})
t.Run("success case - setup.py & VERSION", func(t *testing.T) {
fileUtils := mock.FilesMock{}
fileUtils.AddFile("setup.py", []byte(`setup(name="simple-python",`))
fileUtils.AddFile("VERSION", []byte(`1.2.5`))
pip := Pip{
path: "setup.py",
fileExists: fileUtils.FileExists,
readFile: fileUtils.FileRead,
writeFile: fileUtils.FileWrite,
}
version, err := pip.GetVersion()
assert.NoError(t, err)
assert.Equal(t, "1.2.5", version)
})
t.Run("error to read file", func(t *testing.T) {
fileUtils := mock.FilesMock{}
pip := Pip{
path: "setup.py",
fileExists: fileUtils.FileExists,
readFile: fileUtils.FileRead,
writeFile: fileUtils.FileWrite,
}
_, err := pip.GetVersion()
assert.Contains(t, fmt.Sprint(err), "failed to read file 'setup.py'")
})
t.Run("error to retrieve version", func(t *testing.T) {
fileUtils := mock.FilesMock{}
fileUtils.AddFile("setup.py", []byte(`setup(name="simple-python",`))
pip := Pip{
path: "setup.py",
fileExists: fileUtils.FileExists,
readFile: fileUtils.FileRead,
writeFile: fileUtils.FileWrite,
}
_, err := pip.GetVersion()
assert.Contains(t, fmt.Sprint(err), "failed to retrieve version")
})
}
func TestPipSetVersion(t *testing.T) {
t.Run("success case - setup.py", func(t *testing.T) {
fileUtils := mock.FilesMock{}
fileUtils.AddFile("setup.py", []byte(`setup(name="simple-python",version="1.2.3"`))
pip := Pip{
path: "setup.py",
fileExists: fileUtils.FileExists,
readFile: fileUtils.FileRead,
writeFile: fileUtils.FileWrite,
}
err := pip.SetVersion("2.0.0")
assert.NoError(t, err)
content, _ := fileUtils.FileRead("setup.py")
assert.Contains(t, string(content), `version="2.0.0"`)
})
t.Run("success case - setup.py & version.txt", func(t *testing.T) {
fileUtils := mock.FilesMock{}
fileUtils.AddFile("setup.py", []byte(`setup(name="simple-python",`))
fileUtils.AddFile("version.txt", []byte(`1.2.3`))
pip := Pip{
path: "setup.py",
fileExists: fileUtils.FileExists,
readFile: fileUtils.FileRead,
writeFile: fileUtils.FileWrite,
}
err := pip.SetVersion("2.0.0")
assert.NoError(t, err)
content, _ := fileUtils.FileRead("version.txt")
assert.Equal(t, "2.0.0", string(content))
})
t.Run("success case - setup.py & VERSION", func(t *testing.T) {
fileUtils := mock.FilesMock{}
fileUtils.AddFile("setup.py", []byte(`setup(name="simple-python",`))
fileUtils.AddFile("VERSION", []byte(`1.2.3`))
pip := Pip{
path: "setup.py",
fileExists: fileUtils.FileExists,
readFile: fileUtils.FileRead,
writeFile: fileUtils.FileWrite,
}
err := pip.SetVersion("2.0.0")
assert.NoError(t, err)
content, _ := fileUtils.FileRead("VERSION")
assert.Equal(t, "2.0.0", string(content))
})
t.Run("error to read file", func(t *testing.T) {
fileUtils := mock.FilesMock{}
pip := Pip{
path: "setup.py",
fileExists: fileUtils.FileExists,
readFile: fileUtils.FileRead,
writeFile: fileUtils.FileWrite,
}
err := pip.SetVersion("2.0.0")
assert.Contains(t, fmt.Sprint(err), "failed to read file 'setup.py'")
})
t.Run("error to retrieve version", func(t *testing.T) {
fileUtils := mock.FilesMock{}
fileUtils.AddFile("setup.py", []byte(`setup(name="simple-python",`))
pip := Pip{
path: "setup.py",
fileExists: fileUtils.FileExists,
readFile: fileUtils.FileRead,
writeFile: fileUtils.FileWrite,
}
err := pip.SetVersion("2.0.0")
assert.Contains(t, fmt.Sprint(err), "failed to retrieve version")
})
}
func TestPipGetCoordinates(t *testing.T) {
t.Run("success case - setup.py", func(t *testing.T) {
fileUtils := mock.FilesMock{}
fileUtils.AddFile("setup.py", []byte(`setup(name="simple-python",version="1.2.3"`))
pip := Pip{
path: "setup.py",
fileExists: fileUtils.FileExists,
readFile: fileUtils.FileRead,
writeFile: fileUtils.FileWrite,
}
coordinates, err := pip.GetCoordinates()
pipDescriptor := coordinates.(*PipDescriptor)
assert.NoError(t, err)
assert.Equal(t, "simple-python", pipDescriptor.ArtifactID)
assert.Equal(t, "1.2.3", pipDescriptor.Version)
})
t.Run("success case - only version", func(t *testing.T) {
fileUtils := mock.FilesMock{}
fileUtils.AddFile("setup.py", []byte(`setup(version="1.2.3"`))
pip := Pip{
path: "setup.py",
fileExists: fileUtils.FileExists,
readFile: fileUtils.FileRead,
writeFile: fileUtils.FileWrite,
}
coordinates, err := pip.GetCoordinates()
pipDescriptor := coordinates.(*PipDescriptor)
assert.NoError(t, err)
assert.Equal(t, "", pipDescriptor.ArtifactID)
assert.Equal(t, "1.2.3", pipDescriptor.Version)
})
t.Run("error to retrieve setup.py", func(t *testing.T) {
fileUtils := mock.FilesMock{}
pip := Pip{
path: "setup.py",
fileExists: fileUtils.FileExists,
readFile: fileUtils.FileRead,
writeFile: fileUtils.FileWrite,
}
_, err := pip.GetCoordinates()
assert.Contains(t, fmt.Sprint(err), "failed to read file 'setup.py'")
})
t.Run("error to retrieve version", func(t *testing.T) {
fileUtils := mock.FilesMock{}
fileUtils.AddFile("setup.py", []byte(`setup(name="simple-python"`))
pip := Pip{
path: "setup.py",
fileExists: fileUtils.FileExists,
readFile: fileUtils.FileRead,
writeFile: fileUtils.FileWrite,
}
_, err := pip.GetCoordinates()
assert.Contains(t, fmt.Sprint(err), "failed to retrieve version")
})
}

View File

@@ -20,9 +20,10 @@ type Artifact interface {
GetCoordinates() (Coordinates, error)
}
// Options define build tool specific settings in order to properly retrieve e.g. the version of an artifact
// Options define build tool specific settings in order to properly retrieve e.g. the version / coordinates of an artifact
type Options struct {
ProjectSettingsFile string
DockerImage string
GlobalSettingsFile string
M2Path string
VersionSource string
@@ -134,7 +135,7 @@ func GetArtifact(buildTool, buildDescriptorFilePath string, opts *Options, utils
case "pip":
if len(buildDescriptorFilePath) == 0 {
var err error
buildDescriptorFilePath, err = searchDescriptor([]string{"version.txt", "VERSION", "setup.py"}, fileExists)
buildDescriptorFilePath, err = searchDescriptor([]string{"setup.py", "version.txt", "VERSION"}, fileExists)
if err != nil {
return artifact, err
}

View File

@@ -126,7 +126,7 @@ func TestGetArtifact(t *testing.T) {
theType, ok := pip.(*Pip)
assert.True(t, ok)
assert.Equal(t, "version.txt", theType.path)
assert.Equal(t, "setup.py", theType.path)
assert.Equal(t, "pep440", pip.VersioningScheme())
})
@@ -134,7 +134,7 @@ func TestGetArtifact(t *testing.T) {
fileExists = func(string) (bool, error) { return false, nil }
_, err := GetArtifact("pip", "", &Options{}, nil)
assert.EqualError(t, err, "no build descriptor available, supported: [version.txt VERSION setup.py]")
assert.EqualError(t, err, "no build descriptor available, supported: [setup.py version.txt VERSION]")
})
t.Run("sbt", func(t *testing.T) {

View File

@@ -0,0 +1,272 @@
package whitesource
import (
"bytes"
"fmt"
"path/filepath"
"strings"
"time"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/SAP/jenkins-library/pkg/maven"
"github.com/magiconair/properties"
"github.com/pkg/errors"
)
// ConfigOption defines a dedicated WhiteSource config which can be enforced if required
type ConfigOption struct {
Name string
Value interface{}
OmitIfPresent string
Force bool
}
// ConfigOptions contains a list of config options (ConfigOption)
type ConfigOptions []ConfigOption
// RewriteUAConfigurationFile updates the user's Unified Agent configuration with configuration which should be enforced or just eases the overall configuration
// It then returns the path to the file containing the updated configuration
func (s *ScanOptions) RewriteUAConfigurationFile(utils Utils) (string, error) {
uaContent, err := utils.FileRead(s.ConfigFilePath)
uaConfig, propErr := properties.Load(uaContent, properties.UTF8)
uaConfigMap := map[string]string{}
if err != nil || propErr != nil {
log.Entry().Warningf("Failed to load configuration file '%v'. Creating a configuration file from scratch.", s.ConfigFilePath)
} else {
uaConfigMap = uaConfig.Map()
}
cOptions := ConfigOptions{}
cOptions.addGeneralDefaults(s, utils)
cOptions.addBuildToolDefaults(s, utils)
newConfigMap := cOptions.updateConfig(&uaConfigMap)
newConfig := properties.LoadMap(newConfigMap)
now := time.Now().Format("20060102150405")
newConfigFilePath := fmt.Sprintf("%v.%v", s.ConfigFilePath, now)
var configContent bytes.Buffer
_, err = newConfig.Write(&configContent, properties.UTF8)
if err != nil {
return "", errors.Wrap(err, "failed to write properties")
}
err = utils.FileWrite(newConfigFilePath, configContent.Bytes(), 0666)
if err != nil {
return "", errors.Wrap(err, "failed to write file")
}
return newConfigFilePath, nil
}
func (c *ConfigOptions) updateConfig(originalConfig *map[string]string) map[string]string {
newConfig := map[string]string{}
for k, v := range *originalConfig {
newConfig[k] = v
}
for _, cOpt := range *c {
//omit default if value present
var dependentValue string
if len(cOpt.OmitIfPresent) > 0 {
dependentValue = newConfig[cOpt.OmitIfPresent]
}
if len(dependentValue) == 0 && (cOpt.Force || len(newConfig[cOpt.Name]) == 0) {
newConfig[cOpt.Name] = fmt.Sprint(cOpt.Value)
}
}
return newConfig
}
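// Illustration with hypothetical values: given an original config of
// {"log.level": "info", "projectToken": "abc"} and the options
// {Name: "log.level", Value: "debug"}, {Name: "apiKey", Value: "t1", Force: true},
// {Name: "productToken", Value: "p1", OmitIfPresent: "projectToken"},
// updateConfig keeps "info" for log.level (present and not forced), always writes
// apiKey (forced) and skips productToken (its OmitIfPresent key is already set).
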
func (c *ConfigOptions) addGeneralDefaults(config *ScanOptions, utils Utils) {
cOptions := ConfigOptions{}
if strings.HasPrefix(config.ProductName, "DIST - ") {
cOptions = append(cOptions, []ConfigOption{
{Name: "checkPolicies", Value: false, Force: true},
{Name: "forceCheckAllDependencies", Value: false, Force: true},
}...)
} else {
cOptions = append(cOptions, []ConfigOption{
{Name: "checkPolicies", Value: true, Force: true},
{Name: "forceCheckAllDependencies", Value: true, Force: true},
}...)
}
if config.Verbose {
cOptions = append(cOptions, []ConfigOption{
{Name: "log.level", Value: "debug"},
{Name: "log.files.level", Value: "debug"},
}...)
}
if len(config.Excludes) > 0 {
cOptions = append(cOptions, ConfigOption{Name: "excludes", Value: strings.Join(config.Excludes, " "), Force: true})
}
if len(config.Includes) > 0 {
cOptions = append(cOptions, ConfigOption{Name: "includes", Value: strings.Join(config.Includes, " "), Force: true})
}
cOptions = append(cOptions, []ConfigOption{
{Name: "apiKey", Value: config.OrgToken, Force: true},
{Name: "productName", Value: config.ProductName, Force: true},
{Name: "productVersion", Value: config.ProductVersion, Force: true},
{Name: "projectName", Value: config.ProjectName, Force: true},
{Name: "projectVersion", Value: config.ProductVersion, Force: true},
{Name: "productToken", Value: config.ProductToken, OmitIfPresent: "projectToken", Force: true},
{Name: "userKey", Value: config.UserToken, Force: true},
{Name: "forceUpdate", Value: true, Force: true},
{Name: "offline", Value: false, Force: true},
{Name: "ignoreSourceFiles", Value: true, Force: true},
{Name: "resolveAllDependencies", Value: false, Force: true},
{Name: "failErrorLevel", Value: "ALL", Force: true},
{Name: "case.sensitive.glob", Value: false},
{Name: "followSymbolicLinks", Value: true},
}...)
for _, cOpt := range cOptions {
*c = append(*c, cOpt)
}
}
func (c *ConfigOptions) addBuildToolDefaults(config *ScanOptions, utils Utils) error {
buildToolDefaults := map[string]ConfigOptions{
"docker": {
{Name: "docker.scanImages", Value: true, Force: true},
{Name: "docker.scanTarFiles", Value: true, Force: true},
{Name: "docker.includes", Value: ".*.tar", Force: true},
{Name: "ignoreSourceFiles", Value: true, Force: true},
{Name: "python.resolveGlobalPackages", Value: true, Force: false},
{Name: "resolveAllDependencies", Value: true, Force: false},
{Name: "updateType", Value: "OVERRIDE", Force: true},
{Name: "docker.excludeBaseImage", Value: "true", Force: false},
},
"dub": {
{Name: "includes", Value: "**/*.d **/*.di"},
},
//ToDo: rename to go?
//ToDo: switch to gomod as dependency manager
"golang": {
{Name: "go.resolveDependencies", Value: true, Force: true},
{Name: "go.ignoreSourceFiles", Value: true, Force: true},
{Name: "go.collectDependenciesAtRuntime", Value: false},
{Name: "go.dependencyManager", Value: "modules"},
},
"gradle": {
{Name: "gradle.localRepositoryPath", Value: ".gradle", Force: false},
},
"maven": {
{Name: "updateEmptyProject", Value: true, Force: true},
{Name: "maven.resolveDependencies", Value: true, Force: true},
{Name: "maven.ignoreSourceFiles", Value: true, Force: true},
{Name: "maven.aggregateModules", Value: false, Force: true},
{Name: "maven.ignoredScopes", Value: "test provided"},
{Name: "maven.ignorePomModules", Value: false},
{Name: "maven.runPreStep", Value: true},
// ToDo: check with Klaus since, when set to true, the name will no longer include the groupId
{Name: "maven.projectNameFromDependencyFile", Value: false},
{Name: "includes", Value: "**/*.jar"},
{Name: "excludes", Value: "**/*sources.jar **/*javadoc.jar"},
},
"npm": {
{Name: "npm.resolveDependencies", Value: true, Force: true},
{Name: "npm.ignoreSourceFiles", Value: true, Force: true},
{Name: "npm.ignoreNpmLsErrors", Value: true},
{Name: "npm.failOnNpmLsErrors", Value: false},
{Name: "npm.runPreStep", Value: true},
{Name: "npm.projectNameFromDependencyFile", Value: true},
{Name: "npm.resolveLockFile", Value: true},
},
"pip": {
{Name: "python.resolveDependencies", Value: true, Force: true},
{Name: "python.ignoreSourceFiles", Value: true, Force: true},
{Name: "python.ignorePipInstallErrors", Value: false},
{Name: "python.installVirtualEnv", Value: true},
{Name: "python.resolveHierarchyTree", Value: true},
{Name: "python.requirementsFileIncludes", Value: "requirements.txt"},
{Name: "python.resolveSetupPyFiles", Value: true},
{Name: "python.runPipenvPreStep", Value: true},
{Name: "python.pipenvDevDependencies", Value: true},
{Name: "python.IgnorePipenvInstallErrors", Value: false},
{Name: "includes", Value: "**/*.py **/*.txt"},
{Name: "excludes", Value: "**/*sources.jar **/*javadoc.jar"},
},
"sbt": {
{Name: "sbt.resolveDependencies", Value: true, Force: true},
{Name: "sbt.ignoreSourceFiles", Value: true, Force: true},
{Name: "sbt.aggregateModules", Value: false, Force: true},
{Name: "sbt.runPreStep", Value: true},
{Name: "includes", Value: "**/*.jar"},
{Name: "excludes", Value: "**/*sources.jar **/*javadoc.jar"},
},
}
if config.BuildTool == "maven" {
if len(config.M2Path) > 0 {
*c = append(*c, ConfigOption{Name: "maven.m2RepositoryPath", Value: config.M2Path, Force: true})
}
mvnAdditionalArguments, _ := maven.DownloadAndGetMavenParameters(config.GlobalSettingsFile, config.ProjectSettingsFile, utils)
mvnAdditionalArguments = append(mvnAdditionalArguments, mvnProjectExcludes(config.BuildDescriptorExcludeList, utils)...)
if len(mvnAdditionalArguments) > 0 {
*c = append(*c, ConfigOption{Name: "maven.additionalArguments", Value: strings.Join(mvnAdditionalArguments, " "), Force: true})
}
}
if config.BuildTool == "docker" {
// for now only support default name of Dockerfile
// ToDo: evaluate possibilities to allow also non-default Dockerfile names
dockerFile := "Dockerfile"
if exists, _ := utils.FileExists("Dockerfile"); exists {
*c = append(*c, ConfigOption{Name: "docker.dockerfilePath", Value: dockerFile, Force: false})
}
}
if cOptions := buildToolDefaults[config.BuildTool]; cOptions != nil {
for _, cOpt := range cOptions {
*c = append(*c, cOpt)
}
return nil
}
//ToDo: Do we want to auto generate the config via autoGenerateWhitesourceConfig() here?
// -> try to load original config file -> if not available generate?
log.Entry().Infof("Configuration for buildTool: '%v' is not yet hardened, please do a quality assessment of your scan results.", config.BuildTool)
return fmt.Errorf("configuration not hardened")
}
// handle modules to exclude based on buildDescriptorExcludeList returning e.g. --projects !integration-tests
func mvnProjectExcludes(buildDescriptorExcludeList []string, utils Utils) []string {
projectExcludes := []string{}
for _, buildDescriptor := range buildDescriptorExcludeList {
exists, _ := utils.FileExists(buildDescriptor)
if strings.Contains(buildDescriptor, "pom.xml") && exists {
module, _ := filepath.Split(buildDescriptor)
projectExcludes = append(projectExcludes, fmt.Sprintf("!%v", strings.TrimSuffix(module, "/")))
}
}
if len(projectExcludes) > 0 {
return []string{"--projects", strings.Join(projectExcludes, ",")}
}
return []string{}
}
//ToDo: Check if we want to optionally allow auto generation for unknown projects
func autoGenerateWhitesourceConfig(config *ScanOptions, utils Utils) error {
// TODO: Should we rely on -detect, or set the parameters manually?
if err := utils.RunExecutable("java", "-jar", config.AgentFileName, "-d", ".", "-detect"); err != nil {
return err
}
// Rename generated config file to config.ConfigFilePath parameter
if err := utils.FileRename("wss-generated-file.config", config.ConfigFilePath); err != nil {
return err
}
return nil
}
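A minimal usage sketch, assuming the whitesource package above is imported as `ws`; the `example` package and the helper name `scanWithRewrittenConfig` are hypothetical, only `ScanOptions`, `Utils`, `RewriteUAConfigurationFile` and `RunExecutable` come from the package itself:
package example
import (
	ws "github.com/SAP/jenkins-library/pkg/whitesource"
)
// scanWithRewrittenConfig is a hypothetical helper: it writes a timestamped copy of
// config.ConfigFilePath containing the enforced defaults and hands that copy to the
// Unified Agent via its -c flag, leaving the original configuration file untouched.
func scanWithRewrittenConfig(config *ws.ScanOptions, utils ws.Utils) error {
	configPath, err := config.RewriteUAConfigurationFile(utils)
	if err != nil {
		return err
	}
	// the agent jar (config.AgentFileName) is assumed to be downloaded already
	return utils.RunExecutable("java", "-jar", config.AgentFileName, "-d", ".", "-c", configPath)
}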

View File

@@ -0,0 +1,256 @@
package whitesource
import (
"fmt"
"testing"
"github.com/stretchr/testify/assert"
)
func TestRewriteUAConfigurationFile(t *testing.T) {
t.Parallel()
t.Run("default", func(t *testing.T) {
config := ScanOptions{
BuildTool: "npm",
ConfigFilePath: "ua.props",
}
utilsMock := NewScanUtilsMock()
utilsMock.AddFile(config.ConfigFilePath, []byte("test = dummy"))
path, err := config.RewriteUAConfigurationFile(utilsMock)
assert.NoError(t, err)
newUAConfig, err := utilsMock.FileRead(path)
assert.NoError(t, err)
assert.Contains(t, string(newUAConfig), "test = dummy")
assert.Contains(t, string(newUAConfig), "failErrorLevel = ALL")
})
t.Run("accept non-existing file", func(t *testing.T) {
config := ScanOptions{
BuildTool: "npm",
ConfigFilePath: "ua.props",
}
utilsMock := NewScanUtilsMock()
path, err := config.RewriteUAConfigurationFile(utilsMock)
assert.NoError(t, err)
newUAConfig, err := utilsMock.FileRead(path)
assert.NoError(t, err)
assert.Contains(t, string(newUAConfig), "failErrorLevel = ALL")
})
t.Run("error - write file", func(t *testing.T) {
config := ScanOptions{
BuildTool: "npm",
ConfigFilePath: "ua.props",
}
utilsMock := NewScanUtilsMock()
utilsMock.FileWriteError = fmt.Errorf("failed to write file")
_, err := config.RewriteUAConfigurationFile(utilsMock)
assert.Contains(t, fmt.Sprint(err), "failed to write file")
})
}
func TestUpdateConfig(t *testing.T) {
t.Parallel()
originalConfig := map[string]string{
"dependent": "dependentValue",
"forced": "forced_original",
"not_forced": "not_forced_original",
"dont_omit_forced": "dont_omit_forced_original",
"dont_omit_not_forced": "dont_omit_not_forced_original",
}
testConfig := ConfigOptions{
{Name: "non_existing_forced", Value: "non_existing_forced_val", Force: true},
{Name: "non_existing_not_forced", Value: "non_existing_not_forced_val", Force: false},
{Name: "forced", Value: "forced_val", Force: true},
{Name: "not_forced", Value: "not_forced_val", Force: false},
{Name: "omit", Value: "omit_val", OmitIfPresent: "dependent"},
{Name: "dont_omit", Value: "dont_omit_val", OmitIfPresent: "dependent_notExisting"},
{Name: "dont_omit_forced", Value: "dont_omit_forced_val", OmitIfPresent: "dependent_notExisting", Force: true},
{Name: "dont_omit_not_forced", Value: "dont_omit_not_forced_val", OmitIfPresent: "dependent_notExisting", Force: false},
}
updatedConfig := testConfig.updateConfig(&originalConfig)
assert.Equal(t, "dependentValue", updatedConfig["dependent"])
assert.Equal(t, "non_existing_forced_val", updatedConfig["non_existing_forced"])
assert.Equal(t, "non_existing_not_forced_val", updatedConfig["non_existing_not_forced"])
assert.Equal(t, "forced_val", updatedConfig["forced"])
assert.Equal(t, "not_forced_original", updatedConfig["not_forced"])
assert.NotEqual(t, "omit_val", updatedConfig["omit"])
assert.Equal(t, "dont_omit_val", updatedConfig["dont_omit"])
assert.Equal(t, "dont_omit_forced_val", updatedConfig["dont_omit_forced"])
assert.Equal(t, "dont_omit_not_forced_original", updatedConfig["dont_omit_not_forced"])
}
func TestAddGeneralDefaults(t *testing.T) {
t.Parallel()
utilsMock := NewScanUtilsMock()
t.Run("default", func(t *testing.T) {
testConfig := ConfigOptions{}
whitesourceConfig := ScanOptions{
OrgToken: "testOrgToken",
ProductName: "Test",
ProductToken: "testProductToken",
ProductVersion: "testVersion",
ProjectName: "testProject",
UserToken: "testuserKey",
}
testConfig.addGeneralDefaults(&whitesourceConfig, utilsMock)
assert.Equal(t, "checkPolicies", testConfig[0].Name)
assert.Equal(t, true, testConfig[0].Value)
assert.Equal(t, "forceCheckAllDependencies", testConfig[1].Name)
assert.Equal(t, true, testConfig[1].Value)
assert.Equal(t, "testOrgToken", testConfig[2].Value)
assert.Equal(t, "Test", testConfig[3].Value)
assert.Equal(t, "testVersion", testConfig[4].Value)
assert.Equal(t, "testProject", testConfig[5].Value)
assert.Equal(t, "testVersion", testConfig[6].Value)
assert.Equal(t, "testProductToken", testConfig[7].Value)
assert.Equal(t, "testuserKey", testConfig[8].Value)
})
t.Run("DIST product", func(t *testing.T) {
testConfig := ConfigOptions{}
whitesourceConfig := ScanOptions{
OrgToken: "testOrgToken",
ProductName: "DIST - Test",
ProductToken: "testProductToken",
ProductVersion: "testVersion",
ProjectName: "testProject",
UserToken: "testuserKey",
}
testConfig.addGeneralDefaults(&whitesourceConfig, utilsMock)
assert.Equal(t, "checkPolicies", testConfig[0].Name)
assert.Equal(t, false, testConfig[0].Value)
assert.Equal(t, "forceCheckAllDependencies", testConfig[1].Name)
assert.Equal(t, false, testConfig[1].Value)
})
t.Run("verbose", func(t *testing.T) {
testConfig := ConfigOptions{}
whitesourceConfig := ScanOptions{
Verbose: true,
}
testConfig.addGeneralDefaults(&whitesourceConfig, utilsMock)
assert.Equal(t, "log.level", testConfig[2].Name)
assert.Equal(t, "debug", testConfig[2].Value)
assert.Equal(t, "log.files.level", testConfig[3].Name)
assert.Equal(t, "debug", testConfig[3].Value)
})
t.Run("includes and excludes", func(t *testing.T) {
testConfig := ConfigOptions{}
whitesourceConfig := ScanOptions{
Excludes: []string{"**/excludes1", "**/excludes2"},
Includes: []string{"**/includes1", "**/includes2"},
}
testConfig.addGeneralDefaults(&whitesourceConfig, utilsMock)
assert.Equal(t, "excludes", testConfig[2].Name)
assert.Equal(t, "**/excludes1 **/excludes2", testConfig[2].Value)
assert.Equal(t, true, testConfig[2].Force)
assert.Equal(t, "includes", testConfig[3].Name)
assert.Equal(t, "**/includes1 **/includes2", testConfig[3].Value)
assert.Equal(t, true, testConfig[3].Force)
})
}
func TestAddBuildToolDefaults(t *testing.T) {
t.Parallel()
t.Run("success case", func(t *testing.T) {
utilsMock := NewScanUtilsMock()
var testConfig ConfigOptions
whitesourceConfig := ScanOptions{
BuildTool: "dub",
}
err := testConfig.addBuildToolDefaults(&whitesourceConfig, utilsMock)
assert.NoError(t, err)
assert.Equal(t, ConfigOptions{{Name: "includes", Value: "**/*.d **/*.di"}}, testConfig)
})
t.Run("error case", func(t *testing.T) {
utilsMock := NewScanUtilsMock()
var testConfig ConfigOptions
whitesourceConfig := ScanOptions{
BuildTool: "notHardened",
}
err := testConfig.addBuildToolDefaults(&whitesourceConfig, utilsMock)
assert.EqualError(t, err, "configuration not hardened")
})
t.Run("maven - m2 path", func(t *testing.T) {
utilsMock := NewScanUtilsMock()
testConfig := ConfigOptions{}
whitesourceConfig := ScanOptions{
BuildTool: "maven",
M2Path: "test/.m2",
}
testConfig.addBuildToolDefaults(&whitesourceConfig, utilsMock)
assert.Contains(t, testConfig, ConfigOption{Name: "maven.m2RepositoryPath", Value: "test/.m2", Force: true})
assert.NotContains(t, testConfig, ConfigOption{Name: "maven.additionalArguments", Value: "", Force: true})
})
t.Run("maven - settings", func(t *testing.T) {
utilsMock := NewScanUtilsMock()
testConfig := ConfigOptions{}
whitesourceConfig := ScanOptions{
BuildTool: "maven",
ProjectSettingsFile: "project-settings.xml",
GlobalSettingsFile: "global-settings.xml",
BuildDescriptorExcludeList: []string{"unit-tests/pom.xml"},
}
utilsMock.AddFile("unit-tests/pom.xml", []byte("dummy"))
testConfig.addBuildToolDefaults(&whitesourceConfig, utilsMock)
assert.Contains(t, testConfig, ConfigOption{Name: "maven.additionalArguments", Value: "--global-settings global-settings.xml --settings project-settings.xml --projects !unit-tests", Force: true})
})
t.Run("Docker - default", func(t *testing.T) {
utilsMock := NewScanUtilsMock()
testConfig := ConfigOptions{}
whitesourceConfig := ScanOptions{
BuildTool: "docker",
}
utilsMock.AddFile("Dockerfile", []byte("dummy"))
testConfig.addBuildToolDefaults(&whitesourceConfig, utilsMock)
assert.Contains(t, testConfig, ConfigOption{Name: "docker.dockerfilePath", Value: "Dockerfile", Force: false})
})
t.Run("Docker - no builddescriptor found", func(t *testing.T) {
utilsMock := NewScanUtilsMock()
testConfig := ConfigOptions{}
whitesourceConfig := ScanOptions{
BuildTool: "docker",
}
testConfig.addBuildToolDefaults(&whitesourceConfig, utilsMock)
assert.NotContains(t, testConfig, ConfigOption{Name: "docker.dockerfilePath", Value: "Dockerfile", Force: false})
})
}
func TestMvnProjectExcludes(t *testing.T) {
utilsMock := NewScanUtilsMock()
utilsMock.AddFile("unit-tests/package.json", []byte("dummy"))
utilsMock.AddFile("unit-tests/pom.xml", []byte("dummy"))
utilsMock.AddFile("integration-tests/pom.xml", []byte("dummy"))
tt := []struct {
buildDescriptorExcludeList []string
expected []string
}{
{buildDescriptorExcludeList: []string{}, expected: []string{}},
{buildDescriptorExcludeList: []string{"unit-tests/package.json", "integration-tests/package.json"}, expected: []string{}},
{buildDescriptorExcludeList: []string{"unit-tests/pom.xml"}, expected: []string{"--projects", "!unit-tests"}},
{buildDescriptorExcludeList: []string{"unit-tests/pom.xml", "integration-tests/pom.xml"}, expected: []string{"--projects", "!unit-tests,!integration-tests"}},
}
for _, test := range tt {
assert.Equal(t, test.expected, mvnProjectExcludes(test.buildDescriptorExcludeList, utilsMock), test.buildDescriptorExcludeList)
}
}

View File

@@ -2,10 +2,11 @@ package whitesource
import (
"fmt"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/SAP/jenkins-library/pkg/piperutils"
"strings"
"time"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/SAP/jenkins-library/pkg/piperutils"
)
// Scan stores information about scanned WhiteSource projects (modules).

View File

@@ -1,10 +1,11 @@
package whitesource
import (
"testing"
"github.com/SAP/jenkins-library/pkg/mock"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"testing"
)
func TestExecuteScanMTA(t *testing.T) {
@@ -18,11 +19,12 @@ func TestExecuteScanMTA(t *testing.T) {
</project>
`
config := ScanOptions{
ScanType: "mta",
OrgToken: "org-token",
UserToken: "user-token",
ProductName: "mock-product",
ProjectName: "mock-project",
ScanType: "mta",
OrgToken: "org-token",
UserToken: "user-token",
ProductName: "mock-product",
ProductVersion: "product-version",
ProjectName: "mock-project",
}
t.Parallel()

View File

@@ -1,11 +1,12 @@
package whitesource
import (
"path/filepath"
"testing"
"github.com/SAP/jenkins-library/pkg/mock"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"path/filepath"
"testing"
)
func TestExecuteScanMaven(t *testing.T) {
@@ -22,11 +23,12 @@ func TestExecuteScanMaven(t *testing.T) {
</project>
`
config := ScanOptions{
ScanType: "maven",
OrgToken: "org-token",
UserToken: "user-token",
ProductName: "mock-product",
ProjectName: "mock-project",
ScanType: "maven",
OrgToken: "org-token",
UserToken: "user-token",
ProductName: "mock-product",
ProductVersion: "product-version",
ProjectName: "mock-project",
}
utilsMock := NewScanUtilsMock()
utilsMock.AddFile("pom.xml", []byte(pomXML))
@@ -81,10 +83,12 @@ func TestExecuteScanMaven(t *testing.T) {
</project>
`
config := ScanOptions{
ScanType: "maven",
OrgToken: "org-token",
UserToken: "user-token",
ProductName: "mock-product",
ScanType: "maven",
OrgToken: "org-token",
UserToken: "user-token",
ProductName: "mock-product",
ProductVersion: "product-version",
ProductToken: "product-version",
}
utilsMock := NewScanUtilsMock()
utilsMock.AddFile("pom.xml", []byte(rootPomXML))

View File

@@ -3,11 +3,12 @@ package whitesource
import (
"encoding/json"
"fmt"
"path/filepath"
"testing"
"github.com/SAP/jenkins-library/pkg/mock"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"path/filepath"
"testing"
)
func TestExecuteScanNPM(t *testing.T) {
@@ -137,11 +138,12 @@ func TestExecuteScanNPM(t *testing.T) {
func TestWriteWhitesourceConfigJSON(t *testing.T) {
config := &ScanOptions{
OrgToken: "org-token",
UserToken: "user-token",
ProductName: "mock-product",
ProjectName: "mock-project",
ProductToken: "mock-product-token",
OrgToken: "org-token",
UserToken: "user-token",
ProductName: "mock-product",
ProductVersion: "product-version",
ProjectName: "mock-project",
ProductToken: "mock-product-token",
}
expected := make(map[string]interface{})

View File

@@ -2,15 +2,19 @@ package whitesource
// ScanOptions contains parameters needed during the scan.
type ScanOptions struct {
BuildTool string
// ScanType defines the type of scan. Can be "maven" or "mta" for scanning with Maven or "npm"/"yarn".
ScanType string
OrgToken string
UserToken string
ProductName string
ProductToken string
ScanType string
OrgToken string
UserToken string
ProductName string
ProductToken string
ProductVersion string
// ProjectName is an optional name for an "aggregator" project.
// All scanned maven modules will be reflected in the aggregate project.
ProjectName string
ProjectName string
BuildDescriptorFile string
BuildDescriptorExcludeList []string
// PomPath is the path to root build descriptor file.
PomPath string
@@ -30,6 +34,13 @@ type ScanOptions struct {
AgentFileName string
ConfigFilePath string
Includes string
Excludes string
JreDownloadURL string
Includes []string
Excludes []string
AgentURL string
ServiceURL string
Verbose bool
}
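For illustration, a ScanOptions value populated with placeholder values (in the step itself these come from the pipeline configuration and the step defaults); package and function name are hypothetical:
package example
import (
	ws "github.com/SAP/jenkins-library/pkg/whitesource"
)
// exampleScanOptions returns purely illustrative values for the fields above.
func exampleScanOptions() ws.ScanOptions {
	return ws.ScanOptions{
		BuildTool:      "maven",
		OrgToken:       "<org-token>",
		UserToken:      "<user-token>",
		ProductName:    "my-product",
		ProductVersion: "1",
		ProjectName:    "my-project",
		AgentFileName:  "wss-unified-agent.jar",
		ConfigFilePath: "./wss-unified-agent.config",
		Includes:       []string{"**/*.java", "**/*.jar"},
		Excludes:       []string{"**/*sources.jar", "**/*javadoc.jar"},
		AgentURL:       "https://saas.whitesourcesoftware.com/agent",
		ServiceURL:     "https://saas.whitesourcesoftware.com/api",
		Verbose:        false,
	}
}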

View File

@@ -1,30 +1,141 @@
package whitesource
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/pkg/errors"
)
const jvmTarGz = "jvm.tar.gz"
const jvmDir = "./jvm"
// ExecuteUAScan executes a scan with the Whitesource Unified Agent.
func (s *Scan) ExecuteUAScan(config *ScanOptions, utils Utils) error {
if config.BuildTool != "mta" {
return s.ExecuteUAScanInPath(config, utils, ".")
}
log.Entry().Infof("Executing WhiteSource UA scan for MTA project")
pomExists, _ := utils.FileExists("pom.xml")
if pomExists {
mavenConfig := *config
mavenConfig.BuildTool = "maven"
if err := s.ExecuteUAScanInPath(&mavenConfig, utils, "."); err != nil {
return errors.Wrap(err, "failed to run scan for maven modules of mta")
}
} else {
// ToDo: only warning message?
//log.Entry().Warning("MTA project does not contain a pom.xml in the root. Scan results might be incomplete")
return fmt.Errorf("mta project does not contain an aggregator pom.xml in the root - this is mandatory")
}
packageJSONFiles, err := utils.FindPackageJSONFiles(config)
if err != nil {
return errors.Wrap(err, "failed to find package.json files")
}
if len(packageJSONFiles) > 0 {
npmConfig := *config
npmConfig.BuildTool = "npm"
for _, packageJSONFile := range packageJSONFiles {
// we only need the path here
modulePath, _ := filepath.Split(packageJSONFile)
projectName, err := getProjectNameFromPackageJSON(packageJSONFile, utils)
if err != nil {
return errors.Wrapf(err, "failed retrieve project name")
}
npmConfig.ProjectName = projectName
// ToDo: likely needs to be refactored, AggregateProjectName should only be available if we want to force aggregation?
s.AggregateProjectName = projectName
if err := s.ExecuteUAScanInPath(&npmConfig, utils, modulePath); err != nil {
return errors.Wrapf(err, "failed to run scan for npm module %v", modulePath)
}
}
}
_ = removeJre(filepath.Join(jvmDir, "bin", "java"), utils)
return nil
}
// ExecuteUAScanInPath executes a scan with the Whitesource Unified Agent in a dedicated scanPath.
func (s *Scan) ExecuteUAScanInPath(config *ScanOptions, utils Utils, scanPath string) error {
// Download the unified agent jar file if one does not exist
if err := downloadAgent(config, utils); err != nil {
err := downloadAgent(config, utils)
if err != nil {
return err
}
// Auto generate a config file based on the working directory's contents.
// TODO/NOTE: Currently this scans the UA jar file as a dependency since it is downloaded beforehand
if err := autoGenerateWhitesourceConfig(config, utils); err != nil {
// Download JRE in case none is available
javaPath, err := downloadJre(config, utils)
if err != nil {
return err
}
// ToDo: Check if Download of Docker/container image should be done here instead of in cmd/whitesourceExecuteScan.go
// ToDo: check if this is required
if err := s.AppendScannedProject(s.AggregateProjectName); err != nil {
return err
}
return utils.RunExecutable("java", "-jar", config.AgentFileName, "-d", ".", "-c", config.ConfigFilePath,
configPath, err := config.RewriteUAConfigurationFile(utils)
if err != nil {
return err
}
if len(scanPath) == 0 {
scanPath = "."
}
// ToDo: remove parameters which are added to UA config via RewriteUAConfigurationFile()
// let the scanner resolve project name on its own?
err = utils.RunExecutable(javaPath, "-jar", config.AgentFileName, "-d", scanPath, "-c", configPath,
"-apiKey", config.OrgToken, "-userKey", config.UserToken, "-project", s.AggregateProjectName,
"-product", config.ProductName, "-productVersion", s.ProductVersion)
"-product", config.ProductName, "-productVersion", s.ProductVersion, "-wss.url", config.AgentURL)
if err := removeJre(javaPath, utils); err != nil {
log.Entry().Warning(err)
}
if err != nil {
if err := removeJre(javaPath, utils); err != nil {
log.Entry().Warning(err)
}
exitCode := utils.GetExitCode()
log.Entry().Infof("WhiteSource scan failed with exit code %v", exitCode)
evaluateExitCode(exitCode)
return errors.Wrapf(err, "failed to execute WhiteSource scan with exit code %v", exitCode)
}
return nil
}
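// For illustration, with placeholder values the resulting call looks roughly like:
//   java -jar wss-unified-agent.jar -d . -c wss-unified-agent.config.<timestamp>
//     -apiKey <orgToken> -userKey <userToken> -project <aggregateProjectName>
//     -product <productName> -productVersion <productVersion> -wss.url <agentURL>
// where the -c argument points to the timestamped copy written by RewriteUAConfigurationFile.
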
func evaluateExitCode(exitCode int) {
switch exitCode {
case 255:
log.Entry().Info("General error has occurred.")
log.SetErrorCategory(log.ErrorUndefined)
case 254:
log.Entry().Info("Whitesource found one or multiple policy violations.")
log.SetErrorCategory(log.ErrorCompliance)
case 253:
log.Entry().Info("The local scan client failed to execute the scan.")
log.SetErrorCategory(log.ErrorUndefined)
case 252:
log.Entry().Info("There was a failure in the connection to the WhiteSource servers.")
log.SetErrorCategory(log.ErrorInfrastructure)
case 251:
log.Entry().Info("The server failed to analyze the scan.")
log.SetErrorCategory(log.ErrorService)
case 250:
log.Entry().Info("One of the package manager's prerequisite steps (e.g. npm install) failed.")
log.SetErrorCategory(log.ErrorCustom)
default:
log.Entry().Info("Whitesource scan failed with unknown error code")
log.SetErrorCategory(log.ErrorUndefined)
}
}
// downloadAgent downloads the unified agent jar file if one does not exist
@@ -32,62 +143,79 @@ func downloadAgent(config *ScanOptions, utils Utils) error {
agentFile := config.AgentFileName
exists, err := utils.FileExists(agentFile)
if err != nil {
return fmt.Errorf("could not check whether the file '%s' exists: %w", agentFile, err)
return errors.Wrapf(err, "failed to check if file '%s' exists", agentFile)
}
if !exists {
err := utils.DownloadFile(config.AgentDownloadURL, agentFile, nil, nil)
if err != nil {
return fmt.Errorf("failed to download unified agent from URL '%s' to file '%s': %w",
config.AgentDownloadURL, agentFile, err)
return errors.Wrapf(err, "failed to download unified agent from URL '%s' to file '%s'", config.AgentDownloadURL, agentFile)
}
}
return nil
}
// autoGenerateWhitesourceConfig
// Auto generate a config file based on the current directory structure, renames it to user specified configFilePath
// Generated file name will be 'wss-generated-file.config'
func autoGenerateWhitesourceConfig(config *ScanOptions, utils Utils) error {
// TODO: Should we rely on -detect, or set the parameters manually?
if err := utils.RunExecutable("java", "-jar", config.AgentFileName, "-d", ".", "-detect"); err != nil {
return err
// downloadJre downloads a JRE in case no java command can be executed
func downloadJre(config *ScanOptions, utils Utils) (string, error) {
// cater for multiple executions
if exists, _ := utils.FileExists(filepath.Join(jvmDir, "bin", "java")); exists {
return filepath.Join(jvmDir, "bin", "java"), nil
}
// Rename generated config file to config.ConfigFilePath parameter
if err := utils.FileRename("wss-generated-file.config", config.ConfigFilePath); err != nil {
return err
}
// Append aggregateModules=true parameter to config file (consolidates multi-module projects into one)
f, err := utils.FileOpen(config.ConfigFilePath, os.O_APPEND|os.O_WRONLY, 0600)
err := utils.RunExecutable("java", "-version")
javaPath := "java"
if err != nil {
return err
}
defer func() { _ = f.Close() }()
log.Entry().Infof("No Java installation found, downloading JVM from %v", config.JreDownloadURL)
err := utils.DownloadFile(config.JreDownloadURL, jvmTarGz, nil, nil)
if err != nil {
return "", errors.Wrapf(err, "failed to download jre from URL '%s'", config.JreDownloadURL)
}
// Append additional config parameters to prevent multiple projects being generated
m2Path := config.M2Path
if m2Path == "" {
m2Path = ".m2"
}
cfg := fmt.Sprintf("\ngradle.aggregateModules=true\nmaven.aggregateModules=true\ngradle.localRepositoryPath=.gradle\nmaven.m2RepositoryPath=%s\nexcludes=%s",
m2Path,
config.Excludes)
if _, err = f.WriteString(cfg); err != nil {
return err
}
// ToDo: replace tar call with go library call
err = utils.MkdirAll(jvmDir, 0755)
// archiveExtractionDepth=0
if err := utils.RunExecutable("sed", "-ir", `s/^[#]*\s*archiveExtractionDepth=.*/archiveExtractionDepth=0/`,
config.ConfigFilePath); err != nil {
return err
err = utils.RunExecutable("tar", fmt.Sprintf("--directory=%v", jvmDir), "--strip-components=1", "-xzf", jvmTarGz)
if err != nil {
return "", errors.Wrapf(err, "failed to extract %v", jvmTarGz)
}
log.Entry().Info("Java successfully installed")
javaPath = filepath.Join(jvmDir, "bin", "java")
}
return javaPath, nil
}
// config.Includes defaults to "**/*.java **/*.jar **/*.py **/*.go **/*.js **/*.ts"
regex := fmt.Sprintf(`s/^[#]*\s*includes=.*/includes="%s"/`, config.Includes)
if err := utils.RunExecutable("sed", "-ir", regex, config.ConfigFilePath); err != nil {
return err
func removeJre(javaPath string, utils Utils) error {
if javaPath == "java" {
return nil
}
if err := utils.RemoveAll(jvmDir); err != nil {
return fmt.Errorf("failed to remove downloaded and extracted jvm from %v", jvmDir)
}
log.Entry().Debugf("Java successfully removed from %v", jvmDir)
if err := utils.FileRemove(jvmTarGz); err != nil {
return fmt.Errorf("failed to remove downloaded %v", jvmTarGz)
}
log.Entry().Debugf("%v successfully removed", jvmTarGz)
return nil
}
func getProjectNameFromPackageJSON(packageJSONPath string, utils Utils) (string, error) {
fileContents, err := utils.FileRead(packageJSONPath)
if err != nil {
return "", errors.Wrapf(err, "failed to read file %v", packageJSONPath)
}
var packageJSON = make(map[string]interface{})
if err := json.Unmarshal(fileContents, &packageJSON); err != nil {
return "", errors.Wrapf(err, "failed to read file content of %v", packageJSONPath)
}
projectNameEntry, exists := packageJSON["name"]
if !exists {
return "", fmt.Errorf("the file '%s' must configure a name", packageJSONPath)
}
projectName, isString := projectNameEntry.(string)
if !isString {
return "", fmt.Errorf("the file '%s' must configure a name as string", packageJSONPath)
}
return projectName, nil
}
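A minimal sketch of driving the scan from calling code, assuming a concrete `ws.Utils` implementation is passed in; the `example` package, the function name and all values are illustrative:
package example
import (
	"github.com/SAP/jenkins-library/pkg/log"
	ws "github.com/SAP/jenkins-library/pkg/whitesource"
)
// runScan sketches how a Unified Agent scan could be triggered for an npm project;
// utils must be a concrete implementation of ws.Utils (the step assembles one internally).
func runScan(utils ws.Utils) {
	scan := &ws.Scan{
		AggregateProjectName: "my-project",
		ProductVersion:       "1",
	}
	config := &ws.ScanOptions{
		BuildTool:        "npm",
		ProductName:      "my-product",
		ProductVersion:   "1",
		OrgToken:         "<org-token>",
		UserToken:        "<user-token>",
		AgentDownloadURL: "https://github.com/whitesource/unified-agent-distribution/releases/latest/download/wss-unified-agent.jar",
		AgentFileName:    "wss-unified-agent.jar",
		ConfigFilePath:   "./wss-unified-agent.config",
		AgentURL:         "https://saas.whitesourcesoftware.com/agent",
	}
	if err := scan.ExecuteUAScan(config, utils); err != nil {
		log.Entry().WithError(err).Fatal("WhiteSource scan failed")
	}
}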

View File

@@ -2,96 +2,394 @@ package whitesource
import (
"fmt"
"github.com/SAP/jenkins-library/pkg/mock"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"path/filepath"
"testing"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/stretchr/testify/assert"
)
func TestExecuteScanUA(t *testing.T) {
func TestExecuteUAScan(t *testing.T) {
t.Parallel()
t.Run("happy path UA", func(t *testing.T) {
// init
config := ScanOptions{
ScanType: "unified-agent",
OrgToken: "org-token",
UserToken: "user-token",
ProductName: "mock-product",
ProjectName: "mock-project",
AgentDownloadURL: "https://download.ua.org/agent.jar",
AgentFileName: "unified-agent.jar",
ConfigFilePath: "ua.cfg",
M2Path: ".pipeline/m2",
}
utilsMock := NewScanUtilsMock()
utilsMock.AddFile("wss-generated-file.config", []byte("key=value"))
scan := newTestScan(&config)
// test
err := scan.ExecuteUAScan(&config, utilsMock)
// assert
require.NoError(t, err)
content, err := utilsMock.FileRead("ua.cfg")
require.NoError(t, err)
contentAsString := string(content)
assert.Contains(t, contentAsString, "key=value\n")
assert.Contains(t, contentAsString, "gradle.aggregateModules=true\n")
assert.Contains(t, contentAsString, "maven.aggregateModules=true\n")
assert.Contains(t, contentAsString, "maven.m2RepositoryPath=.pipeline/m2\n")
assert.Contains(t, contentAsString, "excludes=")
require.Len(t, utilsMock.Calls, 4)
fmt.Printf("calls: %v\n", utilsMock.Calls)
expectedCall := mock.ExecCall{
Exec: "java",
Params: []string{
"-jar",
config.AgentFileName,
"-d", ".",
"-c", config.ConfigFilePath,
"-apiKey", config.OrgToken,
"-userKey", config.UserToken,
"-project", config.ProjectName,
"-product", config.ProductName,
"-productVersion", scan.ProductVersion,
},
}
assert.Equal(t, expectedCall, utilsMock.Calls[3])
})
t.Run("UA is downloaded", func(t *testing.T) {
// init
t.Run("success - non mta", func(t *testing.T) {
config := ScanOptions{
BuildTool: "maven",
ProjectName: "test-project",
ProductName: "test-product",
}
utilsMock := NewScanUtilsMock()
scan := newTestScan(&config)
err := scan.ExecuteUAScan(&config, utilsMock)
assert.NoError(t, err)
assert.Equal(t, "maven", config.BuildTool)
assert.Contains(t, utilsMock.Calls[1].Params, config.ProductName)
assert.Contains(t, utilsMock.Calls[1].Params, ".")
})
t.Run("success - mta", func(t *testing.T) {
config := ScanOptions{
BuildTool: "mta",
ProjectName: "test-project",
ProductName: "test-product",
}
utilsMock := NewScanUtilsMock()
utilsMock.AddFile("pom.xml", []byte("dummy"))
utilsMock.AddFile("package.json", []byte(`{"name":"my-module-name"}`))
scan := newTestScan(&config)
err := scan.ExecuteUAScan(&config, utilsMock)
assert.NoError(t, err)
assert.Equal(t, "mta", config.BuildTool)
assert.Contains(t, utilsMock.Calls[1].Params, config.ProductName)
assert.Contains(t, utilsMock.Calls[1].Params, ".")
})
t.Run("error - maven", func(t *testing.T) {
config := ScanOptions{
AgentDownloadURL: "https://download.ua.org/agent.jar",
BuildTool: "mta",
ProjectName: "test-project",
ProductName: "test-product",
}
utilsMock := NewScanUtilsMock()
utilsMock.AddFile("pom.xml", []byte("dummy"))
utilsMock.AddFile("package.json", []byte(`{"name":"my-module-name"}`))
utilsMock.DownloadError = map[string]error{"https://download.ua.org/agent.jar": fmt.Errorf("failed to download file")}
scan := newTestScan(&config)
err := scan.ExecuteUAScan(&config, utilsMock)
assert.Contains(t, fmt.Sprint(err), "failed to run scan for maven modules of mta")
})
t.Run("error - no pom.xml", func(t *testing.T) {
config := ScanOptions{
BuildTool: "mta",
ProjectName: "test-project",
ProductName: "test-product",
}
utilsMock := NewScanUtilsMock()
scan := newTestScan(&config)
err := scan.ExecuteUAScan(&config, utilsMock)
assert.EqualError(t, err, "mta project does not contain an aggregator pom.xml in the root - this is mandatory")
})
t.Run("error - npm no name", func(t *testing.T) {
config := ScanOptions{
BuildTool: "mta",
ProjectName: "test-project",
ProductName: "test-product",
}
utilsMock := NewScanUtilsMock()
utilsMock.AddFile("pom.xml", []byte("dummy"))
utilsMock.AddFile("package.json", []byte(`{}`))
scan := newTestScan(&config)
err := scan.ExecuteUAScan(&config, utilsMock)
assert.EqualError(t, err, "failed retrieve project name: the file 'package.json' must configure a name")
})
}
func TestExecuteUAScanInPath(t *testing.T) {
t.Parallel()
t.Run("success", func(t *testing.T) {
config := ScanOptions{
AgentFileName: "unified-agent.jar",
ConfigFilePath: "ua.props",
ProductName: "test-product",
ProductVersion: "1",
ProjectName: "test-project",
OrgToken: "orgTestToken",
UserToken: "userTestToken",
AgentURL: "https://ws.service.url/agent",
}
utilsMock := NewScanUtilsMock()
scan := newTestScan(&config)
err := scan.ExecuteUAScanInPath(&config, utilsMock, "")
assert.NoError(t, err)
assert.Equal(t, "java", utilsMock.Calls[1].Exec)
assert.Equal(t, 18, len(utilsMock.Calls[1].Params))
assert.Contains(t, utilsMock.Calls[1].Params, "-jar")
assert.Contains(t, utilsMock.Calls[1].Params, "-d")
assert.Contains(t, utilsMock.Calls[1].Params, ".")
assert.Contains(t, utilsMock.Calls[1].Params, "-c")
// name of config file not tested since it is dynamic. This is acceptable here since we also assert the number of parameters
assert.Contains(t, utilsMock.Calls[1].Params, "-apiKey")
assert.Contains(t, utilsMock.Calls[1].Params, config.OrgToken)
assert.Contains(t, utilsMock.Calls[1].Params, "-userKey")
assert.Contains(t, utilsMock.Calls[1].Params, config.UserToken)
assert.Contains(t, utilsMock.Calls[1].Params, "-project")
assert.Contains(t, utilsMock.Calls[1].Params, config.ProjectName)
assert.Contains(t, utilsMock.Calls[1].Params, "-product")
assert.Contains(t, utilsMock.Calls[1].Params, config.ProductName)
assert.Contains(t, utilsMock.Calls[1].Params, "-productVersion")
assert.Contains(t, utilsMock.Calls[1].Params, config.ProductVersion)
assert.Contains(t, utilsMock.Calls[1].Params, "-wss.url")
assert.Contains(t, utilsMock.Calls[1].Params, config.AgentURL)
})
t.Run("success - dedicated path", func(t *testing.T) {
config := ScanOptions{
AgentFileName: "unified-agent.jar",
ConfigFilePath: "ua.props",
ProductName: "test-product",
ProductVersion: "1",
ProjectName: "test-project",
OrgToken: "orgTestToken",
UserToken: "userTestToken",
AgentURL: "https://ws.service.url/agent",
}
utilsMock := NewScanUtilsMock()
scan := newTestScan(&config)
err := scan.ExecuteUAScanInPath(&config, utilsMock, "./my/test/path")
assert.NoError(t, err)
assert.Contains(t, utilsMock.Calls[1].Params, "-d")
assert.Contains(t, utilsMock.Calls[1].Params, "./my/test/path")
})
t.Run("error - download agent", func(t *testing.T) {
config := ScanOptions{
AgentDownloadURL: "https://download.ua.org/agent.jar",
}
utilsMock := NewScanUtilsMock()
utilsMock.DownloadError = map[string]error{"https://download.ua.org/agent.jar": fmt.Errorf("failed to download file")}
scan := newTestScan(&config)
err := scan.ExecuteUAScanInPath(&config, utilsMock, "")
assert.Contains(t, fmt.Sprint(err), "failed to download unified agent from URL 'https://download.ua.org/agent.jar'")
})
t.Run("error - download jre", func(t *testing.T) {
config := ScanOptions{
JreDownloadURL: "https://download.jre.org/jvm.jar",
}
utilsMock := NewScanUtilsMock()
utilsMock.DownloadError = map[string]error{"https://download.jre.org/jvm.jar": fmt.Errorf("failed to download file")}
utilsMock.ShouldFailOnCommand = map[string]error{"java": fmt.Errorf("failed to run java")}
scan := newTestScan(&config)
err := scan.ExecuteUAScanInPath(&config, utilsMock, "")
assert.Contains(t, fmt.Sprint(err), "failed to download jre from URL 'https://download.jre.org/jvm.jar'")
})
t.Run("error - append scanned projects", func(t *testing.T) {
config := ScanOptions{}
utilsMock := NewScanUtilsMock()
scan := newTestScan(&config)
err := scan.ExecuteUAScanInPath(&config, utilsMock, "")
assert.EqualError(t, err, "projectName must not be empty")
})
t.Run("error - rewrite config", func(t *testing.T) {
config := ScanOptions{
ProjectName: "test-project",
}
utilsMock := NewScanUtilsMock()
utilsMock.FileWriteError = fmt.Errorf("failed to write file")
scan := newTestScan(&config)
err := scan.ExecuteUAScanInPath(&config, utilsMock, "")
assert.Contains(t, fmt.Sprint(err), "failed to write file")
})
t.Run("error - scan error", func(t *testing.T) {
config := ScanOptions{
ProjectName: "test-project",
}
utilsMock := NewScanUtilsMock()
utilsMock.ShouldFailOnCommand = map[string]error{
"java": fmt.Errorf("failed to run java"),
}
scan := newTestScan(&config)
err := scan.ExecuteUAScanInPath(&config, utilsMock, "")
assert.Contains(t, fmt.Sprint(err), "failed to execute WhiteSource scan with exit code")
})
}
func TestEvaluateExitCode(t *testing.T) {
tt := []struct {
exitCode int
expected log.ErrorCategory
}{
{exitCode: 255, expected: log.ErrorUndefined},
{exitCode: 254, expected: log.ErrorCompliance},
{exitCode: 253, expected: log.ErrorUndefined},
{exitCode: 252, expected: log.ErrorInfrastructure},
{exitCode: 251, expected: log.ErrorService},
{exitCode: 250, expected: log.ErrorCustom},
{exitCode: 200, expected: log.ErrorUndefined},
}
for _, test := range tt {
evaluateExitCode(test.exitCode)
assert.Equal(t, test.expected, log.GetErrorCategory(), fmt.Sprintf("test for exit code %v failed", test.exitCode))
}
}
func TestDownloadAgent(t *testing.T) {
t.Parallel()
t.Run("success - download", func(t *testing.T) {
config := ScanOptions{
ScanType: "unified-agent",
ProjectName: "mock-project",
AgentDownloadURL: "https://download.ua.org/agent.jar",
AgentFileName: "unified-agent.jar",
}
utilsMock := NewScanUtilsMock()
utilsMock.AddFile("wss-generated-file.config", []byte("dummy"))
scan := newTestScan(&config)
// test
err := scan.ExecuteUAScan(&config, utilsMock)
// assert
require.NoError(t, err)
require.Len(t, utilsMock.DownloadedFiles, 1)
err := downloadAgent(&config, utilsMock)
assert.NoError(t, err, "error occured although none expected")
assert.Len(t, utilsMock.DownloadedFiles, 1)
assert.Equal(t, "https://download.ua.org/agent.jar", utilsMock.DownloadedFiles[0].SourceURL)
assert.Equal(t, "unified-agent.jar", utilsMock.DownloadedFiles[0].FilePath)
})
t.Run("UA is NOT downloaded", func(t *testing.T) {
// init
t.Run("success - no download", func(t *testing.T) {
config := ScanOptions{
ScanType: "unified-agent",
ProjectName: "mock-project",
AgentDownloadURL: "https://download.ua.org/agent.jar",
AgentFileName: "unified-agent.jar",
}
utilsMock := NewScanUtilsMock()
utilsMock.AddFile("wss-generated-file.config", []byte("dummy"))
utilsMock.AddFile("unified-agent.jar", []byte("dummy"))
scan := newTestScan(&config)
// test
err := scan.ExecuteUAScan(&config, utilsMock)
// assert
require.NoError(t, err)
err := downloadAgent(&config, utilsMock)
assert.NoError(t, err, "error occured although none expected")
assert.Len(t, utilsMock.DownloadedFiles, 0)
})
t.Run("error - file existence", func(t *testing.T) {
config := ScanOptions{
AgentDownloadURL: "https://download.ua.org/agent.jar",
AgentFileName: "unified-agent.jar",
}
utilsMock := NewScanUtilsMock()
utilsMock.FileExistsErrors = map[string]error{"unified-agent.jar": fmt.Errorf("failed to check existence")}
err := downloadAgent(&config, utilsMock)
assert.Contains(t, fmt.Sprint(err), "failed to check if file 'unified-agent.jar' exists")
})
t.Run("error - download", func(t *testing.T) {
config := ScanOptions{
AgentDownloadURL: "https://download.ua.org/agent.jar",
AgentFileName: "unified-agent.jar",
}
utilsMock := NewScanUtilsMock()
utilsMock.DownloadError = map[string]error{"https://download.ua.org/agent.jar": fmt.Errorf("failed to download file")}
err := downloadAgent(&config, utilsMock)
assert.Contains(t, fmt.Sprint(err), "failed to download unified agent from URL")
})
}
func TestDownloadJre(t *testing.T) {
t.Parallel()
t.Run("success - no download required", func(t *testing.T) {
config := ScanOptions{
JreDownloadURL: "https://download.jre.org/jvm.jar",
}
utilsMock := NewScanUtilsMock()
jre, err := downloadJre(&config, utilsMock)
assert.NoError(t, err)
assert.Equal(t, "java", jre)
assert.Equal(t, "java", utilsMock.Calls[0].Exec)
assert.Equal(t, []string{"-version"}, utilsMock.Calls[0].Params)
})
t.Run("success - previously downloaded", func(t *testing.T) {
config := ScanOptions{
JreDownloadURL: "https://download.jre.org/jvm.jar",
}
utilsMock := NewScanUtilsMock()
utilsMock.AddFile(filepath.Join(jvmDir, "bin", "java"), []byte("dummy"))
jre, err := downloadJre(&config, utilsMock)
assert.NoError(t, err)
assert.Equal(t, filepath.Join(jvmDir, "bin", "java"), jre)
})
t.Run("success - jre downloaded", func(t *testing.T) {
config := ScanOptions{
JreDownloadURL: "https://download.jre.org/jvm.jar",
}
utilsMock := NewScanUtilsMock()
utilsMock.ShouldFailOnCommand = map[string]error{"java": fmt.Errorf("failed to run java")}
jre, err := downloadJre(&config, utilsMock)
assert.NoError(t, err)
assert.Equal(t, filepath.Join(jvmDir, "bin", "java"), jre)
assert.Equal(t, "https://download.jre.org/jvm.jar", utilsMock.DownloadedFiles[0].SourceURL)
exists, _ := utilsMock.DirExists(jvmDir)
assert.True(t, exists)
assert.Equal(t, "tar", utilsMock.Calls[1].Exec)
assert.Equal(t, fmt.Sprintf("--directory=%v", jvmDir), utilsMock.Calls[1].Params[0])
})
t.Run("error - download", func(t *testing.T) {
config := ScanOptions{
JreDownloadURL: "https://download.jre.org/jvm.jar",
}
utilsMock := NewScanUtilsMock()
utilsMock.ShouldFailOnCommand = map[string]error{"java": fmt.Errorf("failed to run java")}
utilsMock.DownloadError = map[string]error{"https://download.jre.org/jvm.jar": fmt.Errorf("failed to download file")}
_, err := downloadJre(&config, utilsMock)
assert.Contains(t, fmt.Sprint(err), "failed to download jre from URL")
})
t.Run("error - tar execution", func(t *testing.T) {
config := ScanOptions{
JreDownloadURL: "https://download.jre.org/jvm.jar",
}
utilsMock := NewScanUtilsMock()
utilsMock.ShouldFailOnCommand = map[string]error{
"java": fmt.Errorf("failed to run java"),
"tar": fmt.Errorf("failed to run tar"),
}
_, err := downloadJre(&config, utilsMock)
assert.Contains(t, fmt.Sprint(err), "failed to extract")
})
}
func TestRemoveJre(t *testing.T) {
t.Parallel()
t.Run("success - no removal required", func(t *testing.T) {
utilsMock := NewScanUtilsMock()
err := removeJre("java", utilsMock)
assert.NoError(t, err, "error occured although none expected")
})
t.Run("success - with removal", func(t *testing.T) {
utilsMock := NewScanUtilsMock()
utilsMock.AddFile(jvmTarGz, []byte("dummy"))
err := removeJre("./jvm/bin/java", utilsMock)
assert.NoError(t, err, "error occured although none expected")
assert.Contains(t, utilsMock.RemoveAllDirs, jvmDir)
assert.True(t, utilsMock.HasRemovedFile(jvmTarGz))
})
t.Run("error - remove jvm directory", func(t *testing.T) {
utilsMock := NewScanUtilsMock()
utilsMock.RemoveAllError = map[string]error{jvmDir: fmt.Errorf("failed to remove directory")}
err := removeJre("./jvm/bin/java", utilsMock)
assert.Contains(t, fmt.Sprint(err), "failed to remove downloaded and extracted jvm")
})
t.Run("error - remove jvm tar.gz", func(t *testing.T) {
utilsMock := NewScanUtilsMock()
err := removeJre("./jvm/bin/java", utilsMock)
assert.Contains(t, fmt.Sprint(err), "failed to remove downloaded")
})
}

View File

@@ -1,9 +1,10 @@
package whitesource
import (
"github.com/SAP/jenkins-library/pkg/maven"
"io"
"os"
"github.com/SAP/jenkins-library/pkg/maven"
)
// File defines the method subset we use from os.File
@@ -23,6 +24,7 @@ type Utils interface {
FileWrite(path string, content []byte, perm os.FileMode) error
FileRemove(path string) error
FileRename(oldPath, newPath string) error
GetExitCode() int
RemoveAll(path string) error
FileOpen(name string, flag int, perm os.FileMode) (File, error)

View File

@@ -3,16 +3,17 @@
package whitesource
import (
"github.com/SAP/jenkins-library/pkg/mock"
"github.com/SAP/jenkins-library/pkg/piperutils"
"net/http"
"os"
"github.com/SAP/jenkins-library/pkg/mock"
"github.com/SAP/jenkins-library/pkg/piperutils"
)
func newTestScan(config *ScanOptions) *Scan {
return &Scan{
AggregateProjectName: config.ProjectName,
ProductVersion: "product-version",
ProductVersion: config.ProductVersion,
}
}
@@ -34,11 +35,18 @@ type ScanUtilsMock struct {
*mock.ExecMockRunner
NpmInstalledModules []NpmInstall
DownloadedFiles []DownloadedFile
DownloadError map[string]error
RemoveAllDirs []string
RemoveAllError map[string]error
}
// RemoveAll mimics os.RemoveAll().
func (m *ScanUtilsMock) RemoveAll(_ string) error {
func (m *ScanUtilsMock) RemoveAll(dir string) error {
// Can be removed once implemented in mock.FilesMock.
m.RemoveAllDirs = append(m.RemoveAllDirs, dir)
if m.RemoveAllError[dir] != nil {
return m.RemoveAllError[dir]
}
return nil
}
@@ -59,6 +67,9 @@ func (m *ScanUtilsMock) InstallAllNPMDependencies(_ *ScanOptions, packageJSONs [
// DownloadFile mimics http.Downloader and records the downloaded file.
func (m *ScanUtilsMock) DownloadFile(url, filename string, _ http.Header, _ []*http.Cookie) error {
if m.DownloadError[url] != nil {
return m.DownloadError[url]
}
m.DownloadedFiles = append(m.DownloadedFiles, DownloadedFile{SourceURL: url, FilePath: filename})
return nil
}

View File

@@ -373,76 +373,9 @@ steps:
npmExecuteScripts:
install: true
whitesourceExecuteScan:
createProductFromPipeline: true
emailAddressesOfInitialProductAdmins: []
buildDescriptorExcludeList: []
parallelLimit: 15
licensingVulnerabilities: true
securityVulnerabilities: true
cvssSeverityLimit: -1
reporting: true
vulnerabilityReportFileName: 'piper_whitesource_vulnerability_report'
vulnerabilityReportTitle: 'WhiteSource Security Vulnerability Report'
projectNames: []
jreDownloadUrl: 'https://github.com/SAP/SapMachine/releases/download/sapmachine-11.0.2/sapmachine-jre-11.0.2_linux-x64_bin.tar.gz'
agentFileName: 'wss-unified-agent.jar'
agentDownloadUrl: 'https://github.com/whitesource/unified-agent-distribution/releases/latest/download/${config.agentFileName}'
agentParameters: ''
configFilePath: './wss-unified-agent.config'
mta:
stashContent:
- 'buildDescriptor'
- 'opensourceConfiguration'
maven:
buildDescriptorFile: './pom.xml'
dockerImage: 'maven:3.5-jdk-8'
dockerWorkspace: '/home/java'
stashContent:
- 'buildDescriptor'
- 'opensourceConfiguration'
npm:
buildDescriptorFile: './package.json'
dockerImage: 'node:lts-stretch'
dockerWorkspace: '/home/node'
stashContent:
- 'buildDescriptor'
- 'opensourceConfiguration'
pip:
buildDescriptorFile: './setup.py'
dockerImage: 'python:3.7.2-stretch'
dockerWorkspace: '/home/python'
stashContent:
- 'buildDescriptor'
- 'opensourceConfiguration'
golang:
buildDescriptorFile: './Gopkg.toml'
dockerImage: 'golang:1.12-stretch'
dockerWorkspace: '/home/dep'
stashContent:
- 'buildDescriptor'
- 'opensourceConfiguration'
- 'checkmarx'
additionalInstallCommand: >-
curl --fail https://raw.githubusercontent.com/golang/dep/master/install.sh | sh
&& mkdir -p \$GOPATH/src/${config.whitesource.projectName.substring(0, config.whitesource.projectName.lastIndexOf('/'))}
&& ln -s \$(pwd) \$GOPATH/src/${config.whitesource.projectName}
&& cd \$GOPATH/src/${config.whitesource.projectName} && dep ensure
dub:
buildDescriptorFile: './dub.json'
dockerImage: 'buildpack-deps:stretch-curl'
dockerWorkspace: '/home/dub'
stashContent:
- 'buildDescriptor'
- 'checkmarx'
sbt:
buildDescriptorFile: './build.sbt'
dockerImage: 'hseeberger/scala-sbt:8u181_2.12.8_1.2.8'
dockerWorkspace: '/home/scala'
stashContent:
- 'buildDescriptor'
- 'opensourceConfiguration'
verbose: false
timeout: 0
stashExcludes:
stashBack: '.pipeline/** whitesourceExecuteScan_*.* whitesource-reports/**'
pipelineExecute:
branch: 'master'
path: 'Jenkinsfile'

View File

@@ -1,36 +1,92 @@
metadata:
name: whitesourceExecuteScan
description: BETA
description: Execute a WhiteSource scan
longDescription: |-
BETA
With this step [WhiteSource](https://www.whitesourcesoftware.com) security and license compliance scans can be executed and assessed.
WhiteSource is a Software as a Service offering based on a so-called Unified Agent that locally determines the dependency
tree of a Node.js, Java, Python, Ruby, or Scala based solution and sends it to the WhiteSource server for a policy-based license compliance
check and detection of publicly known vulnerabilities in Free and Open Source Software.
The step uses the so-called WhiteSource Unified Agent. For details please refer to the [WhiteSource Unified Agent Documentation](https://whitesource.atlassian.net/wiki/spaces/WD/pages/33718339/Unified+Agent).
!!! note "Docker Images"
The underlying Docker images are public and specific to the solution's programming language(s) and therefore may have to be exchanged
to fit and support the relevant scenario. The default Python environment used is, for instance, based on Python 3.
!!! warn "Restrictions"
Currently the step contains hardened scan configurations for `scanType` `'pip'` and `'go'`. Other environments are still being elaborated,
so please check your results thoroughly and do not take them for granted by default.
Also, not all environments have been thoroughly tested yet, so you might need to tweak the default containers used or
create your own ones to adequately support your scenario. To do so, please modify the `dockerImage` and `dockerWorkspace` parameters.
The step expects an environment containing the programming language related compiler/interpreter as well as the related build tool. For a list
of the supported build tools per environment please refer to the [WhiteSource Unified Agent Documentation](https://whitesource.atlassian.net/wiki/spaces/WD/pages/33718339/Unified+Agent).
spec:
inputs:
secrets:
- name: userTokenCredentialsId
aliases:
- name: whitesourceUserTokenCredentialsId
- name: whitesource/userTokenCredentialsId
deprecated: true
description: Jenkins 'Secret text' credentials ID containing Whitesource user token.
type: jenkins
- name: orgAdminUserTokenCredentialsId
aliases:
- name: whitesourceOrgAdminUserTokenCredentialsId
- name: whitesource/orgAdminUserTokenCredentialsId
deprecated: true
description: Jenkins 'Secret text' credentials ID containing Whitesource org admin token.
type: jenkins
params:
- name: agentDownloadUrl
type: string
description: "URL used to download the latest version of the WhiteSource Unified Agent."
scope:
- PARAMETERS
- STAGES
- STEPS
default: https://github.com/whitesource/unified-agent-distribution/releases/latest/download/wss-unified-agent.jar
- name: agentFileName
type: string
description: "Locally used name for the Unified Agent jar file after download."
scope:
- PARAMETERS
- STAGES
- STEPS
default: "wss-unified-agent.jar"
- name: agentParameters
type: "[]string"
description: "[NOT IMPLEMENTED] List of additional parameters passed to the Unified Agent command line."
scope:
- PARAMETERS
- STAGES
- STEPS
- name: agentUrl
alias:
- name: whitesourceAgentUrl
type: string
description: "URL to the WhiteSource agent endpoint."
scope:
- GENERAL
- PARAMETERS
- STAGES
- STEPS
default: "https://saas.whitesourcesoftware.com/agent"
- name: aggregateVersionWideReport
type: bool
description: "This does not run a scan, instead just generated a report for all projects with
projectVersion = config.ProductVersion"
scope:
- PARAMETERS
- STAGES
- STEPS
- name: buildDescriptorExcludeList
type: "[]string"
description: "List of build descriptors and therefore modules to exclude from the scan and assessment activities."
scope:
- PARAMETERS
- STAGES
- STEPS
default: ["unit-tests/pom.xml","integration-tests/pom.xml"]
- name: buildDescriptorFile
type: string
description: "Explicit path to the build descriptor file."
scope:
- PARAMETERS
- STAGES
- STEPS
- name: buildTool
type: string
description: "Defines the tool which is used for building the artifact."
@@ -43,26 +99,14 @@ spec:
resourceRef:
- name: commonPipelineEnvironment
param: buildTool
- name: buildDescriptorFile
- name: configFilePath
type: string
description: "Explicit path to the build descriptor file."
description: "Explicit path to the WhiteSource Unified Agent configuration file."
scope:
- PARAMETERS
- STAGES
- STEPS
- name: versioningModel
type: string
description: "The default project versioning model used in case `projectVersion` parameter is
empty for creating the version based on the build descriptor version to report results in
Whitesource, can be one of `'major'`, `'major-minor'`, `'semantic'`, `'full'`"
scope:
- PARAMETERS
- STAGES
- STEPS
- GENERAL
default: "major"
aliases:
- name: defaultVersioningModel
default: ./wss-unified-agent.config
- name: createProductFromPipeline
type: bool
description: "Whether to create the related WhiteSource product on the fly based on the supplied pipeline
@@ -72,99 +116,69 @@ spec:
- STAGES
- STEPS
default: true
- name: securityVulnerabilities
type: bool
description: "Whether security compliance is considered and reported as part of the assessment."
scope:
- PARAMETERS
- STAGES
- STEPS
default: true
- name: timeout
type: int
description: "Timeout in seconds until an HTTP call is forcefully terminated."
scope:
- PARAMETERS
- STAGES
- STEPS
default: 900
- name: agentDownloadUrl
- name: cvssSeverityLimit
type: string
description: "URL used to download the latest version of the WhiteSource Unified Agent."
description: "Limit of tolerable CVSS v3 score upon assessment and in consequence fails the build,
defaults to `-1`."
scope:
- PARAMETERS
- STAGES
- STEPS
default: https://github.com/whitesource/unified-agent-distribution/releases/latest/download/wss-unified-agent.jar
- name: configFilePath
default: "-1"
- name: emailAddressesOfInitialProductAdmins
type: "[]string"
description: "The list of email addresses to assign as product admins for newly created WhiteSource products."
scope:
- PARAMETERS
- STAGES
- STEPS
- name: excludes
type: "[]string"
description: List of file path patterns to exclude in the scan.
scope:
- PARAMETERS
- STAGES
- STEPS
- name: includes
type: "[]string"
description: List of file path patterns to include in the scan.
scope:
- PARAMETERS
- STAGES
- STEPS
- name: installCommand
type: string
description: "Explicit path to the WhiteSource Unified Agent configuration file."
description: "[NOT IMPLEMENTED] Install command that can be used to populate the default docker image for some scenarios."
scope:
- PARAMETERS
- STAGES
- STEPS
default: ./wss-generated-file.config
- name: reportDirectoryName
- name: jreDownloadUrl
aliases:
- name: whitesource/jreDownloadUrl
deprecated: true
type: string
description: "Name of the directory to save vulnerability/risk reports to"
scope:
- PARAMETERS
- STAGES
- STEPS
default: "whitesource-reports"
- name: aggregateVersionWideReport
type: bool
description: "This does not run a scan, instead just generated a report for all projects with
projectVersion = config.ProductVersion"
scope:
- PARAMETERS
- STAGES
- STEPS
default: false
- name: vulnerabilityReportFormat
type: string
description: "Format of the file the vulnerability report is written to."
possibleValues: [xlsx, json, xml]
scope:
- PARAMETERS
- STAGES
- STEPS
default: xlsx
- name: parallelLimit
type: string
description: '[NOT IMPLEMENTED] Limit of parallel jobs being run at once in case of `scanType:
''mta''` based scenarios, defaults to `15`.'
scope:
- PARAMETERS
- STAGES
- STEPS
default: 15
- name: reporting
type: bool
description: "Whether assessment is being done at all, defaults to `true`"
scope:
- PARAMETERS
- STAGES
- STEPS
default: true
- name: serviceUrl
type: string
description: "URL to the WhiteSource server API used for communication."
description: "URL used for downloading the Java Runtime Environment (JRE) required to run the
WhiteSource Unified Agent."
scope:
- GENERAL
- PARAMETERS
- STAGES
- STEPS
default: "https://saas.whitesourcesoftware.com/api"
- name: buildDescriptorExcludeList
type: "[]string"
description: "List of build descriptors and therefore modules to exclude from the scan and assessment activities."
default: "https://github.com/SAP/SapMachine/releases/download/sapmachine-11.0.2/sapmachine-jre-11.0.2_linux-x64_bin.tar.gz"
- name: licensingVulnerabilities
type: bool
description: "[NOT IMPLEMENTED] Whether license compliance is considered and reported as part of the assessment."
scope:
- PARAMETERS
- STAGES
- STEPS
default: ["unit-tests/pom.xml","integration-tests/pom.xml"]
default: true
- name: orgToken
aliases:
- name: whitesourceOrgToken
- name: whitesource/orgToken
deprecated: true
type: string
description: "WhiteSource token identifying your organization."
scope:
@@ -177,63 +191,20 @@ spec:
resourceRef:
- name: orgAdminUserTokenCredentialsId
type: secret
- name: userToken
- name: parallelLimit
type: string
description: "WhiteSource token identifying the user executing the scan."
scope:
- GENERAL
- PARAMETERS
- STAGES
- STEPS
secret: true
mandatory: true
resourceRef:
- name: userTokenCredentialsId
type: secret
- name: licensingVulnerabilities
type: bool
description: "[NOT IMPLEMENTED] Whether license compliance is considered and reported as part of the assessment."
description: '[NOT IMPLEMENTED] Limit of parallel jobs being run at once in case of `scanType:
''mta''` based scenarios, defaults to `15`.'
scope:
- PARAMETERS
- STAGES
- STEPS
default: true
- name: agentFileName
type: string
description: "Locally used name for the Unified Agent jar file after download."
scope:
- PARAMETERS
- STAGES
- STEPS
default: "wss-unified-agent.jar"
- name: emailAddressesOfInitialProductAdmins
type: "[]string"
description: "The list of email addresses to assign as product admins for newly created WhiteSource products."
scope:
- PARAMETERS
- STAGES
- STEPS
- name: productVersion
type: string
description: "Version of the WhiteSource product to be created and used for results aggregation,
usually determined automatically."
scope:
- GENERAL
- PARAMETERS
- STAGES
- STEPS
- name: jreDownloadUrl
type: string
description: "[NOT IMPLEMENTED] URL used for downloading the Java Runtime Environment (JRE) required to run the
WhiteSource Unified Agent."
scope:
- GENERAL
- PARAMETERS
- STAGES
- STEPS
default: 15
- name: productName
aliases:
- name: whitesourceProductName
- name: whitesource/productName
deprecated: true
type: string
description: "Name of the WhiteSource product used for results aggregation.
This parameter is mandatory if the parameter `createProductFromPipeline` is set to `true`
@@ -244,6 +215,34 @@ spec:
- PARAMETERS
- STAGES
- STEPS
- name: productToken
aliases:
- name: whitesourceProductToken
- name: whitesource/productToken
deprecated: true
type: string
description: "Token of the WhiteSource product to be created and used for results aggregation,
usually determined automatically. Can optionally be provided as an alternative to `productName`."
scope:
- GENERAL
- PARAMETERS
- STAGES
- STEPS
- name: productVersion
aliases:
- name: whitesourceProductVersion
- name: whitesource/productVersion
deprecated: true
type: string
description: Version of the WhiteSource product to be created and used for results aggregation.
longDescription: |-
Version of the WhiteSource product to be created and used for results aggregation.
This is usually determined automatically based on the information in the buildTool specific build descriptor file.
scope:
- GENERAL
- PARAMETERS
- STAGES
- STEPS
- name: projectName
aliases:
- name: whitesourceProjectName
@@ -268,17 +267,40 @@ spec:
- PARAMETERS
- STAGES
- STEPS
- name: vulnerabilityReportTitle
- name: reportDirectoryName
type: string
description: "Title of vulnerability report written during the assessment phase."
description: "Name of the directory to save vulnerability/risk reports to"
scope:
- PARAMETERS
- STAGES
- STEPS
default: "WhiteSource Security Vulnerability Report"
- name: installCommand
default: "whitesource-reports"
- name: reporting
type: bool
description: "Whether assessment is being done at all, defaults to `true`"
scope:
- PARAMETERS
- STAGES
- STEPS
default: true
- name: scanImage
type: string
description: "[NOT IMPLEMENTED] Install command that can be used to populate the default docker image for some scenarios."
description: "For `buildTool: docker`: Defines the docker image which should be scanned."
scope:
- PARAMETERS
- STAGES
- STEPS
- name: scanImageIncludeLayers
type: bool
description: "For `buildTool: docker`: Defines if layers should be included."
scope:
- PARAMETERS
- STAGES
- STEPS
default: true
- name: scanImageRegistryUrl
type: string
description: "For `buildTool: docker`: Defines the registry where the scanImage is located."
scope:
- PARAMETERS
- STAGES
@@ -296,50 +318,78 @@ spec:
- STAGES
- STEPS
possibleValues: ["golang", "gradle", "maven", "mta", "npm", "pip", "yarn"]
- name: cvssSeverityLimit
type: string
description: "Limit of tolerable CVSS v3 score upon assessment and in consequence fails the build,
defaults to `-1`."
- name: securityVulnerabilities
type: bool
description: "Whether security compliance is considered and reported as part of the assessment."
scope:
- PARAMETERS
- STAGES
- STEPS
default: "-1"
- name: includes
type: string
description: "Space separated list of file path patterns to include in the scan, slashes must be escaped for sed."
scope:
- PARAMETERS
- STAGES
- STEPS
default: '**\/src\/main\/**\/*.java **\/*.py **\/*.go **\/*.js **\/*.ts'
- name: excludes
type: string
description: Space separated list of file path patterns to exclude in the scan
scope:
- PARAMETERS
- STAGES
- STEPS
default: "tests/**/*.py **/src/test/**/*.java"
- name: productToken
default: true
- name: serviceUrl
aliases:
- name: whitesourceProductToken
- name: whitesourceServiceUrl
- name: whitesource/serviceUrl
deprecated: true
type: string
description: "Token of the WhiteSource product to be created and used for results aggregation,
usually determined automatically. Can optionally be provided as an alternative to `productName`."
description: "URL to the WhiteSource API endpoint."
scope:
- GENERAL
- PARAMETERS
- STAGES
- STEPS
- name: agentParameters
type: string
description: "[NOT IMPLEMENTED] Additional parameters passed to the Unified Agent command line."
default: "https://saas.whitesourcesoftware.com/api"
- name: timeout
type: int
description: "Timeout in seconds until an HTTP call is forcefully terminated."
scope:
- PARAMETERS
- STAGES
- STEPS
default: 900
- name: userToken
type: string
description: "WhiteSource token identifying the user executing the scan."
scope:
- GENERAL
- PARAMETERS
- STAGES
- STEPS
secret: true
mandatory: true
resourceRef:
- name: userTokenCredentialsId
type: secret
- name: versioningModel
type: string
description: "The default project versioning model used in case `projectVersion` parameter is
empty for creating the version based on the build descriptor version to report results in
Whitesource, can be one of `'major'`, `'major-minor'`, `'semantic'`, `'full'`"
scope:
- PARAMETERS
- STAGES
- STEPS
- GENERAL
default: "major"
aliases:
- name: defaultVersioningModel
- name: vulnerabilityReportFormat
type: string
description: "Format of the file the vulnerability report is written to."
possibleValues: [xlsx, json, xml]
scope:
- PARAMETERS
- STAGES
- STEPS
default: xlsx
- name: vulnerabilityReportTitle
type: string
description: "Title of vulnerability report written during the assessment phase."
scope:
- PARAMETERS
- STAGES
- STEPS
default: "WhiteSource Security Vulnerability Report"
# Global maven settings, should be added to all maven steps
- name: projectSettingsFile
type: string
@@ -381,7 +431,6 @@ spec:
- STEPS
- STAGES
- PARAMETERS
# Global npm settings, should be added to all npm steps
- name: defaultNpmRegistry
type: string
@@ -408,18 +457,52 @@ spec:
- name: custom/whitesourceProjectNames
type: "[]string"
containers:
- image: buildpack-deps:stretch-curl
workingDir: /tmp
env: []
conditions:
- conditionRef: strings-equal
params:
- name: buildTool
value: dub
- name: buildTool
value: docker
- image: devxci/mbtci:1.0.14
workingDir: /home/mta
env: [ ]
conditions:
- conditionRef: strings-equal
params:
- name: buildTool
value: mta
- conditionRef: strings-equal
params:
- name: scanType
value: mta
- image: maven:3.5-jdk-8
workingDir: /home/java
- image: golang:1
workingDir: /go
env: []
conditions:
- conditionRef: strings-equal
params:
- name: buildTool
value: go
- image: hseeberger/scala-sbt:8u181_2.12.8_1.2.8
workingDir: /tmp
env: []
conditions:
- conditionRef: strings-equal
params:
- name: buildTool
value: sbt
- image: maven:3.5-jdk-8
workingDir: /tmp
env: []
conditions:
- conditionRef: strings-equal
params:
- name: buildTool
value: maven
- conditionRef: strings-equal
params:
- name: scanType
@@ -428,23 +511,19 @@ spec:
workingDir: /home/node
env: []
conditions:
- conditionRef: strings-equal
params:
- name: buildTool
value: npm
- conditionRef: strings-equal
params:
- name: scanType
value: npm
- image: hseeberger/scala-sbt:8u181_2.12.8_1.2.8
workingDir: /home/scala
- image: python:3.6-stretch
workingDir: /tmp
env: []
conditions:
- conditionRef: strings-equal
params:
- name: scanType
value: sbt
- image: buildpack-deps:stretch-curl
workingDir: /home/dub
env: []
conditions:
- conditionRef: strings-equal
params:
- name: scanType
value: dub
- name: buildTool
value: pip
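For orientation, a minimal step invocation exercising a few of the parameters documented in the spec above could look like the following Jenkinsfile sketch. This is an illustration only; the credential IDs, product name, and threshold are hypothetical placeholders and not values taken from this change.

    whitesourceExecuteScan(
        script: this,
        buildTool: 'maven',                                      // one of the documented possibleValues
        productName: 'DIST - my-product',                        // hypothetical WhiteSource product name
        userTokenCredentialsId: 'whitesource-user-token',        // hypothetical 'Secret text' credentials ID
        orgAdminUserTokenCredentialsId: 'whitesource-org-admin', // hypothetical; only needed to create products on the fly
        cvssSeverityLimit: '7.0'                                 // fail the build for findings above this CVSS v3 score
    )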

View File

@@ -1,147 +0,0 @@
package com.sap.piper
import com.cloudbees.groovy.cps.NonCPS
class WhitesourceConfigurationHelper implements Serializable {
static def extendUAConfigurationFile(script, utils, config, path) {
def mapping = []
def parsingClosure = { fileReadPath -> return script.readProperties (file: fileReadPath) }
def serializationClosure = { configuration -> serializeUAConfig(configuration) }
def inputFile = config.whitesource.configFilePath.replaceFirst('\\./', '')
def suffix = utils.generateSha1("${path}${inputFile}")
def targetFile = "${inputFile}.${suffix}"
if(config.whitesource.productName.startsWith('DIST - ')) {
mapping += [
[name: 'checkPolicies', value: false, force: true],
[name: 'forceCheckAllDependencies', value: false, force: true]
]
} else {
mapping += [
[name: 'checkPolicies', value: true, force: true],
[name: 'forceCheckAllDependencies', value: true, force: true]
]
}
if(config.verbose)
mapping += [
[name: 'log.level', value: 'debug'],
[name: 'log.files.level', value: 'debug']
]
mapping += [
[name: 'apiKey', value: config.whitesource.orgToken, force: true],
[name: 'productName', value: config.whitesource.productName, force: true],
[name: 'productVersion', value: config.whitesource.productVersion?:'', force: true],
[name: 'projectName', value: config.whitesource.projectName, force: true],
[name: 'projectVersion', value: config.whitesource.productVersion?:'', force: true],
[name: 'productToken', value: config.whitesource.productToken, omitIfPresent: 'projectToken', force: true],
[name: 'userKey', value: config.whitesource.userKey, force: true],
[name: 'forceUpdate', value: true, force: true],
[name: 'offline', value: false, force: true],
[name: 'ignoreSourceFiles', value: true, force: true],
[name: 'resolveAllDependencies', value: false, force: true],
[name: 'failErrorLevel', value: 'ALL', force: true],
[name: 'case.sensitive.glob', value: false],
[name: 'followSymbolicLinks', value: true]
]
switch (config.scanType) {
case 'pip':
mapping += [
[name: 'python.resolveDependencies', value: true, force: true],
[name: 'python.ignoreSourceFiles', value: true, force: true],
[name: 'python.ignorePipInstallErrors', value: false],
[name: 'python.installVirtualenv', value: true],
[name: 'python.resolveHierarchyTree', value: true],
[name: 'python.requirementsFileIncludes', value: 'requirements.txt'],
[name: 'python.resolveSetupPyFiles', value: true],
[name: 'python.runPipenvPreStep', value: true],
[name: 'python.pipenvDevDependencies', value: true],
[name: 'python.IgnorePipenvInstallErrors', value: false],
[name: 'includes', value: '**/*.py **/*.txt'],
[name: 'excludes', value: '**/*sources.jar **/*javadoc.jar']
]
break
case 'sbt':
mapping += [
[name: 'sbt.resolveDependencies', value: true, force: true],
[name: 'sbt.ignoreSourceFiles', value: true, force: true],
[name: 'sbt.aggregateModules', value: false, force: true],
[name: 'sbt.runPreStep', value: true],
[name: 'includes', value: '**/*.jar'],
[name: 'excludes', value: '**/*sources.jar **/*javadoc.jar']
]
break
case 'golang':
mapping += [
[name: 'go.resolveDependencies', value: true, force: true],
[name: 'go.ignoreSourceFiles', value: true, force: true],
[name: 'go.collectDependenciesAtRuntime', value: false],
[name: 'go.dependencyManager', value: 'dep'],
[name: 'includes', value: '**/*.lock'],
[name: 'excludes', value: '**/*sources.jar **/*javadoc.jar']
]
break
case 'docker':
mapping += [
[name: 'docker.scanImages', value: true, force: true],
[name: 'docker.scanTarFiles', value: true, force: true],
[name: 'docker.includes', value: /.*.tar/, force: true],
[name: 'ignoreSourceFiles', value: true, force: true],
[name: 'python.resolveGlobalPackages', value: true, force: false],
[name: 'resolveAllDependencies', value: true, force: false],
[name: 'updateType', value: 'OVERRIDE', force: true],
]
break
case 'dub':
mapping += [
[name: 'includes', value: '**/*.d **/*.di']
]
break
default:
script.echo "[Warning][Whitesource] Configuration for scanType: '${config.scanType}' is not yet hardened, please do a quality assessment of your scan results."
}
rewriteConfiguration(script, utils, config, mapping, suffix, path, inputFile, targetFile, parsingClosure, serializationClosure)
}
static private def rewriteConfiguration(script, utils, config, mapping, suffix, path, inputFile, targetFile, parsingClosure, serializationClosure) {
def inputFilePath = "${path}${inputFile}"
def outputFilePath = "${path}${targetFile}"
def moduleSpecificFile = parsingClosure(inputFilePath)
if (!moduleSpecificFile && inputFilePath != config.whitesource.configFilePath)
moduleSpecificFile = parsingClosure(config.whitesource.configFilePath)
if (!moduleSpecificFile)
moduleSpecificFile = [:]
for(int i = 0; i < mapping.size(); i++) {
def entry = mapping.get(i)
def dependentValue = entry.omitIfPresent ? moduleSpecificFile[entry.omitIfPresent] : null
if ((entry.omitIfPresent && !dependentValue || !entry.omitIfPresent) && (entry.force || moduleSpecificFile[entry.name] == null) && entry.value != 'null')
moduleSpecificFile[entry.name] = entry.value.toString()
}
def output = serializationClosure(moduleSpecificFile)
if(config.verbose)
script.echo "Writing config file ${outputFilePath} with content:\n${output}"
script.writeFile file: outputFilePath, text: output
if(config.stashContent && config.stashContent.size() > 0) {
def stashName = "modified whitesource config ${suffix}".toString()
utils.stashWithMessage (
stashName,
"Stashing modified Whitesource configuration",
outputFilePath.replaceFirst('\\./', '')
)
config.stashContent += [stashName]
}
config.whitesource.configFilePath = outputFilePath
}
@NonCPS
static private def serializeUAConfig(configuration) {
Properties p = new Properties()
p.putAll(configuration)
new StringWriter().with{ w -> p.store(w, null); w }.toString()
}
}
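As a reading aid for the class above: each mapping entry only ends up in the rewritten UA properties file when `force` is set or the key is not yet present, and an entry carrying `omitIfPresent` is skipped entirely when the referenced key already exists. A small standalone sketch of that merge rule, using made-up values:

    // Illustration only: the per-entry merge rule applied in rewriteConfiguration().
    def existing = [projectToken: 'p-123', checkPolicies: 'false']
    def entry = [name: 'productToken', value: 'prod-999', omitIfPresent: 'projectToken', force: true]

    def dependentValue = entry.omitIfPresent ? existing[entry.omitIfPresent] : null
    if ((entry.omitIfPresent && !dependentValue || !entry.omitIfPresent)
        && (entry.force || existing[entry.name] == null) && entry.value != 'null') {
        existing[entry.name] = entry.value.toString()
    }
    assert existing.productToken == null   // skipped, because projectToken is already present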

View File

@@ -1,109 +0,0 @@
package com.sap.piper.integration
import com.cloudbees.groovy.cps.NonCPS
import com.sap.piper.JsonUtils
class WhitesourceOrgAdminRepository implements Serializable {
final Script script
final internalWhitesource
final Map config
WhitesourceOrgAdminRepository(Script script, Map config) {
this.script = script
this.config = config
if(!this.config.whitesource?.serviceUrl && !this.config.whitesourceAccessor)
script.error "Parameter 'whitesource.serviceUrl' must be provided as part of the configuration."
if(this.config.whitesourceAccessor instanceof String) {
def clazz = this.class.classLoader.loadClass(this.config.whitesourceAccessor)
this.internalWhitesource = clazz?.newInstance(this.script, this.config)
}
}
def fetchProductMetaInfo() {
def requestBody = [
requestType: "getOrganizationProductVitals",
orgToken: config.whitesource.orgToken
]
def parsedResponse = issueHttpRequest(requestBody)
findProductMeta(parsedResponse)
}
def findProductMeta(parsedResponse) {
def foundMetaProduct = null
for (product in parsedResponse.productVitals) {
if (product.name == config.whitesource.productName) {
foundMetaProduct = product
break
}
}
return foundMetaProduct
}
def createProduct() {
def requestBody = [
requestType: "createProduct",
orgToken: config.whitesource.orgToken,
productName: config.whitesource.productName
]
def parsedResponse = issueHttpRequest(requestBody)
def metaInfo = parsedResponse
def groups = []
def users = []
for(int i = 0; i < config.whitesource.emailAddressesOfInitialProductAdmins.size(); i++) {
def email = config.whitesource.emailAddressesOfInitialProductAdmins.get(i)
users.add(["email": email])
}
requestBody = [
"requestType" : "setProductAssignments",
"productToken" : metaInfo.productToken,
"productMembership" : ["userAssignments":[], "groupAssignments":groups],
"productAdmins" : ["userAssignments":users],
"alertsEmailReceivers" : ["userAssignments":[]]
]
issueHttpRequest(requestBody)
return metaInfo
}
def issueHttpRequest(requestBody) {
def response = internalWhitesource ? internalWhitesource.httpWhitesource(requestBody) : httpWhitesource(requestBody)
def parsedResponse = new JsonUtils().jsonStringToGroovyObject(response.content)
if(parsedResponse?.errorCode){
script.error "[WhiteSource] Request failed with error message '${parsedResponse.errorMessage}' (${parsedResponse.errorCode})."
}
return parsedResponse
}
@NonCPS
protected def httpWhitesource(requestBody) {
requestBody["userKey"] = config.whitesource.orgAdminUserKey
def serializedBody = new JsonUtils().groovyObjectToPrettyJsonString(requestBody)
def params = [
url : config.whitesource.serviceUrl,
httpMode : 'POST',
acceptType : 'APPLICATION_JSON',
contentType: 'APPLICATION_JSON',
requestBody: serializedBody,
quiet : !config.verbose,
timeout : config.whitesource.timeout
]
if (script.env.HTTP_PROXY)
params["httpProxy"] = script.env.HTTP_PROXY
if (config.verbose)
script.echo "Sending http request with parameters ${params}"
def response = script.httpRequest(params)
if (config.verbose)
script.echo "Received response ${response}"
return response
}
}

View File

@@ -1,205 +0,0 @@
package com.sap.piper.integration
import com.cloudbees.groovy.cps.NonCPS
import com.sap.piper.JsonUtils
class WhitesourceRepository implements Serializable {
final Script script
final Map config
WhitesourceRepository(Script script, Map config) {
this.script = script
this.config = config
if(!config?.whitesource?.serviceUrl)
script.error "Parameter 'whitesource.serviceUrl' must be provided as part of the configuration."
}
List fetchVulnerabilities(whitesourceProjectsMetaInformation) {
def fetchedVulnerabilities = []
if (config.whitesource.projectNames) {
for (int i = 0; i < whitesourceProjectsMetaInformation.size(); i++) {
fetchSecurityAlertsPerItem(whitesourceProjectsMetaInformation[i].token, "getProjectAlertsByType", fetchedVulnerabilities)
}
} else {
fetchSecurityAlertsPerItem(config.whitesource.productToken, "getProductAlertsByType", fetchedVulnerabilities)
}
sortVulnerabilitiesByScore(fetchedVulnerabilities)
return fetchedVulnerabilities
}
private fetchSecurityAlertsPerItem(token, type, List<Object> fetchedVulnerabilities) {
def requestBody = [
requestType : type,
alertType : "SECURITY_VULNERABILITY",
projectToken: token
]
def response = fetchWhitesourceResource(requestBody)
fetchedVulnerabilities.addAll(response.alerts)
}
protected def fetchWhitesourceResource(Map requestBody) {
final def response = httpWhitesource(requestBody)
def parsedResponse = new JsonUtils().jsonStringToGroovyObject(response.content)
if(parsedResponse?.errorCode){
script.error "[WhiteSource] Request failed with error message '${parsedResponse.errorMessage}' (${parsedResponse.errorCode})."
}
return parsedResponse
}
@NonCPS
void sortLibrariesAlphabeticallyGAV(List libraries) {
script.echo "found a total of ${libraries.size()} dependencies (direct and indirect)"
libraries.sort { o1, o2 ->
String groupID1 = o1.groupId
String groupID2 = o2.groupId
def comparisionResult = groupID1 <=> groupID2;
if (comparisionResult != 0) {
comparisionResult
} else {
String artifactID1 = o1.artifactId
String artifactID2 = o2.artifactId
artifactID1 <=> artifactID2
}
}
}
@NonCPS
void sortVulnerabilitiesByScore(List vulnerabilities) {
script.echo "${vulnerabilities.size() > 0 ? 'WARNING: ' : ''}found a total of ${vulnerabilities.size()} vulnerabilities"
vulnerabilities.sort { o1, o2 ->
def cvss3score1 = o1.vulnerability.cvss3_score == 0 ? o1.vulnerability.score : o1.vulnerability.cvss3_score
def cvss3score2 = o2.vulnerability.cvss3_score == 0 ? o2.vulnerability.score : o2.vulnerability.cvss3_score
def comparisionResult = cvss3score1 <=> cvss3score2
if (comparisionResult != 0) {
-comparisionResult
} else {
def score1 = o1.vulnerability.score
def score2 = o2.vulnerability.score
-(score1 <=> score2)
}
}
}
List fetchProjectsMetaInfo() {
def projectsMetaInfo = []
if(config.whitesource.projectNames){
def requestBody = [
requestType: "getProductProjectVitals",
productToken: config.whitesource.productToken
]
def response = fetchWhitesourceResource(requestBody)
if(response?.projectVitals) {
projectsMetaInfo.addAll(findProjectsMeta(response.projectVitals))
} else {
script.error "[WhiteSource] Could not fetch any projects for product '${config.whitesource.productName}' from backend, response was ${response}"
}
}
return projectsMetaInfo
}
List findProjectsMeta(projectVitals) {
def matchedProjects = []
for (int i = 0; i < config.whitesource.projectNames?.size(); i++) {
def requestedProjectName = config.whitesource.projectNames[i].trim()
def matchedProjectInfo = null
for (int j = 0; j < projectVitals.size(); j++) {
def projectResponse = projectVitals[j]
if (projectResponse.name == requestedProjectName) {
matchedProjectInfo = projectResponse
break
}
}
if (matchedProjectInfo != null) {
matchedProjects.add(matchedProjectInfo)
} else {
script.error "[WhiteSource] Could not fetch/find requested project '${requestedProjectName}' for product '${config.whitesource.productName}'"
}
}
return matchedProjects
}
void fetchReportForProduct(reportName) {
def headers = [[name: 'Cache-Control', value: 'no-cache, no-store, must-revalidate'], [name: 'Pragma', value: 'no-cache']]
def requestContent = [
requestType: "getProductRiskReport",
productToken: config.whitesource.productToken
]
//fetchFileFromWhiteSource(reportName, requestContent)
httpWhitesource(requestContent, 'APPLICATION_OCTETSTREAM', headers, reportName)
}
def fetchProductLicenseAlerts() {
def requestContent = [
requestType: "getProductAlertsByType",
alertType: "REJECTED_BY_POLICY_RESOURCE",
productToken: config.whitesource.productToken
]
def parsedResponse = fetchWhitesourceResource(requestContent)
return parsedResponse
}
def fetchProjectLicenseAlerts(String projectToken) {
def requestContent = [
requestType: "getProjectAlertsByType",
alertType: "REJECTED_BY_POLICY_RESOURCE",
projectToken: projectToken
]
def parsedResponse = fetchWhitesourceResource(requestContent)
return parsedResponse
}
protected def httpWhitesource(requestBody, acceptType = 'APPLICATION_JSON', customHeaders = null, outputFile = null) {
handleAdditionalRequestParameters(requestBody)
def serializedBody = new JsonUtils().groovyObjectToPrettyJsonString(requestBody)
def params = [
url : config.whitesource.serviceUrl,
httpMode : 'POST',
acceptType : acceptType,
contentType: 'APPLICATION_JSON',
requestBody: serializedBody,
quiet : !config.verbose,
timeout : config.whitesource.timeout
]
if(customHeaders) params["customHeaders"] = customHeaders
if (outputFile) params["outputFile"] = outputFile
if (script.env.HTTP_PROXY) params["httpProxy"] = script.env.HTTP_PROXY
if(config.verbose)
script.echo "Sending http request with parameters ${params}"
def response = script.httpRequest(params)
if(config.verbose)
script.echo "Received response ${response}"
return response
}
@NonCPS
protected void handleAdditionalRequestParameters(params) {
if(config.whitesource.userKey)
params["userKey"] = config.whitesource.userKey
}
}
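As a reading aid for the sorting above: sortVulnerabilitiesByScore prefers the CVSS v3 score and only falls back to the legacy score when cvss3_score is 0, then orders descending. A tiny standalone sketch of that ordering rule with sample data (not taken from this change):

    // Illustration only: effective score selection used for the descending sort.
    def alerts = [
        [vulnerability: [score: 9.8, cvss3_score: 0]],    // no v3 score -> effective 9.8
        [vulnerability: [score: 6.9, cvss3_score: 8.5]],  // v3 present  -> effective 8.5
        [vulnerability: [score: 7.5, cvss3_score: 9.8]],  // v3 present  -> effective 9.8
    ]
    def effective = { a -> a.vulnerability.cvss3_score == 0 ? a.vulnerability.score : a.vulnerability.cvss3_score }
    alerts.sort { a, b -> effective(b) <=> effective(a) }
    assert alerts.collect { effective(it) } == [9.8, 9.8, 8.5]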

View File

@@ -170,6 +170,7 @@ public class CommonStepsTest extends BasePiperTest{
'kanikoExecute', //implementing new golang pattern without fields
'gitopsUpdateDeployment', //implementing new golang pattern without fields
'vaultRotateSecretId', //implementing new golang pattern without fields
'whitesourceExecuteScan', //implementing new golang pattern without fields
'uiVeri5ExecuteTests', //implementing new golang pattern without fields
'integrationArtifactDeploy', //implementing new golang pattern without fields
'integrationArtifactUpdateConfiguration', //implementing new golang pattern without fields

File diff suppressed because it is too large

View File

@@ -1,204 +0,0 @@
package com.sap.piper
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.RuleChain
import util.BasePiperTest
import util.JenkinsLoggingRule
import util.JenkinsReadFileRule
import util.JenkinsWriteFileRule
import util.Rules
import static org.hamcrest.Matchers.containsString
import static org.hamcrest.Matchers.hasItem
import static org.hamcrest.Matchers.not
import static org.hamcrest.Matchers.allOf
import static org.junit.Assert.assertThat
class WhitesourceConfigurationHelperTest extends BasePiperTest {
JenkinsReadFileRule jrfr = new JenkinsReadFileRule(this, 'test/resources/utilsTest/')
JenkinsWriteFileRule jwfr = new JenkinsWriteFileRule(this)
JenkinsLoggingRule jlr = new JenkinsLoggingRule(this)
@Rule
public RuleChain ruleChain = Rules
.getCommonRules(this)
.around(jrfr)
.around(jwfr)
.around(jlr)
@Before
void init() {
def p = new Properties()
p.put("log.level", "debug")
helper.registerAllowedMethod('readProperties', [Map], {return p})
}
@Test
void testExtendConfigurationFileUnifiedAgentEmptyConfig() {
helper.registerAllowedMethod('readProperties', [Map], {return new Properties()})
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'none', whitesource: [configFilePath: './config',serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'DIST - name1', productToken: '1234', userKey: '0000']], "./")
assertThat(jwfr.files['./config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
allOf(
not(containsString("log.level=debug")),
containsString("apiKey=abcd"),
containsString("productName=DIST - name1"),
containsString("productToken=1234"),
containsString("userKey=0000")
)
)
assertThat(jlr.log, containsString("[Whitesource] Configuration for scanType: 'none' is not yet hardened, please do a quality assessment of your scan results."))
}
@Test
void testExtendConfigurationFileUnifiedAgentConfigDeeper() {
helper.registerAllowedMethod('readProperties', [Map], { m -> if (!m.file.contains('testModule')) return new Properties() else return null })
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'none', whitesource: [configFilePath: './config',serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'DIST - name1', productToken: '1234', userKey: '0000']], "./testModule/")
assertThat(jwfr.files['./testModule/config.13954509c7675edfce373138f51c68464d1abcac'],
allOf(
not(containsString("log.level=debug")),
containsString("apiKey=abcd"),
containsString("productName=DIST - name1"),
containsString("productToken=1234"),
containsString("userKey=0000")
)
)
assertThat(jlr.log, containsString("[Whitesource] Configuration for scanType: 'none' is not yet hardened, please do a quality assessment of your scan results."))
}
@Test
void testExtendConfigurationFileUnifiedAgentMaven() {
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'none', whitesource: [configFilePath: './config',serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'DIST - name1', productToken: '1234', userKey: '0000']], "./")
assertThat(jwfr.files['./config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
allOf(
containsString("apiKey=abcd"),
containsString("productName=DIST - name1"),
containsString("productToken=1234"),
containsString("userKey=0000")
)
)
assertThat(jlr.log, containsString("[Whitesource] Configuration for scanType: 'none' is not yet hardened, please do a quality assessment of your scan results."))
}
@Test
void testExtendConfigurationFileUnifiedAgentNpm() {
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'npm', whitesource: [configFilePath: './config',serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'DIST - name1', productToken: '1234', userKey: '0000']], "./")
assertThat(jwfr.files['./config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
allOf(
containsString("apiKey=abcd"),
containsString("productName=DIST - name1"),
containsString("productToken=1234"),
containsString("userKey=0000")
)
)
assertThat(jlr.log, containsString("[Whitesource] Configuration for scanType: 'npm' is not yet hardened, please do a quality assessment of your scan results."))
}
@Test
void testExtendConfigurationFileUnifiedAgentDocker() {
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'docker', whitesource: [configFilePath: './config',serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'DIST - name1', productToken: '1234', userKey: '0000']], "./")
assertThat(jwfr.files['./config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
allOf(
containsString("apiKey=abcd"),
containsString("productName=DIST - name1"),
containsString("productToken=1234"),
containsString("docker.scanImages=true"),
containsString("docker.scanTarFiles=true"),
containsString("docker.includes=.*.tar"),
)
)
}
@Test
void testExtendConfigurationFileUnifiedAgentSbt() {
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'sbt', whitesource: [configFilePath: './config',serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'DIST - name1', productToken: '1234', userKey: '0000']], "./")
assertThat(jwfr.files['./config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
allOf(
containsString("apiKey=abcd"),
containsString("productName=DIST - name1"),
containsString("productToken=1234"),
containsString("userKey=0000"),
containsString("sbt.resolveDependencies=true"),
containsString("log.level=debug")
)
)
}
@Test
void testExtendConfigurationFileUnifiedAgentDub() {
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'dub', whitesource: [configFilePath: './config',serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'DIST - name1', productToken: '1234', userKey: '0000']], "./")
assertThat(jwfr.files['./config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
allOf(
containsString("apiKey=abcd"),
containsString("productName=DIST - name1"),
containsString("productToken=1234"),
containsString("userKey=0000"),
containsString("includes=**/*.d **/*.di")
)
)
}
@Test
void testExtendConfigurationFileUnifiedAgentPip() {
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'pip', whitesource: [configFilePath: './config',serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'DIST - name1', productToken: '1234', userKey: '0000']], "./")
assertThat(jwfr.files['./config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
allOf(
containsString("apiKey=abcd"),
containsString("productName=DIST - name1"),
containsString("productToken=1234"),
containsString("userKey=0000"),
containsString("python.resolveDependencies=true")
)
)
assertThat(jlr.log, not(containsString("[Whitesource] Configuration for scanType: 'pip' is not yet hardened, please do a quality assessment of your scan results.")))
}
@Test
void testExtendConfigurationFileUnifiedAgentGolangVerbose() {
def config = [scanType: 'golang', whitesource: [configFilePath: './config', serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'SHC - name2', productToken: '1234', userKey: '0000'], stashContent: ['some', 'stashes'], verbose: true]
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, config, "./")
assertThat(jwfr.files['./config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
allOf(
containsString("apiKey=abcd"),
containsString("productName=SHC - name2"),
containsString("productToken=1234"),
containsString("userKey=0000"),
containsString("go.resolveDependencies=true"),
containsString("log.level=debug")
)
)
assertThat(config.stashContent, hasItem(containsString('modified whitesource config ')))
assertThat(jlr.log, not(containsString("[Warning][Whitesource] Configuration for scanType: 'golang' is not yet hardened, please do a quality assessment of your scan results.")))
}
@Test
void testExtendConfigurationFileUnifiedAgentEnforcement() {
def p = new Properties()
p.putAll(['python.resolveDependencies': 'false', 'python.ignoreSourceFiles': 'false', 'python.ignorePipInstallErrors': 'true','python.installVirtualenv': 'false'])
helper.registerAllowedMethod('readProperties', [Map], {return p})
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'pip', whitesource: [configFilePath: './config', serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'cdfg', productName: 'name', productToken: '1234', userKey: '0000'], verbose: true], "./")
assertThat(jwfr.files['./config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
allOf(
containsString("apiKey=cdfg"),
containsString("productName=name"),
containsString("productToken=1234"),
containsString("userKey=0000"),
containsString("python.resolveDependencies=true"),
containsString("log.level=debug"),
containsString("python.resolveDependencies=true"),
containsString("python.ignoreSourceFiles=true"),
containsString("python.ignorePipInstallErrors=true"),
containsString("python.installVirtualenv=false")
)
)
}
}

View File

@@ -1,281 +0,0 @@
package com.sap.piper.integration
import hudson.AbortException
import org.junit.After
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.ExpectedException
import org.junit.rules.RuleChain
import util.BasePiperTest
import util.JenkinsEnvironmentRule
import util.JenkinsErrorRule
import util.JenkinsLoggingRule
import util.LibraryLoadingTestExecutionListener
import util.Rules
import static org.assertj.core.api.Assertions.assertThat
import static org.hamcrest.Matchers.containsString
import static org.hamcrest.Matchers.is
import static org.hamcrest.Matchers.isA
class WhitesourceOrgAdminRepositoryTest extends BasePiperTest {
private ExpectedException expectedException = ExpectedException.none()
private JenkinsLoggingRule loggingRule = new JenkinsLoggingRule(this)
@Rule
public RuleChain ruleChain = Rules
.getCommonRules(this)
.around(expectedException)
.around(loggingRule)
WhitesourceOrgAdminRepository repository
@Before
void init() throws Exception {
repository = new WhitesourceOrgAdminRepository(nullScript, [whitesource: [serviceUrl: "http://some.host.whitesource.com/api/"], verbose: true])
LibraryLoadingTestExecutionListener.prepareObjectInterceptors(repository)
}
@After
void tearDown() {
printCallStack()
nullScript.env = [:]
}
@Test
void testMissingConfig() {
expectedException.expect(AbortException)
expectedException.expectMessage("Parameter 'whitesource.serviceUrl' must be provided as part of the configuration.")
new WhitesourceOrgAdminRepository(nullScript, [:])
}
@Test
void testAccessor() {
new WhitesourceOrgAdminRepository(nullScript, [whitesourceAccessor: "com.sap.piper.integration.WhitesourceRepository", whitesource: [serviceUrl: "http://test.com"]])
}
@Test
void testResolveProductMeta() {
def whitesourceMetaResponse = [
productVitals: [
[
token: '410389ae-0269-4719-9cbf-fb5e299c8415',
name : 'NW'
],
[
token: '2892f1db-4361-4e83-a89d-d28a262d65b9',
name : 'XS UAA'
],
[
token: '1111111-1111-1111-1111-111111111111',
name : 'Correct Name Cloud'
]
]
]
repository.config.putAll([whitesource: [productName: "Correct Name Cloud"]])
def result = repository.findProductMeta(whitesourceMetaResponse)
assertThat(result).isEqualTo([
token: '1111111-1111-1111-1111-111111111111',
name : 'Correct Name Cloud'
])
}
@Test
void testHttpWhitesourceInternalCallUserKey() {
def config = [whitesource: [ serviceUrl: "http://some.host.whitesource.com/api/", orgAdminUserKey: "4711"], verbose: false]
repository.config.putAll(config)
def requestBody = ["someJson" : [ "someObject" : "abcdef" ]]
def requestParams
helper.registerAllowedMethod('httpRequest', [Map], { p ->
requestParams = p
})
repository.httpWhitesource(requestBody)
assertThat(requestParams, is(
[
url : config.serviceUrl,
httpMode : 'POST',
acceptType : 'APPLICATION_JSON',
contentType: 'APPLICATION_JSON',
requestBody: requestBody,
quiet : true,
userKey : config.orgAdminUserKey
]
))
}
@Test
void testHttpWhitesourceInternalCallUserKeyVerboseProxy() {
def config = [whitesource: [ serviceUrl: "http://some.host.whitesource.com/api/", orgAdminUserKey: "4711"], verbose: true]
nullScript.env['HTTP_PROXY'] = "http://test.sap.com:8080"
repository.config.putAll(config)
def requestBody = ["someJson" : [ "someObject" : "abcdef" ]]
def requestParams
helper.registerAllowedMethod('httpRequest', [Map], { p ->
requestParams = p
})
repository.httpWhitesource(requestBody)
assertThat(requestParams, is(
[
url : config.serviceUrl,
httpMode : 'POST',
acceptType : 'APPLICATION_JSON',
contentType: 'APPLICATION_JSON',
requestBody: requestBody,
quiet : false,
userKey : config.orgAdminUserKey,
httpProxy : "http://test.sap.com:8080"
]
))
assertThat(loggingRule.log, containsString("Sending http request with parameters"))
assertThat(loggingRule.log, containsString("Received response"))
}
@Test
void testCreateProduct() {
def config = [
whitesource: [
serviceUrl: "http://some.host.whitesource.com/api/",
verbose: false,
orgAdminUserKey: "4711",
orgToken: "abcd1234",
productName: "testProduct",
emailAddressesOfInitialProductAdmins: ['some@somewhere.com', 'some2@somewhere.com']
]
]
repository.config.putAll(config)
def requestBody1 = [
requestType: "getOrganizationProductVitals",
orgToken: config.orgToken,
userKey: "4711"
]
def requestBody2 = [
"requestType" : "setProductAssignments",
"productToken" : "54785",
"productMembership" : ["userAssignments":[], "groupAssignments":[]],
"productAdmins" : ["userAssignments":[[ "email": "some@somewhere.com" ], ["email": "some2@somewhere.com"]]],
"alertsEmailReceivers" : ["userAssignments":[]],
"userKey": "4711"
]
def requestParams = []
helper.registerAllowedMethod('httpRequest', [Map], { p ->
requestParams.add(p)
return [ content : "{ \"productToken\" : \"54785\" }" ]
})
repository.createProduct()
assertThat(requestParams[0], is(
[
url : config.serviceUrl,
httpMode : 'POST',
acceptType : 'APPLICATION_JSON',
contentType: 'APPLICATION_JSON',
requestBody: requestBody1,
quiet : false,
userKey : config.orgAdminUserKey,
httpProxy : "http://test.sap.com:8080"
]
))
assertThat(requestParams[1], is(
[
url : config.serviceUrl,
httpMode : 'POST',
acceptType : 'APPLICATION_JSON',
contentType: 'APPLICATION_JSON',
requestBody: requestBody2,
quiet : false,
userKey : config.orgAdminUserKey,
httpProxy : "http://test.sap.com:8080"
]
))
}
@Test
void testIssueHttpRequestError() {
def config = [whitesource: [ serviceUrl: "http://some.host.whitesource.com/api/", orgAdminUserKey: "4711"], verbose: false]
repository.config.putAll(config)
def requestBody = ["someJson" : [ "someObject" : "abcdef" ]]
def requestParams
helper.registerAllowedMethod('httpRequest', [Map], { p ->
requestParams = p
return [content: "{ \"errorCode\" : \"4546\", \"errorMessage\" : \"some text\" } }"]
})
def errorCaught = false
try {
repository.issueHttpRequest(requestBody)
} catch (e) {
errorCaught = true
assertThat(e, isA(AbortException.class))
assertThat(e.getMessage(), equals("[WhiteSource] Request failed with error message 'some text' (4546)."))
}
assertThat(errorCaught, is(true))
assertThat(requestParams, is(
[
url : config.serviceUrl,
httpMode : 'POST',
acceptType : 'APPLICATION_JSON',
contentType: 'APPLICATION_JSON',
requestBody: requestBody,
quiet : true,
userKey : config.orgAdminUserKey
]
))
}
@Test
void testFetchProductMetaInfo() {
def config = [whitesource: [ serviceUrl: "http://some.host.whitesource.com/api/", orgAdminUserKey: "4711", orgToken: "12345", productName: "testProduct"], verbose: true]
nullScript.env['HTTP_PROXY'] = "http://test.sap.com:8080"
repository.config.putAll(config)
def requestBody = [
requestType: "getOrganizationProductVitals",
orgToken: config.orgToken,
userKey: "4711"
]
def requestParams
helper.registerAllowedMethod('httpRequest', [Map], { p ->
requestParams = p
return [ content: "{ \"productVitals\" : [ { \"name\": \"testProduct\"} ] }"]
})
def result = repository.fetchProductMetaInfo()
assertThat(requestParams, is(
[
url : config.serviceUrl,
httpMode : 'POST',
acceptType : 'APPLICATION_JSON',
contentType: 'APPLICATION_JSON',
requestBody: requestBody,
quiet : false,
userKey : config.orgAdminUserKey,
httpProxy : "http://test.sap.com:8080"
]
))
assertThat(result, is([ name: "testProduct"]))
assertThat(loggingRule.log, containsString("Sending http request with parameters"))
assertThat(loggingRule.log, containsString("Received response"))
}
}

View File

@@ -1,575 +0,0 @@
package com.sap.piper.integration
import hudson.AbortException
import org.junit.After
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.ExpectedException
import org.junit.rules.RuleChain
import util.BasePiperTest
import util.JenkinsLoggingRule
import util.LibraryLoadingTestExecutionListener
import util.Rules
import static org.assertj.core.api.Assertions.assertThat
import static org.hamcrest.Matchers.containsString
import static org.hamcrest.Matchers.is
import static org.hamcrest.Matchers.isA
class WhitesourceRepositoryTest extends BasePiperTest {
private ExpectedException exception = ExpectedException.none()
private JenkinsLoggingRule loggingRule = new JenkinsLoggingRule(this)
@Rule
public RuleChain ruleChain = Rules
.getCommonRules(this)
.around(exception)
.around(loggingRule)
WhitesourceRepository repository
@Before
void init() throws Exception {
nullScript.env['HTTP_PROXY'] = "http://proxy.org:8080"
repository = new WhitesourceRepository(nullScript, [whitesource: [serviceUrl: "http://some.host.whitesource.com/api/"]])
LibraryLoadingTestExecutionListener.prepareObjectInterceptors(repository)
}
@After
void tearDown() {
printCallStack()
nullScript.env = [:]
}
@Test
void testMissingConfig() {
exception.expect(AbortException)
exception.expectMessage("Parameter 'whitesource.serviceUrl' must be provided as part of the configuration.")
new WhitesourceRepository(nullScript, [:])
}
@Test
void testResolveProjectsMeta() {
def whitesourceMetaResponse = [
projectVitals: [
[
token: '410389ae-0269-4719-9cbf-fb5e299c8415',
name : 'NW'
],
[
token: '2892f1db-4361-4e83-a89d-d28a262d65b9',
name : 'Correct Project Name2'
],
[
token: '1111111-1111-1111-1111-111111111111',
name : 'Correct Project Name'
]
]
]
repository.config.whitesource = [:]
repository.config.whitesource['productName'] = "Correct Name Cloud"
repository.config.whitesource['projectNames'] = ["Correct Project Name", "Correct Project Name2"]
def result = repository.findProjectsMeta(whitesourceMetaResponse.projectVitals)
assertThat(result, is(
[
{
token: '1111111-1111-1111-1111-111111111111'
name: 'Correct Name Cloud'
},
{
token: '2892f1db-4361-4e83-a89d-d28a262d65b9'
name: 'Correct Project Name2'
}
]))
assertThat(result.size(), 2)
}
@Test
void testResolveProjectsMetaFailNotFound() {
def whitesourceMetaResponse = [
projectVitals: [
[
token: '410389ae-0269-4719-9cbf-fb5e299c8415',
name : 'NW'
],
[
token: '2892f1db-4361-4e83-a89d-d28a262d65b9',
name : 'Product Name'
],
[
token: '1111111-1111-1111-1111-111111111111',
name : 'Product Name2'
]
]
]
exception.expect(AbortException.class)
exception.expectMessage("Correct Project Name")
repository.config.putAll([whitesource : [projectNames: ["Correct Project Name"]]])
repository.findProjectsMeta(whitesourceMetaResponse.projectVitals)
}
@Test
void testSortLibrariesAlphabeticallyGAV() {
def librariesResponse = [
[
groupId : 'xyz',
artifactId: 'abc'
],
[
groupId : 'abc',
artifactId: 'abc-def'
],
[
groupId : 'abc',
artifactId: 'def-abc'
],
[
groupId : 'def',
artifactId: 'test'
]
]
repository.sortLibrariesAlphabeticallyGAV(librariesResponse)
assertThat(librariesResponse, is(
[
{
groupId: 'abc'
artifactId: 'abc-def'
},
{
groupId: 'abc'
artifactId: 'def-abc'
},
{
groupId: 'def'
artifactId: 'test'
},
{
groupId: 'xyz'
artifactId: 'abc'
}
]))
}
@Test
void testSortVulnerabilitiesByScore() {
def vulnerabilitiesResponse = [
[
vulnerability: [
score : 6.9,
cvss3_score: 8.5
]
],
[
vulnerability: [
score : 7.5,
cvss3_score: 9.8
]
],
[
vulnerability: [
score : 4,
cvss3_score: 0
]
],
[
vulnerability: [
score : 9.8,
cvss3_score: 0
]
],
[
vulnerability: [
score : 0,
cvss3_score: 5
]
]
]
repository.sortVulnerabilitiesByScore(vulnerabilitiesResponse)
assertThat(vulnerabilitiesResponse, is(
[
{vulnerability: {
score: 9.8
cvss3_score: 0
}}
,
{vulnerability: {
score : 7.5
cvss3_score: 9.8
}}
,
{vulnerability: {
score : 6.9
cvss3_score: 8.5
}}
,
{vulnerability: {
score : 0
cvss3_score: 5
}}
,
{vulnerability: {
score : 4
cvss3_score: 0
}}
]))
}
@Test
void testHttpWhitesourceExternalCallNoUserKey() {
def config = [whitesource: [serviceUrl: "https://saas.whitesource.com/api"], verbose: true]
repository.config.putAll(config)
def requestBody = "{ \"someJson\" : { \"someObject\" : \"abcdef\" } }"
def requestParams
helper.registerAllowedMethod('httpRequest', [Map], { p ->
requestParams = p
})
repository.httpWhitesource(requestBody)
assertThat(requestParams, is(
[
url : config.whitesource.serviceUrl,
httpMode : 'POST',
acceptType : 'APPLICATION_JSON',
contentType: 'APPLICATION_JSON',
requestBody: requestBody,
quiet : false,
proxy : "http://proxy.org:8080"
]
))
}
@Test
void testHttpWhitesourceExternalCallUserKey() {
def config = [whitesource: [ serviceUrl: "https://saas.whitesource.com/api", userKey: "4711"], verbose: true]
def requestBody = "{ \"someJson\" : { \"someObject\" : \"abcdef\" } }"
def requestParams
helper.registerAllowedMethod('httpRequest', [Map], { p ->
requestParams = p
})
repository.httpWhitesource(requestBody)
assertThat(requestParams, is(
[
url : config.whitesource.serviceUrl,
httpMode : 'POST',
acceptType : 'APPLICATION_JSON',
contentType: 'APPLICATION_JSON',
requestBody: requestBody,
quiet : false,
proxy : "http://proxy.org:8080",
userKey : "4711"
]
))
}
@Test
void testHttpWhitesourceInternalCallUserKey() {
def config = [whitesource: [serviceUrl: "http://test.org/some", userKey: "4711"], verbose: false]
def requestBody = "{ \"someJson\" : { \"someObject\" : \"abcdef\" } }"
def requestParams
helper.registerAllowedMethod('httpRequest', [Map], { p ->
requestParams = p
})
repository.httpWhitesource(requestBody)
assertThat(requestParams, is(
[
url : config.whitesource.serviceUrl,
httpMode : 'POST',
acceptType : 'APPLICATION_JSON',
contentType: 'APPLICATION_JSON',
requestBody: requestBody,
quiet : true
]
))
}
@Test
void testHttpCallWithError() {
def responseBody = """{
\"errorCode\": 5001,
\"errorMessage\": \"User is not allowed to perform this action\"
}"""
exception.expect(isA(AbortException.class))
exception.expectMessage("[WhiteSource] Request failed with error message 'User is not allowed to perform this action' (5001)")
helper.registerAllowedMethod('httpRequest', [Map], { p ->
return [content: responseBody]
})
repository.fetchWhitesourceResource([httpMode: 'POST'])
}
@Test
void testFetchReportForProduct() {
repository.config.putAll([whitesource: [serviceUrl: "http://test.org/some", productToken: "4712", userKey: "4711"], verbose: true])
def requestBody = "{ \"requestType\": \"getProductRiskReport\", \"productToken\": \"${repository.config.whitesource.productToken}\" }"
def requestParams
helper.registerAllowedMethod('httpRequest', [Map], { p ->
requestParams = p
})
repository.fetchReportForProduct("test.file")
assertThat(requestParams, is(
[
url : repository.config.whitesource.serviceUrl,
httpMode : 'POST',
acceptType : 'APPLICATION_OCTETSTREAM',
contentType : 'APPLICATION_JSON',
requestBody : requestBody,
quiet : false,
userKey : repository.config.whitesource.userKey,
httpProxy : "http://test.sap.com:8080",
outputFile : "test.file",
customHeaders : [[name: 'Cache-Control', value: 'no-cache, no-store, must-revalidate'], [name: 'Pragma', value: 'no-cache']]
]
))
assertThat(loggingRule.log, containsString("Sending http request with parameters [requestType:getProductRiskReport, productToken:4711]"))
}
@Test
void testFetchProductLicenseAlerts() {
def config = [whitesource: [serviceUrl: "http://some.host.whitesource.com/api/", userKey: "4711", productToken: "8547"]]
nullScript.env['HTTP_PROXY'] = "http://test.sap.com:8080"
repository.config.putAll(config)
def requestBody = [
requestType: "getProductAlertsByType",
alertType: "REJECTED_BY_POLICY_RESOURCE",
productToken: config.productToken
]
def requestParams
helper.registerAllowedMethod('httpRequest', [Map], { p ->
requestParams = p
return [ content: "{ \"alerts\" : [] }"]
})
repository.fetchProductLicenseAlerts()
assertThat(requestParams, is(
[
url : config.whitesource.serviceUrl,
httpMode : 'POST',
acceptType : 'APPLICATION_JSON',
contentType: 'APPLICATION_JSON',
requestBody: requestBody,
quiet : false,
userKey : config.whitesource.userKey,
httpProxy : "http://test.sap.com:8080"
]
))
}
@Test
void testFetchProjectLicenseAlerts() {
def projectToken = "8547"
def config = [whitesource: [serviceUrl: "http://some.host.whitesource.com/api/", userKey: "4711"]]
repository.config.putAll(config)
def requestBody = [
requestType: "getProjectAlertsByType",
alertType: "REJECTED_BY_POLICY_RESOURCE",
projectToken: projectToken
]
def requestParams
helper.registerAllowedMethod('httpRequest', [Map], { p ->
requestParams = p
return [ content: "{ \"alerts\" : [] }"]
})
repository.fetchProjectLicenseAlerts(projectToken)
assertThat(requestParams, is(
[
url : config.whitesource.serviceUrl,
httpMode : 'POST',
acceptType : 'APPLICATION_JSON',
contentType: 'APPLICATION_JSON',
requestBody: requestBody,
quiet : false,
userKey : config.whitesource.userKey,
httpProxy : "http://test.sap.com:8080"
]
))
}
@Test
void testFetchProjectsMetaInfo() {
def config = [whitesource: [serviceUrl: "http://some.host.whitesource.com/api/", userKey: "4711", productToken: '8475', projectNames: ['testProject1', 'testProject2']]]
nullScript.env['HTTP_PROXY'] = "http://test.sap.com:8080"
repository.config.putAll(config)
def requestBody = [
requestType: "getProductProjectVitals",
productToken: config.productToken
]
def requestParams
helper.registerAllowedMethod('httpRequest', [Map], { p ->
requestParams = p
return [ content: "{ \"projectVitals\" : [ { \"name\": \"testProject1\"}, { \"name\": \"testProject2\"} ] }"]
})
def result = repository.fetchProjectsMetaInfo()
assertThat(requestParams, is(
[
url : config.whitesource.serviceUrl,
httpMode : 'POST',
acceptType : 'APPLICATION_JSON',
contentType: 'APPLICATION_JSON',
requestBody: requestBody,
quiet : false,
userKey : config.whitesource.userKey,
httpProxy : "http://test.sap.com:8080"
]
))
assertThat(result, is([[ name: "testProduct1"], [ name: "testProduct2"]]))
}
@Test
void testFetchProjectsMetaInfoError() {
def config = [whitesource: [serviceUrl: "http://some.host.whitesource.com/api/", userKey: "4711", productName: 'kjdkjkhd', productToken: '8475', projectNames: ['testProject1', 'testProject2']]]
repository.config.putAll(config)
def requestParams
helper.registerAllowedMethod('httpRequest', [Map], { p ->
requestParams = p
return [ content: "{ }"]
})
def errorCaught = false
try {
repository.fetchProjectsMetaInfo()
} catch (e) {
errorCaught = true
assertThat(e, isA(AbortException.class))
assertThat(e.getMessage(), is("[WhiteSource] Could not fetch any projects for product '${config.productName}' from backend, response was {}"))
}
assertThat(errorCaught, is(true))
}
@Test
void testFetchVulnerabilitiesOnProjects() {
def config = [whitesource: [serviceUrl: "http://some.host.whitesource.com/api/", userKey: "4711", productToken: '8475', projectNames: ['testProject1', 'testProject2']]]
nullScript.env['HTTP_PROXY'] = "http://test.sap.com:8080"
repository.config.putAll(config)
def requestBody1 = [
requestType : "getProjectAlertsByType",
alertType : "SECURITY_VULNERABILITY",
projectToken: "1234"
]
def requestBody2 = [
requestType : "getProjectAlertsByType",
alertType : "SECURITY_VULNERABILITY",
projectToken: "2345"
]
def requestParams = []
helper.registerAllowedMethod('httpRequest', [Map], { p ->
requestParams.add(p)
return [ content: "{ \"alerts\" : [ { \"vulnerability\" : { \"cvss3_score\" : \"7\"} } ] }"]
})
def result = repository.fetchVulnerabilities([ [name: "testProject1", token: "1234"], [name: "testProject2", token: "2345"] ])
assertThat(requestParams[0], is(
[
url : config.whitesource.serviceUrl,
httpMode : 'POST',
acceptType : 'APPLICATION_JSON',
contentType: 'APPLICATION_JSON',
requestBody: requestBody1,
quiet : false,
userKey : config.whitesource.userKey,
httpProxy : "http://test.sap.com:8080"
]
))
assertThat(requestParams[1], is(
[
url : config.whitesource.serviceUrl,
httpMode : 'POST',
acceptType : 'APPLICATION_JSON',
contentType: 'APPLICATION_JSON',
requestBody: requestBody2,
quiet : false,
userKey : config.whitesource.userKey,
httpProxy : "http://test.sap.com:8080"
]
))
assertThat(result.size(), is(2))
}
@Test
void testFetchVulnerabilitiesOnProduct() {
def config = [whitesource: [serviceUrl: "http://some.host.whitesource.com/api/", userKey: "4711", productToken: '8475', productName : 'testProduct']]
nullScript.env['HTTP_PROXY'] = "http://test.sap.com:8080"
repository.config.putAll(config)
def requestBody = [
requestType : "getProductAlertsByType",
alertType : "SECURITY_VULNERABILITY",
productToken: config.productToken,
]
def requestParams = []
helper.registerAllowedMethod('httpRequest', [Map], { p ->
requestParams.add(p)
return [ content: "{ \"alerts\" : [ { \"vulnerability\" : { \"cvss3_score\" : \"7\"} } ] }"]
})
def result = repository.fetchVulnerabilities([ [name: "testProject1", token: "1234"], [name: "testProject2", token: "2345"] ])
assertThat(requestParams[0], is(
[
url : config.whitesource.serviceUrl,
httpMode : 'POST',
acceptType : 'APPLICATION_JSON',
contentType: 'APPLICATION_JSON',
requestBody: requestBody,
quiet : false,
userKey : config.whitesource.userKey,
httpProxy : "http://test.sap.com:8080"
]
))
assertThat(result.size(), is(1))
}
}

View File

@@ -1,659 +1,21 @@
import com.sap.piper.BuildTool
import com.sap.piper.DescriptorUtils
import com.sap.piper.DownloadCacheUtils
import com.sap.piper.GenerateDocumentation
import com.sap.piper.JsonUtils
import com.sap.piper.Utils
import com.sap.piper.integration.WhitesourceOrgAdminRepository
import com.sap.piper.integration.WhitesourceRepository
import com.sap.piper.ConfigurationHelper
import com.sap.piper.WhitesourceConfigurationHelper
import com.sap.piper.mta.MtaMultiplexer
import groovy.transform.Field
import groovy.text.GStringTemplateEngine
import static com.sap.piper.Prerequisites.checkScript
@Field String STEP_NAME = getClass().getName()
@Field Set GENERAL_CONFIG_KEYS = [
'whitesource',
/**
* Jenkins credentials ID referring to the organization admin's token.
* @parentConfigKey whitesource
*/
'orgAdminUserTokenCredentialsId',
/**
* WhiteSource token identifying your organization.
* @parentConfigKey whitesource
*/
'orgToken',
/**
* Name of the WhiteSource product to be created and used for results aggregation.
* @parentConfigKey whitesource
*/
'productName',
/**
* Version of the WhiteSource product to be created and used for results aggregation, usually determined automatically.
* @parentConfigKey whitesource
*/
'productVersion',
/**
* Token of the WhiteSource product to be created and used for results aggregation, usually determined automatically.
* @parentConfigKey whitesource
*/
'productToken',
/**
* List of WhiteSource projects to be included in the assessment part of the step, usually determined automatically.
* @parentConfigKey whitesource
*/
'projectNames',
/**
* URL used for downloading the Java Runtime Environment (JRE) required to run the WhiteSource Unified Agent.
* @parentConfigKey whitesource
*/
'jreDownloadUrl',
/**
* URL to the WhiteSource server API used for communication, defaults to `https://saas.whitesourcesoftware.com/api`.
* @parentConfigKey whitesource
*/
'serviceUrl',
/**
* Jenkins credentials ID referring to the product admin's token.
* @parentConfigKey whitesource
*/
'userTokenCredentialsId',
/**
* Type of development stack used to implement the solution.
* @possibleValues `golang`, `maven`, `mta`, `npm`, `pip`, `sbt`, `dub`
*/
'scanType',
/**
* Whether verbose output should be produced.
* @possibleValues `true`, `false`
*/
'verbose',
/**
* Toggle to activate the new go-implementation of the step. Off by default.
* @possibleValues true, false
*/
'useGoStep',
]
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS + [
/**
* Install command that can be used to populate the default docker image for some scenarios.
*/
'installCommand',
/**
* URL used to download the latest version of the WhiteSource Unified Agent.
*/
'agentDownloadUrl',
/**
* Locally used name for the Unified Agent jar file after download.
*/
'agentFileName',
/**
* Additional parameters passed to the Unified Agent command line.
*/
'agentParameters',
/**
* List of build descriptors and therefore modules to exclude from the scan and assessment activities.
*/
'buildDescriptorExcludeList',
/**
* Explicit path to the build descriptor file.
*/
'buildDescriptorFile',
/**
* Explicit path to the WhiteSource Unified Agent configuration file.
*/
'configFilePath',
/**
* Whether to create the related WhiteSource product on the fly based on the supplied pipeline configuration.
*/
'createProductFromPipeline',
/**
* The list of email addresses to assign as product admins for newly created WhiteSource products.
*/
'emailAddressesOfInitialProductAdmins',
/**
* Docker image to be used for scanning.
*/
'dockerImage',
/**
* Docker workspace to be used for scanning.
*/
'dockerWorkspace',
/** @see dockerExecute*/
'dockerEnvVars',
/** @see dockerExecute */
'dockerOptions',
/**
* Whether license compliance is considered and reported as part of the assessment.
* @possibleValues `true`, `false`
*/
'licensingVulnerabilities',
/**
* Limit of parallel jobs being run at once in case of `scanType: 'mta'` based scenarios, defaults to `15`.
*/
'parallelLimit',
/**
* Whether assessment is being done at all, defaults to `true`.
* @possibleValues `true`, `false`
*/
'reporting',
/**
* Whether security compliance is considered and reported as part of the assessment.
* @possibleValues `true`, `false`
*/
'securityVulnerabilities',
/**
* Limit of tolerable CVSS v3 score; issues at or above this score fail the build during assessment, defaults to `-1`.
* @possibleValues `-1` to switch failing off, any `value between 0 and 10` to fail on issues with the specified score or above
*/
'cvssSeverityLimit',
/**
* For `scanType: docker`: defines the docker image which should be scanned
*/
'scanImage',
/**
* For `scanType: docker`: defines the registry where the scanImage is located
*/
'scanImageRegistryUrl',
/**
* List of stashes to be unstashed into the workspace before performing the scan.
*/
'stashContent',
/**
* Timeout in seconds until a HTTP call is forcefully terminated.
*/
'timeout',
/**
* Name of the file the vulnerability report is written to.
*/
'vulnerabilityReportFileName',
/**
* Title of vulnerability report written during the assessment phase.
*/
'vulnerabilityReportTitle'
]
@Field String METADATA_FILE = 'metadata/whitesource.yaml'
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS
//Metadata maintained in file project://resources/metadata/whitesource.yaml
@Field Map CONFIG_KEY_COMPATIBILITY = [
productName : 'whitesourceProductName',
productToken : 'whitesourceProductToken',
projectName : 'whitesourceProjectName',
projectNames : 'whitesourceProjectNames',
productVersion : 'whitesourceProductVersion',
userTokenCredentialsId : 'whitesourceUserTokenCredentialsId',
serviceUrl : 'whitesourceServiceUrl',
agentDownloadUrl : 'fileAgentDownloadUrl',
agentParameters : 'fileAgentParameters',
whitesource : [
orgAdminUserTokenCredentialsId : 'orgAdminUserTokenCredentialsId',
orgToken : 'orgToken',
productName : 'productName',
productToken : 'productToken',
projectName : 'projectName',
projectNames : 'projectNames',
productVersion : 'productVersion',
serviceUrl : 'serviceUrl',
configFilePath : 'configFilePath',
userTokenCredentialsId : 'userTokenCredentialsId',
agentDownloadUrl : 'agentDownloadUrl',
agentFileName : 'agentFileName',
agentParameters : 'agentParameters',
buildDescriptorExcludeList : 'buildDescriptorExcludeList',
buildDescriptorFile : 'buildDescriptorFile',
createProductFromPipeline : 'createProductFromPipeline',
emailAddressesOfInitialProductAdmins : 'emailAddressesOfInitialProductAdmins',
jreDownloadUrl : 'jreDownloadUrl',
licensingVulnerabilities : 'licensingVulnerabilities',
parallelLimit : 'parallelLimit',
reporting : 'reporting',
securityVulnerabilities : 'securityVulnerabilities',
cvssSeverityLimit : 'cvssSeverityLimit',
timeout : 'timeout',
vulnerabilityReportFileName : 'vulnerabilityReportFileName',
vulnerabilityReportTitle : 'vulnerabilityReportTitle',
installCommand : 'installCommand'
]
]
/**
* BETA
*
* With this step [WhiteSource](https://www.whitesourcesoftware.com) security and license compliance scans can be executed and assessed.
*
* WhiteSource is a Software as a Service offering based on a so-called Unified Agent that locally determines the dependency
* tree of a Node.js, Java, Python, Ruby, or Scala based solution and sends it to the WhiteSource server for a policy-based license compliance
* check and for the detection of publicly known vulnerabilities in Free and Open Source Software.
*
* !!! note "Docker Images"
* The underlying Docker images are public and specific to the solution's programming language(s) and therefore may have to be exchanged
* to fit and support the relevant scenario. The default Python environment used is, for instance, Python 3 based.
*
* !!! warn "Restrictions"
* Currently the step contains hardened scan configurations only for `scanType` `'pip'` and `'go'`. Other environments are still being elaborated,
* so please check your results thoroughly and do not take them for granted by default.
* Also, not all environments have been thoroughly tested yet, so you might need to adjust the default containers used or
* create your own ones to adequately support your scenario. To do so, please modify the `dockerImage` and `dockerWorkspace` parameters.
* The step expects an environment containing the programming language related compiler/interpreter as well as the related build tool. For a list
* of the supported build tools per environment please refer to the [WhiteSource Unified Agent Documentation](https://whitesource.atlassian.net/wiki/spaces/WD/pages/804814917/Unified+Agent+Configuration+File+and+Parameters).
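*
* !!! note "Example"
* A minimal usage sketch (the product name and credentials ID used below are illustrative placeholders, not shipped defaults; mandatory settings
* such as `whitesource/serviceUrl` and `whitesource/orgToken` are typically provided via the pipeline's default configuration):
*
* ```groovy
* whitesourceExecuteScan(
*     script: this,
*     scanType: 'maven',
*     whitesource: [
*         productName: 'my-whitesource-product',           // placeholder product name
*         userTokenCredentialsId: 'whitesource-user-token' // placeholder Jenkins credentials ID
*     ]
* )
* ```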
*/
@GenerateDocumentation
void call(Map parameters = [:]) {
handlePipelineStepErrors(stepName: STEP_NAME, stepParameters: parameters) {
def script = checkScript(this, parameters) ?: this
def utils = parameters.juStabUtils ?: new Utils()
String stageName = parameters.stageName ?: env.STAGE_NAME
def descriptorUtils = parameters.descriptorUtilsStub ?: new DescriptorUtils()
def statusCode = 1
final script = checkScript(this, parameters) ?: this
parameters = DownloadCacheUtils.injectDownloadCacheInParameters(script, parameters, BuildTool.MTA)
//initialize CPE for passing whiteSourceProjects
if (script.commonPipelineEnvironment.getValue('whitesourceProjectNames') == null) {
script.commonPipelineEnvironment.setValue('whitesourceProjectNames', [])
}
// load default & individual configuration
Map config = ConfigurationHelper.newInstance(this)
.loadStepDefaults(CONFIG_KEY_COMPATIBILITY, stageName)
.mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS, CONFIG_KEY_COMPATIBILITY)
.mixinStepConfig(script.commonPipelineEnvironment, STEP_CONFIG_KEYS, CONFIG_KEY_COMPATIBILITY)
.mixinStageConfig(script.commonPipelineEnvironment, stageName, STEP_CONFIG_KEYS, CONFIG_KEY_COMPATIBILITY)
.mixin([
style: libraryResource('piper-os.css')
])
.mixin(parameters, PARAMETER_KEYS, CONFIG_KEY_COMPATIBILITY)
.addIfEmpty('scanType', script.commonPipelineEnvironment.getBuildTool())
.use()
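// useGoStep delegates the scan to the go implementation of the step via piperExecuteBin, unless scanType 'unified-agent' forces the Groovy flow below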
if (config.useGoStep == true && config.scanType != "unified-agent") {
parameters = DownloadCacheUtils.injectDownloadCacheInParameters(script, parameters, BuildTool.MTA)
List credentials = [
[type: 'token', id: 'orgAdminUserTokenCredentialsId', env: ['PIPER_orgToken']],
[type: 'token', id: 'userTokenCredentialsId', env: ['PIPER_userToken']],
]
piperExecuteBin(parameters, "whitesourceExecuteScan", "metadata/whitesource.yaml", credentials)
return
}
// Apply Groovy specific config handling if not using the go-step.
config = ConfigurationHelper.newInstance(this, config)
.dependingOn('scanType').mixin('buildDescriptorFile')
.dependingOn('scanType').mixin('dockerImage')
.dependingOn('scanType').mixin('dockerWorkspace')
.dependingOn('scanType').mixin('dockerOptions')
.dependingOn('scanType').mixin('dockerEnvVars')
.dependingOn('scanType').mixin('stashContent')
.dependingOn('scanType').mixin('whitesource/configFilePath')
.dependingOn('scanType').mixin('whitesource/installCommand')
.withMandatoryProperty('whitesource/serviceUrl')
.withMandatoryProperty('whitesource/orgToken')
.withMandatoryProperty('whitesource/userTokenCredentialsId')
.withMandatoryProperty('whitesource/productName')
.addIfEmpty('whitesource/scanImage', script.commonPipelineEnvironment.containerProperties?.imageNameTag)
.addIfEmpty('whitesource/scanImageRegistryUrl', script.commonPipelineEnvironment.containerProperties?.registryUrl)
.use()
config.whitesource.cvssSeverityLimit = config.whitesource.cvssSeverityLimit == null ? null : Integer.valueOf(config.whitesource.cvssSeverityLimit)
config.stashContent = utils.unstashAll(config.stashContent)
config.whitesource['projectNames'] = (config.whitesource['projectNames'] instanceof List) ? config.whitesource['projectNames'] : config.whitesource['projectNames']?.tokenize(',')
parameters.whitesource = parameters.whitesource ?: [:]
parameters.whitesource['projectNames'] = config.whitesource['projectNames']
script.commonPipelineEnvironment.setInfluxStepData('whitesource', false)
utils.pushToSWA([
step : STEP_NAME,
stepParamKey1: 'scanType',
stepParam1 : config.scanType
], config)
echo "Parameters: scanType: ${config.scanType}"
def whitesourceRepository = parameters.whitesourceRepositoryStub ?: new WhitesourceRepository(this, config)
def whitesourceOrgAdminRepository = parameters.whitesourceOrgAdminRepositoryStub ?: new WhitesourceOrgAdminRepository(this, config)
if (config.whitesource.orgAdminUserTokenCredentialsId) {
statusCode = triggerWhitesourceScanWithOrgAdminUserKey(script, config, utils, descriptorUtils, parameters, whitesourceRepository, whitesourceOrgAdminRepository)
} else {
statusCode = triggerWhitesourceScanWithUserKey(script, config, utils, descriptorUtils, parameters, whitesourceRepository, whitesourceOrgAdminRepository)
}
checkStatus(statusCode, config)
script.commonPipelineEnvironment.setInfluxStepData('whitesource', true)
}
}
private def triggerWhitesourceScanWithOrgAdminUserKey(script, config, utils, descriptorUtils, parameters, repository, orgAdminRepository) {
withCredentials ([script.string(
credentialsId: config.whitesource.orgAdminUserTokenCredentialsId,
variable: 'orgAdminUserKey'
)]) {
config.whitesource.orgAdminUserKey = orgAdminUserKey
triggerWhitesourceScanWithUserKey(script, config, utils, descriptorUtils, parameters, repository, orgAdminRepository)
}
}
private def triggerWhitesourceScanWithUserKey(script, config, utils, descriptorUtils, parameters, repository, orgAdminRepository) {
withCredentials ([string(
credentialsId: config.whitesource.userTokenCredentialsId,
variable: 'userKey'
)]) {
config.whitesource.userKey = userKey
def statusCode = 1
echo "Triggering Whitesource scan on product '${config.whitesource.productName}'${config.whitesource.productToken ? ' with token \'' + config.whitesource.productToken + '\'' : ''} using product admin credentials with ID '${config.whitesource.userTokenCredentialsId}'${config.whitesource.orgAdminUserTokenCredentialsId ? ' and organization admin credentials with ID \'' + config.whitesource.orgAdminUserTokenCredentialsId + '\'' : ''}"
if (!config.whitesource.productToken) {
def metaInfo = orgAdminRepository.fetchProductMetaInfo()
def key = "token"
if((null == metaInfo || !metaInfo[key]) && config.whitesource.createProductFromPipeline) {
metaInfo = orgAdminRepository.createProduct()
key = "productToken"
} else if(null == metaInfo || !metaInfo[key]) {
error "[WhiteSource] Could not fetch/find requested product '${config.whitesource.productName}' and automatic creation has been disabled"
}
echo "Meta Info: ${metaInfo}"
config.whitesource.productToken = metaInfo[key]
}
switch (config.scanType) {
case 'mta':
def scanJobs = [:]
def mtaParameters = [:] + parameters + [reporting: false]
// harmonize buildDescriptorExcludeList
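// e.g. the string 'mod1/pom.xml, mod2/package.json' becomes ['mod1/pom.xml', 'mod2/package.json']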
config.buildDescriptorExcludeList = config.buildDescriptorExcludeList instanceof List ? config.buildDescriptorExcludeList : config.buildDescriptorExcludeList?.replaceAll(', ', ',').replaceAll(' ,', ',').tokenize(',')
// create job for each pom.xml with scanType: 'maven'
scanJobs.putAll(MtaMultiplexer.createJobs(
this, mtaParameters, config.buildDescriptorExcludeList, 'Whitesource', 'pom.xml', 'maven'
) { options -> return whitesourceExecuteScan(options) })
// create job for each package.json with scanType: 'npm'
scanJobs.putAll(MtaMultiplexer.createJobs(
this, mtaParameters, config.buildDescriptorExcludeList, 'Whitesource', 'package.json', 'npm'
) { options -> whitesourceExecuteScan(options) })
// create job for each setup.py with scanType: 'pip'
scanJobs.putAll(MtaMultiplexer.createJobs(
this, mtaParameters, config.buildDescriptorExcludeList, 'Whitesource', 'setup.py', 'pip'
) { options -> whitesourceExecuteScan(options) })
// execute scan jobs
if (config.whitesource.parallelLimit > 0 && config.whitesource.parallelLimit < scanJobs.keySet().size()) {
// block wise
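// e.g. with parallelLimit 2 and 5 scan jobs the batches are: jobs 1+2, then 3+4, then 5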
def scanJobsAll = scanJobs
scanJobs = [failFast: false]
for (int i = 1; i <= scanJobsAll.keySet().size(); i++) {
def index = i - 1
def key = scanJobsAll.keySet()[index]
scanJobs[key] = scanJobsAll[key]
if (i % config.whitesource.parallelLimit == 0 || i == scanJobsAll.keySet().size()) {
parallel scanJobs
scanJobs = [failFast: false]
}
}
} else {
// in parallel
scanJobs += [failFast: false]
parallel scanJobs
}
statusCode = 0
break
default:
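// derive the directory containing the build descriptor (defaults to './'); it is used below when extending the Unified Agent configuration file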
def path = config.buildDescriptorFile ? config.buildDescriptorFile.substring(0, config.buildDescriptorFile.lastIndexOf('/') + 1) : './'
resolveProjectIdentifiers(script, descriptorUtils, config)
def projectName = "${config.whitesource.projectName}${config.whitesource.productVersion?' - ':''}${config.whitesource.productVersion?:''}".toString()
if(!config.whitesource['projectNames'].contains(projectName))
config.whitesource['projectNames'].add(projectName)
//share projectNames with other steps
if(!script.commonPipelineEnvironment.getValue('whitesourceProjectNames').contains(projectName))
script.commonPipelineEnvironment.getValue('whitesourceProjectNames').add(projectName)
WhitesourceConfigurationHelper.extendUAConfigurationFile(script, utils, config, path)
dockerExecute(
script: script,
dockerImage: config.dockerImage,
dockerEnvVars: config.dockerEnvVars,
dockerOptions: config.dockerOptions,
dockerWorkspace: config.dockerWorkspace,
stashContent: config.stashContent
) {
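// for scanType 'docker' the image to be scanned is first saved into the workspace via containerSaveImage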
if (config.scanType == 'docker') {
containerSaveImage script: parameters.script, containerImage: config.whitesource.scanImage, containerRegistryUrl: config.whitesource.scanImageRegistryUrl
}
// if agentDownloadUrl is empty, rely on the dockerImage to contain the Unified Agent correctly set up and available
if (config.whitesource.agentDownloadUrl) {
def agentDownloadUrl = new GStringTemplateEngine().createTemplate(config.whitesource.agentDownloadUrl).make([config: config]).toString()
sh "curl ${script.env.HTTP_PROXY ? '--proxy ' + script.env.HTTP_PROXY + ' ' : ''}--location --output ${config.whitesource.agentFileName} ${agentDownloadUrl}".toString()
}
def javaCmd = 'java'
// if jreDownloadUrl is empty, rely on the dockerImage to contain java correctly set up and available on the path
if (config.whitesource.jreDownloadUrl) {
sh "curl ${script.env.HTTP_PROXY ? '--proxy ' + script.env.HTTP_PROXY + ' ' : ''}--location --output jvm.tar.gz ${config.whitesource.jreDownloadUrl} && tar --strip-components=1 -xzf jvm.tar.gz".toString()
javaCmd = './bin/java'
}
if(config.whitesource.installCommand)
sh new GStringTemplateEngine().createTemplate(config.whitesource.installCommand).make([config: config]).toString()
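// assemble the Unified Agent call: <java> -jar <agentFileName> -c '<configFilePath>' [-apiKey ...] [-userKey ...] [-product ...] <agentParameters>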
def options = ["-jar ${config.whitesource.agentFileName} -c \'${config.whitesource.configFilePath}\'"]
if (config.whitesource.orgToken) options.push("-apiKey '${config.whitesource.orgToken}'")
if (config.whitesource.userKey) options.push("-userKey '${config.whitesource.userKey}'")
if (config.whitesource.productName) options.push("-product '${config.whitesource.productName}'")
statusCode = sh(script: "${javaCmd} ${options.join(' ')} ${config.whitesource.agentParameters}", returnStatus: true)
if (config.whitesource.agentDownloadUrl) {
sh "rm -f ${config.whitesource.agentFileName}"
}
if (config.whitesource.jreDownloadUrl) {
sh "rm -rf ./bin ./conf ./legal ./lib ./man"
sh "rm -f jvm.tar.gz"
}
// archive whitesource result files for UA
archiveArtifacts artifacts: "whitesource/*.*", allowEmptyArchive: true
// archive whitesource debug files, if available
archiveArtifacts artifacts: "**/ws-l*", allowEmptyArchive: true
try {
// archive UA log file
sh "cp -Rf --parents /var/log/UA/* ."
archiveArtifacts artifacts: "**/var/log/UA/**/*.log", allowEmptyArchive: true
} catch (e) {
echo "Failed archiving WhiteSource UA logs"
}
}
break
}
if (config.reporting) {
analyseWhitesourceResults(config, repository)
}
return statusCode
}
}
private resolveProjectIdentifiers(script, descriptorUtils, config) {
if (!config.whitesource.projectName || !config.whitesource.productVersion) {
def gav
switch (config.scanType) {
case 'npm':
gav = descriptorUtils.getNpmGAV(config.buildDescriptorFile)
break
case 'sbt':
gav = descriptorUtils.getSbtGAV(config.buildDescriptorFile)
break
case 'pip':
gav = descriptorUtils.getPipGAV(config.buildDescriptorFile)
break
case 'golang':
gav = descriptorUtils.getGoGAV(config.buildDescriptorFile, new URI(script.commonPipelineEnvironment.getGitHttpsUrl()))
break
case 'dub':
gav = descriptorUtils.getDubGAV(config.buildDescriptorFile)
break
case 'maven':
gav = descriptorUtils.getMavenGAV(config.buildDescriptorFile)
break
}
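// example: gav [group: 'com.sap', artifact: 'demo', version: '1.2.3'] yields projectName 'com.sap.demo' and productVersion '1', unless already configured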
if(!config.whitesource.projectName)
config.whitesource.projectName = "${gav.group?:''}${gav.group?'.':''}${gav.artifact}"
def versionFragments = gav.version?.tokenize('.')
def version = versionFragments?.size() > 0 ? versionFragments?.head() : null
if(version && !config.whitesource.productVersion)
config.whitesource.productVersion = version
}
}
void analyseWhitesourceResults(Map config, WhitesourceRepository repository) {
def pdfName = "whitesource-riskReport.pdf"
try {
repository.fetchReportForProduct(pdfName)
archiveArtifacts artifacts: pdfName
echo "A summary of the Whitesource findings was stored as artifact under the name ${pdfName}"
} catch (e) {
echo "[${STEP_NAME}][WARNING] Failed to fetch and archive report ${pdfName}"
}
if(config.whitesource.licensingVulnerabilities) {
def violationCount = fetchViolationCount(config, repository)
checkViolationStatus(violationCount)
}
if (config.whitesource.securityVulnerabilities)
config.whitesource.severeVulnerabilities = checkSecurityViolations(config, repository)
}
int fetchViolationCount(Map config, WhitesourceRepository repository) {
int violationCount = 0
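// with configured projectNames the license alerts are summed up per project, otherwise the product-level alerts are counted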
if (config.whitesource?.projectNames) {
def projectsMeta = repository.fetchProjectsMetaInfo()
for (int i = 0; i < projectsMeta.size(); i++) {
def project = projectsMeta[i]
def responseAlertsProject = repository.fetchProjectLicenseAlerts(project.token)
violationCount += responseAlertsProject.alerts.size()
}
} else {
def responseAlerts = repository.fetchProductLicenseAlerts()
violationCount += responseAlerts.alerts.size()
}
return violationCount
}
void checkViolationStatus(int violationCount) {
if (violationCount == 0) {
echo "[${STEP_NAME}] No policy violations found"
} else {
error "[${STEP_NAME}] Whitesource found ${violationCount} policy violations for your product"
}
}
int checkSecurityViolations(Map config, WhitesourceRepository repository) {
def projectsMetaInformation = repository.fetchProjectsMetaInfo()
def vulnerabilities = repository.fetchVulnerabilities(projectsMetaInformation)
def severeVulnerabilities = 0
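// count alerts whose CVSS v2 or v3 score reaches cvssSeverityLimit; a negative limit disables the count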
for(int i = 0; i < vulnerabilities.size(); i++) {
def item = vulnerabilities.get(i)
if ((item.vulnerability.score >= config.whitesource.cvssSeverityLimit || item.vulnerability.cvss3_score >= config.whitesource.cvssSeverityLimit) && config.whitesource.cvssSeverityLimit >= 0)
severeVulnerabilities++
}
writeFile(file: "${config.vulnerabilityReportFileName}.json", text: new JsonUtils().groovyObjectToPrettyJsonString(vulnerabilities))
writeFile(file: "${config.vulnerabilityReportFileName}.html", text: getReportHtml(config, vulnerabilities, severeVulnerabilities))
archiveArtifacts(artifacts: "${config.vulnerabilityReportFileName}.*")
if (vulnerabilities.size() - severeVulnerabilities > 0)
echo "[${STEP_NAME}] WARNING: ${vulnerabilities.size() - severeVulnerabilities} Open Source Software Security vulnerabilities with CVSS score below ${config.whitesource.cvssSeverityLimit} detected."
if (vulnerabilities.size() == 0)
echo "[${STEP_NAME}] No Open Source Software Security vulnerabilities detected."
return severeVulnerabilities
}
// ExitCodes: https://whitesource.atlassian.net/wiki/spaces/WD/pages/34209870/NPM+Plugin#NPMPlugin-ExitCode
void checkStatus(int statusCode, config) {
def errorMessage = ""
if(config.whitesource.securityVulnerabilities && config.whitesource.severeVulnerabilities > 0)
errorMessage += "${config.whitesource.severeVulnerabilities} Open Source Software Security vulnerabilities with CVSS score greater or equal ${config.whitesource.cvssSeverityLimit} detected. - "
if (config.whitesource.licensingVulnerabilities)
switch (statusCode) {
case 0:
break
case 255:
errorMessage += "The scan resulted in an error"
break
case 254:
errorMessage += "Whitesource found one or multiple policy violations"
break
case 253:
errorMessage += "The local scan client failed to execute the scan"
break
case 252:
errorMessage += "There was a failure in the connection to the WhiteSource servers"
break
case 251:
errorMessage += "The server failed to analyze the scan"
break
case 250:
errorMessage += "Pre-step failure"
break
default:
errorMessage += "Whitesource scan failed with unknown error code '${statusCode}'"
}
if (errorMessage)
error "[${STEP_NAME}] " + errorMessage
}
def getReportHtml(config, vulnerabilityList, numSevereVulns) {
def now = new Date().format('MMM dd, yyyy - HH:mm:ss z', TimeZone.getTimeZone('UTC'))
def vulnerabilityTable = ''
if (vulnerabilityList.size() == 0) {
vulnerabilityTable += '''
<tr>
<td colspan=12> No publicly known vulnerabilities detected </td>
</tr>'''
} else {
for (int i = 0; i < vulnerabilityList.size(); i++) {
def item = vulnerabilityList[i]
def score = item.vulnerability.cvss3_score > 0 ? item.vulnerability.cvss3_score : item.vulnerability.score
def topFix = item.vulnerability.topFix ? "${item.vulnerability.topFix?.message}<br>${item.vulnerability.topFix?.fixResolution}<br><a href=\"${item.vulnerability.topFix?.url}\">${item.vulnerability.topFix?.url}</a>" : ''
vulnerabilityTable += """
<tr>
<td>${i + 1}</td>
<td>${item.date}</td>
<td><a href=\"${item.vulnerability.url}\">${item.vulnerability.name}</a></td>
<td class=\"${score < config.whitesource.cvssSeverityLimit ? 'warn' : 'notok'}\">${score}</td>
<td>${item.vulnerability.cvss3_score > 0 ? 'v3' : 'v2'}</td>
<td>${item.project}</td>
<td>${item.library.filename}</td>
<td>${item.library.groupId}</td>
<td>${item.library.artifactId}</td>
<td>${item.library.version}</td>
<td>${item.vulnerability.description}</td>
<td>${topFix}</td>
</tr>"""
}
}
return GStringTemplateEngine.newInstance().createTemplate(libraryResource('com.sap.piper/templates/whitesourceVulnerabilities.html')).make(
[
now : now,
reportTitle : config.whitesource.vulnerabilityReportTitle,
style : config.style,
cvssSeverityLimit : config.whitesource.cvssSeverityLimit,
totalSevereVulnerabilities : numSevereVulns,
totalVulnerabilities : vulnerabilityList.size(),
vulnerabilityTable : vulnerabilityTable,
whitesourceProductName : config.whitesource.productName,
whitesourceProjectNames : config.whitesource.projectNames
]).toString()
List credentials = [
[type: 'token', id: 'orgAdminUserTokenCredentialsId', env: ['PIPER_orgToken']],
[type: 'token', id: 'userTokenCredentialsId', env: ['PIPER_userToken']],
]
piperExecuteBin(parameters, STEP_NAME, METADATA_FILE, credentials)
}