Mirror of https://github.com/SAP/jenkins-library.git (synced 2024-12-12 10:55:20 +02:00)

Merge pull request #548 from SAP/whitesource-step
whitesourceExecuteScan: Introduce new step for OSS vulnerability scanning

Commit e3cb31da04
@ -6,7 +6,7 @@ import java.util.regex.Matcher
|
||||
import groovy.text.StreamingTemplateEngine
|
||||
|
||||
//
|
||||
// Collects helper functions for rendering the docu
|
||||
// Collects helper functions for rendering the documentation
|
||||
//
|
||||
class TemplateHelper {
|
||||
|
||||
@ -62,15 +62,17 @@ class TemplateHelper {
|
||||
//
|
||||
class Helper {
|
||||
|
||||
static projectRoot = new File(Helper.class.protectionDomain.codeSource.location.path).getParentFile().getParentFile().getParentFile()
|
||||
|
||||
static getConfigHelper(classLoader, roots, script) {
|
||||
|
||||
def compilerConfig = new CompilerConfiguration()
|
||||
compilerConfig.setClasspathList( roots )
|
||||
|
||||
new GroovyClassLoader(classLoader, compilerConfig, true)
|
||||
.parseClass(new File('src/com/sap/piper/ConfigurationHelper.groovy'))
|
||||
.parseClass(new File(projectRoot, 'src/com/sap/piper/ConfigurationHelper.groovy'))
|
||||
.newInstance(script, [:]).loadStepDefaults()
|
||||
}
|
||||
}
|
||||
|
||||
static getPrepareDefaultValuesStep(def gse) {
|
||||
|
||||
@ -80,7 +82,7 @@ class Helper {
|
||||
m, c -> c()
|
||||
}
|
||||
prepareDefaultValuesStep.metaClass.libraryResource {
|
||||
f -> new File("resources/${f}").text
|
||||
f -> new File(projectRoot,"resources/${f}").text
|
||||
}
|
||||
prepareDefaultValuesStep.metaClass.readYaml {
|
||||
m -> new Yaml().load(m.text)
|
||||
@ -100,6 +102,7 @@ class Helper {
|
||||
|
||||
def prepareDefaultValues() {
|
||||
_prepareDefaultValuesStep()
|
||||
|
||||
}
|
||||
|
||||
def run() {
|
||||
@ -174,9 +177,10 @@ class Helper {
|
||||
boolean docu = false,
|
||||
value = false,
|
||||
mandatory = false,
|
||||
parentObject = false,
|
||||
docuEnd = false
|
||||
|
||||
def docuLines = [], valueLines = [], mandatoryLines = []
|
||||
def docuLines = [], valueLines = [], mandatoryLines = [], parentObjectLines = []
|
||||
|
||||
f.eachLine {
|
||||
line ->
|
||||
@ -197,13 +201,17 @@ class Helper {
|
||||
throw new RuntimeException('Cannot retrieve parameter for a comment')
|
||||
}
|
||||
|
||||
def _docu = [], _value = [], _mandatory = [], _parentObject = []
|
||||
docuLines.each { _docu << it }
|
||||
valueLines.each { _value << it }
|
||||
mandatoryLines.each { _mandatory << it }
|
||||
parentObjectLines.each { _parentObject << it }
|
||||
_parentObject << param
|
||||
param = _parentObject*.trim().join('/').trim()
|
||||
|
||||
if(step.parameters[param].docu || step.parameters[param].value)
|
||||
System.err << "[WARNING] There is already some documentation for parameter '${param}. Is this parameter documented twice?'\n"
|
||||
|
||||
def _docu = [], _value = [], _mandatory = []
|
||||
docuLines.each { _docu << it }
|
||||
valueLines.each { _value << it}
|
||||
mandatoryLines.each { _mandatory << it}
|
||||
step.parameters[param].docu = _docu*.trim().join(' ').trim()
|
||||
step.parameters[param].value = _value*.trim().join(' ').trim()
|
||||
step.parameters[param].mandatory = _mandatory*.trim().join(' ').trim()
|
||||
@ -211,6 +219,7 @@ class Helper {
|
||||
docuLines.clear()
|
||||
valueLines.clear()
|
||||
mandatoryLines.clear()
|
||||
parentObjectLines.clear()
|
||||
}
|
||||
|
||||
if( line.trim() ==~ /^\/\*\*.*/ ) {
|
||||
@ -227,11 +236,19 @@ class Helper {
|
||||
if(_line ==~ /.*@possibleValues.*/) {
|
||||
mandatory = false // should be something like reset attributes
|
||||
value = true
|
||||
parentObject = false
|
||||
}
|
||||
// some remark for mandatory e.g. some parameters are only mandatory under certain conditions
|
||||
if(_line ==~ /.*@mandatory.*/) {
|
||||
value = false // should be something like reset attributes ...
|
||||
mandatory = true
|
||||
parentObject = false
|
||||
}
|
||||
// grouping config properties within a parent object for easier readability
|
||||
if(_line ==~ /.*@parentConfigKey.*/) {
|
||||
value = false // should be something like reset attributes ...
|
||||
mandatory = false
|
||||
parentObject = true
|
||||
}
|
||||
|
||||
if(value) {
|
||||
@ -248,7 +265,14 @@ class Helper {
|
||||
}
|
||||
}
|
||||
|
||||
if(! value && ! mandatory) {
|
||||
if(parentObject) {
|
||||
if(_line) {
|
||||
_line = (_line =~ /.*@parentConfigKey\s*?(.*)/)[0][1]
|
||||
parentObjectLines << _line
|
||||
}
|
||||
}
|
||||
|
||||
if(!value && !mandatory && !parentObject) {
|
||||
docuLines << _line
|
||||
}
|
||||
}
|
||||
@ -257,6 +281,7 @@ class Helper {
|
||||
docu = false
|
||||
value = false
|
||||
mandatory = false
|
||||
parentObject = false
|
||||
docuEnd = true
|
||||
}
|
||||
}
|
||||
@ -289,14 +314,33 @@ class Helper {
|
||||
def params = [] as Set
|
||||
f.eachLine {
|
||||
line ->
|
||||
if( line ==~ /.*withMandatoryProperty.*/ ) {
|
||||
def param = (line =~ /.*withMandatoryProperty\('(.*)'/)[0][1]
|
||||
params << param
|
||||
}
|
||||
if (line ==~ /.*withMandatoryProperty.*/) {
|
||||
def param = (line =~ /.*withMandatoryProperty\('(.*)'/)[0][1]
|
||||
params << param
|
||||
}
|
||||
}
|
||||
return params
|
||||
}
|
||||
|
||||
static getParentObjectMappings(File f) {
|
||||
def mappings = [:]
|
||||
def parentObjectKey = ''
|
||||
f.eachLine {
|
||||
line ->
|
||||
if (line ==~ /.*parentConfigKey.*/ && !parentObjectKey) {
|
||||
def param = (line =~ /.*parentConfigKey\s*?(.*)/)[0][1]
|
||||
parentObjectKey = param.trim()
|
||||
} else if (line ==~ /\s*?(.*)[,]{0,1}/ && parentObjectKey) {
|
||||
def pName = retrieveParameterName(line)
|
||||
if(pName) {
|
||||
mappings.put(pName, parentObjectKey)
|
||||
parentObjectKey = ''
|
||||
}
|
||||
}
|
||||
}
|
||||
return mappings
|
||||
}
|
||||
|
||||
static getValue(Map config, def pPath) {
|
||||
def p =config[pPath.head()]
|
||||
if(pPath.size() == 1) return p // there is no tail
|
||||
@ -325,8 +369,8 @@ class Helper {
|
||||
}
|
||||
|
||||
roots = [
|
||||
'vars',
|
||||
'src',
|
||||
new File(Helper.projectRoot, "vars").getAbsolutePath(),
|
||||
new File(Helper.projectRoot, "src").getAbsolutePath()
|
||||
]
|
||||
|
||||
stepsDir = null
|
||||
@ -340,12 +384,12 @@ steps = []
|
||||
if(args.length >= 1)
|
||||
stepsDir = new File(args[0])
|
||||
|
||||
stepsDir = stepsDir ?: new File('vars')
|
||||
stepsDir = stepsDir ?: new File(Helper.projectRoot, "vars")
|
||||
|
||||
if(args.length >= 2)
|
||||
stepsDocuDir = new File(args[1])
|
||||
|
||||
stepsDocuDir = stepsDocuDir ?: new File('documentation/docs/steps')
|
||||
stepsDocuDir = stepsDocuDir ?: new File(Helper.projectRoot, "documentation/docs/steps")
|
||||
|
||||
|
||||
if(args.length >= 3)
|
||||
@ -372,7 +416,7 @@ if( !stepsDir.exists() ) {
|
||||
// sanity checks
|
||||
//
|
||||
|
||||
def gse = new GroovyScriptEngine( [ stepsDir.getName() ] as String[] , getClass().getClassLoader() )
|
||||
def gse = new GroovyScriptEngine([ stepsDir.getAbsolutePath() ] as String[], GenerateDocumentation.class.getClassLoader() )
|
||||
|
||||
//
|
||||
// find all the steps we have to document (if no step has been provided from outside)
|
||||
@ -502,6 +546,20 @@ def handleStep(stepName, prepareDefaultValuesStep, gse) {
|
||||
|
||||
params.addAll(requiredParameters)
|
||||
|
||||
// translate parameter names according to compatibility annotations
|
||||
def parentObjectMappings = Helper.getParentObjectMappings(theStep)
|
||||
def compatibleParams = [] as Set
|
||||
if(parentObjectMappings) {
|
||||
params.each {
|
||||
if (parentObjectMappings[it])
|
||||
compatibleParams.add(parentObjectMappings[it] + '/' + it)
|
||||
else
|
||||
compatibleParams.add(it)
|
||||
}
|
||||
if (compatibleParams)
|
||||
params = compatibleParams
|
||||
}
|
||||
|
||||
def step = [parameters:[:]]
|
||||
|
||||
//
|
||||
@ -533,14 +591,14 @@ def handleStep(stepName, prepareDefaultValuesStep, gse) {
|
||||
required: requiredParameters.contains((it as String)) && defaultValue == null
|
||||
]
|
||||
|
||||
step.parameters.put(it, parameterProperties)
|
||||
step.parameters.put(it, parameterProperties)
|
||||
|
||||
// The scope is only defined for the first level of a hierarchical configuration.
|
||||
// If the first part is found, all nested parameters are allowed with that scope.
|
||||
def firstPart = it.split('/').head()
|
||||
scopedParameters.each { key, val ->
|
||||
parameterProperties.put(key, val.contains(firstPart))
|
||||
}
|
||||
// The scope is only defined for the first level of a hierarchical configuration.
|
||||
// If the first part is found, all nested parameters are allowed with that scope.
|
||||
def firstPart = it.split('/').head()
|
||||
scopedParameters.each { key, val ->
|
||||
parameterProperties.put(key, val.contains(firstPart))
|
||||
}
|
||||
}
|
||||
|
||||
Helper.scanDocu(theStep, step)
|
||||
|
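To illustrate what the generator's annotation handling above consumes, here is a minimal, hypothetical sketch of a documented step parameter. The parameter names and comment texts are illustrative and not part of this change: `@possibleValues` and `@mandatory` feed the value/mandatory columns of the generated documentation, while the new `@parentConfigKey` groups the parameter under a parent object (e.g. `whitesource/serviceUrl`).

```groovy
import groovy.transform.Field

@Field Set PARAMETER_KEYS = [
    /**
     * URL of the WhiteSource backend to talk to.
     * @parentConfigKey whitesource
     * @mandatory only if no default is maintained in the step defaults
     */
    'serviceUrl',
    /**
     * Build tool used by the project.
     * @possibleValues `maven`, `npm`, `pip`
     */
    'scanType'
]
```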
documentation/docs/steps/whitesourceExecuteScan.md (new file, 23 lines)
@@ -0,0 +1,23 @@

# ${docGenStepName}

## ${docGenDescription}

## Prerequisites

Your company has registered an account with WhiteSource, and you have enabled the use of so-called `User Keys` to manage
access to your organization in WhiteSource via dedicated privileges. Scanning your products without adequate user-level
access protection imposed on the WhiteSource backend would allow access based on the organization token alone.

## ${docGenParameters}

## ${docGenConfiguration}

## Exceptions

None

## Examples

```groovy
whitesourceExecuteScan script: this, scanType: 'pip', productName: 'My Whitesource Product', userTokenCredentialsId: 'companyAdminToken', orgAdminUserTokenCredentialsId: 'orgAdminToken', orgToken: 'myWhitesourceOrganizationToken'
```
@@ -38,6 +38,7 @@ nav:
  - transportRequestRelease: steps/transportRequestRelease.md
  - transportRequestUploadFile: steps/transportRequestUploadFile.md
  - uiVeri5ExecuteTests: steps/uiVeri5ExecuteTests.md
  - whitesourceExecuteScan: steps/whitesourceExecuteScan.md
  - 'Scenarios':
    - 'Build and Deploy Hybrid Applications with Jenkins and SAP Solution Manager': scenarios/changeManagement.md
    - 'Build and Deploy SAP UI5 or SAP Fiori Applications on SAP Cloud Platform with Jenkins': scenarios/ui5-sap-cp/Readme.md
@@ -0,0 +1,41 @@
<!DOCTYPE html>
<html>
<head>
    <title>${reportTitle}</title>
    <style type="text/css">${style}</style>
</head>
<body>
    <h1>${reportTitle}</h1>
    <h2>
        <span>
            <i>WhiteSource product name: ${whitesourceProductName}</i><br />
            <i>Filtered project names: ${whitesourceProjectNames?:''}</i>
        </span>
    </h2>
    <div>
        <h3> total number of vulnerabilities: ${totalVulnerabilities}<br />
            total number of high/critical vulnerabilities with CVSS score >= ${cvssSeverityLimit}: ${totalSevereVulnerabilities}
        </h3>
    </div>
    <p>Snapshot taken: ${now}</p>
    <table>
        <tr>
            <th>Entry #</th>
            <th>Date</th>
            <th>CVE</th>
            <th>CVSS Score</th>
            <th>CVSS Version</th>
            <th>Project</th>
            <th>Library file name</th>
            <th>Library group ID</th>
            <th>Library artifact ID</th>
            <th>Library version</th>
            <th>Description</th>
            <th>Top fix</th>
        </tr>

        ${vulnerabilityTable}

    </table>
</body>
</html>
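The `${...}` placeholders above are filled in by the scan step when it renders the report. As a rough sketch only (the template engine, the file paths, and the binding values are assumptions, not shown in this diff), such a template can be rendered with Groovy's `SimpleTemplateEngine`:

```groovy
import groovy.text.SimpleTemplateEngine

// Illustrative binding values; the real step derives these from the scan results.
def binding = [
    reportTitle               : 'WhiteSource Security Vulnerability Report',
    style                     : new File('resources/piper-os.css').text, // stylesheet added later in this PR
    whitesourceProductName    : 'My Whitesource Product',
    whitesourceProjectNames   : 'my-project - 1.0.0',
    totalVulnerabilities      : 2,
    cvssSeverityLimit         : 7.0,
    totalSevereVulnerabilities: 1,
    now                       : new Date().format('yyyy-MM-dd HH:mm:ss'),
    vulnerabilityTable        : '<tr><td colspan="12">no entries</td></tr>'
]

// 'report-template.html' is a placeholder path for the template shown above.
String html = new SimpleTemplateEngine()
    .createTemplate(new File('report-template.html').text)
    .make(binding)
    .toString()
```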
@@ -45,6 +45,8 @@ general:
  #   fsGroup: 1000
  manualConfirmation: true
  productiveBranch: 'master'
  whitesource:
    serviceUrl: 'https://saas.whitesourcesoftware.com/api'

#Steps Specific Configuration
steps:
@@ -282,6 +284,69 @@ steps:
      - 'tests'
  npmExecute:
    dockerImage: 'node:8-stretch'
  whitesourceExecuteScan:
    createProductFromPipeline: true
    emailAddressesOfInitialProductAdmins: []
    buildDescriptorExcludeList: []
    parallelLimit: 15
    licensingVulnerabilities: true
    securityVulnerabilities: true
    cvssSeverityLimit: -1
    reporting: true
    vulnerabilityReportFileName: 'piper_whitesource_vulnerability_report'
    vulnerabilityReportTitle: 'WhiteSource Security Vulnerability Report'
    projectNames: []
    jreDownloadUrl: 'https://github.com/SAP/SapMachine/releases/download/sapmachine-11.0.2/sapmachine-jre-11.0.2_linux-x64_bin.tar.gz'
    agentFileName: 'wss-unified-agent.jar'
    agentDownloadUrl: 'https://github.com/whitesource/unified-agent-distribution/raw/master/standAlone/${config.agentFileName}'
    agentParameters: ''
    configFilePath: './wss-unified-agent.config'
    mta:
      stashContent:
        - 'buildDescriptor'
        - 'opensourceConfiguration'
    maven:
      buildDescriptorFile: './pom.xml'
      dockerImage: 'maven:3.5-jdk-8'
      dockerWorkspace: '/home/java'
      stashContent:
        - 'buildDescriptor'
        - 'opensourceConfiguration'
    npm:
      buildDescriptorFile: './package.json'
      dockerImage: 'node:8-stretch'
      dockerWorkspace: '/home/node'
      stashContent:
        - 'buildDescriptor'
        - 'opensourceConfiguration'
    pip:
      buildDescriptorFile: './setup.py'
      dockerImage: 'python:3.7.2-stretch'
      dockerWorkspace: '/home/python'
      stashContent:
        - 'buildDescriptor'
        - 'opensourceConfiguration'
    golang:
      buildDescriptorFile: './Gopkg.toml'
      dockerImage: 'golang:1.12-stretch'
      dockerWorkspace: '/home/dep'
      stashContent:
        - 'buildDescriptor'
        - 'opensourceConfiguration'
      additionalInstallCommand: >-
        curl --fail https://raw.githubusercontent.com/golang/dep/master/install.sh | sh
        && mkdir -p \$GOPATH/src/${config.whitesource.projectName.substring(0, config.whitesource.projectName.lastIndexOf('/'))}
        && ln -s \$(pwd) \$GOPATH/src/${config.whitesource.projectName}
        && cd \$GOPATH/src/${config.whitesource.projectName} && dep ensure
    sbt:
      buildDescriptorFile: './build.sbt'
      dockerImage: 'hseeberger/scala-sbt:8u181_2.12.8_1.2.8'
      dockerWorkspace: '/home/scala'
      stashContent:
        - 'buildDescriptor'
        - 'opensourceConfiguration'
    verbose: false
    timeout: 0
  pipelineRestartSteps:
    sendMail: true
    timeoutInSeconds: 900
@@ -297,11 +362,11 @@ steps:
    noDefaultExludes: []
  pipelineStashFilesBeforeBuild:
    stashIncludes:
      buildDescriptor: '**/pom.xml, **/.mvn/**, **/assembly.xml, **/.swagger-codegen-ignore, **/package.json, **/requirements.txt, **/setup.py, **/whitesource_config.py, **/mta*.y*ml, **/.npmrc, **/whitesource.*.json, **/whitesource-fs-agent.config, Dockerfile, **/VERSION, **/version.txt, **/build.sbt, **/sbtDescriptor.json, **/project/*'
      buildDescriptor: '**/pom.xml, **/.mvn/**, **/assembly.xml, **/.swagger-codegen-ignore, **/package.json, **/requirements.txt, **/setup.py, **/mta*.y*ml, **/.npmrc, Dockerfile, **/VERSION, **/version.txt, **/Gopkg.*, **/build.sbt, **/sbtDescriptor.json, **/project/*'
      deployDescriptor: '**/manifest*.y*ml, **/*.mtaext.y*ml, **/*.mtaext, **/xs-app.json, helm/**, *.y*ml'
      git: '.git/**'
      opa5: '**/*.*'
      opensourceConfiguration: '**/srcclr.yml, **/vulas-custom.properties, **/.nsprc, **/.retireignore, **/.retireignore.json, **/.snyk'
      opensourceConfiguration: '**/srcclr.yml, **/vulas-custom.properties, **/.nsprc, **/.retireignore, **/.retireignore.json, **/.snyk, **/wss-unified-agent.config, **/vendor/**/*'
      pipelineConfigAndTests: '.pipeline/**'
      securityDescriptor: '**/xs-security.json'
      tests: '**/pom.xml, **/*.json, **/*.xml, **/src/**, **/node_modules/**, **/specs/**, **/env/**, **/*.js, **/tests/**'
resources/piper-os.css (new file, 60 lines)
@@ -0,0 +1,60 @@
body {
    font-family: Arial, Verdana;
}
table {
    border-collapse: collapse;
}
div.code {
    font-family: "Courier New", "Lucida Console";
}
th {
    border-top: 1px solid #ddd;
}
th, td {
    padding: 12px;
    text-align: left;
    border-bottom: 1px solid #ddd;
    border-right: 1px solid #ddd;
}
tr:nth-child(even) {
    background-color: #f2f2f2;
}
.bold {
    font-weight: bold;
}
.nobullets {
    list-style-type: none;
    padding-left: 0;
    padding-bottom: 0;
    margin: 0;
}
.notok {
    background-color: #ffe5e5;
    padding: 5px
}
.warn {
    background-color: #ffff99;
    padding: 5px
}
.ok {
    background-color: #e1f5a9;
    padding: 5px
}
.green {
    color: olivedrab;
}
.red {
    color: orangered;
}
.risk-yellow {
    padding: 5px;
    color: rgba(255, 255, 0, 0.6);
}
.risk-grey {
    background-color: rgba(212, 212, 212, 0.7);
    padding: 5px;
}
.risk-black {
    background-color: rgba(0, 0, 0, 0.75);
    padding: 5px;
}
@ -5,14 +5,17 @@ import com.cloudbees.groovy.cps.NonCPS
|
||||
@API
|
||||
class ConfigurationHelper implements Serializable {
|
||||
|
||||
def static SEPARATOR = '/'
|
||||
|
||||
static ConfigurationHelper newInstance(Script step, Map config = [:]) {
|
||||
new ConfigurationHelper(step, config)
|
||||
}
|
||||
|
||||
ConfigurationHelper loadStepDefaults() {
|
||||
ConfigurationHelper loadStepDefaults(Map compatibleParameters = [:]) {
|
||||
this.step.prepareDefaultValues()
|
||||
this.config = ConfigurationLoader.defaultGeneralConfiguration()
|
||||
mixin(ConfigurationLoader.defaultStepConfiguration(null, name))
|
||||
mixin(ConfigurationLoader.defaultGeneralConfiguration(), null, compatibleParameters)
|
||||
mixin(ConfigurationLoader.defaultStepConfiguration(null, name), null, compatibleParameters)
|
||||
}
|
||||
|
||||
private Map config
|
||||
@ -33,8 +36,8 @@ class ConfigurationHelper implements Serializable {
|
||||
}
|
||||
|
||||
ConfigurationHelper mixinGeneralConfig(commonPipelineEnvironment, Set filter = null, Map compatibleParameters = [:]){
|
||||
Map stepConfiguration = ConfigurationLoader.generalConfiguration([commonPipelineEnvironment: commonPipelineEnvironment])
|
||||
return mixin(stepConfiguration, filter, compatibleParameters)
|
||||
Map generalConfiguration = ConfigurationLoader.generalConfiguration([commonPipelineEnvironment: commonPipelineEnvironment])
|
||||
return mixin(generalConfiguration, filter, compatibleParameters)
|
||||
}
|
||||
|
||||
ConfigurationHelper mixinStageConfig(commonPipelineEnvironment, stageName, Set filter = null, Map compatibleParameters = [:]){
|
||||
@ -58,21 +61,24 @@ class ConfigurationHelper implements Serializable {
|
||||
return this
|
||||
}
|
||||
|
||||
private Map handleCompatibility(Map compatibleParameters, String paramStructure = '', Map configMap ) {
|
||||
private Map handleCompatibility(Map compatibleParameters, String paramStructure = '', Map configMap, Map newConfigMap = [:] ) {
|
||||
Map newConfig = [:]
|
||||
compatibleParameters.each {entry ->
|
||||
if (entry.getValue() instanceof Map) {
|
||||
paramStructure = (paramStructure ? paramStructure + '.' : '') + entry.getKey()
|
||||
newConfig[entry.getKey()] = handleCompatibility(entry.getValue(), paramStructure, configMap)
|
||||
def internalParamStructure = (paramStructure ? paramStructure + '.' : '') + entry.getKey()
|
||||
newConfig[entry.getKey()] = handleCompatibility(entry.getValue(), internalParamStructure, configMap, newConfig)
|
||||
} else {
|
||||
def configSubMap = configMap
|
||||
for(String key in paramStructure.tokenize('.')){
|
||||
configSubMap = configSubMap?.get(key)
|
||||
}
|
||||
if (configSubMap == null || (configSubMap != null && configSubMap[entry.getKey()] == null)) {
|
||||
newConfig[entry.getKey()] = configMap[entry.getValue()]
|
||||
def paramName = (paramStructure ? paramStructure + '.' : '') + entry.getKey()
|
||||
if (configMap[entry.getValue()] != null) {
|
||||
def value = configMap[entry.getValue()]
|
||||
if(null == value)
|
||||
value = newConfigMap[entry.getValue()]
|
||||
if (value != null) {
|
||||
newConfig[entry.getKey()] = value
|
||||
def paramName = (paramStructure ? paramStructure + '.' : '') + entry.getKey()
|
||||
this.step.echo ("[INFO] The parameter '${entry.getValue()}' is COMPATIBLE to the parameter '${paramName}'")
|
||||
}
|
||||
}
|
||||
@ -84,9 +90,16 @@ class ConfigurationHelper implements Serializable {
|
||||
Map dependingOn(dependentKey){
|
||||
return [
|
||||
mixin: {key ->
|
||||
def parts = tokenizeKey(key)
|
||||
def targetMap = config
|
||||
if(parts.size() > 1) {
|
||||
key = parts.last()
|
||||
parts.remove(key)
|
||||
targetMap = getConfigPropertyNested(config, (parts as Iterable).join(SEPARATOR))
|
||||
}
|
||||
def dependentValue = config[dependentKey]
|
||||
if(config[key] == null && dependentValue && config[dependentValue])
|
||||
config[key] = config[dependentValue][key]
|
||||
if(targetMap[key] == null && dependentValue && config[dependentValue])
|
||||
targetMap[key] = config[dependentValue][key]
|
||||
return this
|
||||
}
|
||||
]
|
||||
@ -123,26 +136,28 @@ class ConfigurationHelper implements Serializable {
|
||||
|
||||
/* private */ static getConfigPropertyNested(Map config, key) {
|
||||
|
||||
def separator = '/'
|
||||
List parts = tokenizeKey(key)
|
||||
|
||||
// reason for cast to CharSequence: String#tokenize(./.) causes a deprecation warning.
|
||||
List parts = (key in String) ? (key as CharSequence).tokenize(separator) : ([key] as List)
|
||||
if (config[parts.head()] != null) {
|
||||
|
||||
if(config[parts.head()] != null) {
|
||||
|
||||
if(config[parts.head()] in Map && ! parts.tail().isEmpty()) {
|
||||
return getConfigPropertyNested(config[parts.head()], (parts.tail() as Iterable).join(separator))
|
||||
if (config[parts.head()] in Map && !parts.tail().isEmpty()) {
|
||||
return getConfigPropertyNested(config[parts.head()], (parts.tail() as Iterable).join(SEPARATOR))
|
||||
}
|
||||
|
||||
if (config[parts.head()].class == String) {
|
||||
return (config[parts.head()] as String).trim()
|
||||
}
|
||||
}
|
||||
|
||||
return config[parts.head()]
|
||||
}
|
||||
|
||||
private void existsMandatoryProperty(key, errorMessage) {
|
||||
/* private */ static tokenizeKey(String key) {
|
||||
// reason for cast to CharSequence: String#tokenize(./.) causes a deprecation warning.
|
||||
List parts = (key in String) ? (key as CharSequence).tokenize(SEPARATOR) : ([key] as List)
|
||||
return parts
|
||||
}
|
||||
|
||||
private void existsMandatoryProperty(key, errorMessage) {
|
||||
|
||||
def paramValue = getConfigPropertyNested(config, key)
|
||||
|
||||
|
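The reworked `handleCompatibility` above copies values from legacy (flat) parameter names into their new, possibly nested, location and logs the migration. A minimal sketch of the calling pattern, with illustrative parameter names modeled on the test cases further below:

```groovy
// Called from within a step body, so 'this' is the surrounding pipeline step script.
Map config = ConfigurationHelper.newInstance(this)
    .mixin(
        [whitesourceServiceUrl: 'https://saas.whitesourcesoftware.com/api'], // incoming (legacy) parameter
        null,                                                                // no filter
        [whitesource: [serviceUrl: 'whitesourceServiceUrl']]                 // compatibility map: new (nested) -> old name
    )
    .use()

// config.whitesource.serviceUrl now carries the value of the legacy flat key,
// and an "[INFO] ... is COMPATIBLE to ..." message is echoed.
```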
src/com/sap/piper/DescriptorUtils.groovy (new file, 128 lines)
@@ -0,0 +1,128 @@
package com.sap.piper

import com.cloudbees.groovy.cps.NonCPS
import groovy.transform.Field

import java.util.regex.Matcher
import java.util.regex.Pattern

@Field
def name = Pattern.compile("(.*)name=['\"](.*?)['\"](.*)", Pattern.DOTALL)
@Field
def version = Pattern.compile("(.*)version=['\"](.*?)['\"](.*)", Pattern.DOTALL)
@Field
def method = Pattern.compile("(.*)\\(\\)", Pattern.DOTALL)

def getMavenGAV(file = 'pom.xml') {
    def result = [:]
    def descriptor = readMavenPom(file: file)
    def group = descriptor.getGroupId()
    def artifact = descriptor.getArtifactId()
    def version = descriptor.getVersion()
    result['packaging'] = descriptor.getPackaging()
    result['group'] = (null != group && group.length() > 0) ? group : sh(returnStdout: true, script: "mvn -f ${file} help:evaluate -Dexpression=project.groupId | grep -Ev '(^\\s*\\[|Download|Java\\w+:)'").trim()
    result['artifact'] = (null != artifact && artifact.length() > 0) ? artifact : sh(returnStdout: true, script: "mvn -f ${file} help:evaluate -Dexpression=project.artifactId | grep -Ev '(^\\s*\\[|Download|Java\\w+:)'").trim()
    result['version'] = (null != version && version.length() > 0) ? version : sh(returnStdout: true, script: "mvn -f ${file} help:evaluate -Dexpression=project.version | grep ^[0-9].*").trim()
    echo "loaded ${result} from ${file}"
    return result
}

def getNpmGAV(file = 'package.json') {
    def result = [:]
    def descriptor = readJSON(file: file)

    if (descriptor.name.startsWith('@')) {
        def packageNameArray = descriptor.name.split('/')
        if (packageNameArray.length != 2)
            error "Unable to parse package name '${descriptor.name}'"
        result['group'] = packageNameArray[0]
        result['artifact'] = packageNameArray[1]
    } else {
        result['group'] = ''
        result['artifact'] = descriptor.name
    }
    result['version'] = descriptor.version
    echo "loaded ${result} from ${file}"
    return result
}

def getDlangGAV(file = 'dub.json') {
    def result = [:]
    def descriptor = readJSON(file: file)

    result['group'] = 'com.sap.dlang'
    result['artifact'] = descriptor.name
    result['version'] = descriptor.version
    result['packaging'] = 'tar.gz'
    echo "loaded ${result} from ${file}"
    return result
}

def getSbtGAV(file = 'sbtDescriptor.json') {
    def result = [:]
    def descriptor = readJSON(file: file)

    result['group'] = descriptor.group
    result['artifact'] = descriptor.artifactId
    result['version'] = descriptor.version
    result['packaging'] = descriptor.packaging
    echo "loaded ${result} from ${file}"
    return result
}

def getPipGAV(file = 'setup.py') {
    def result = [:]
    def descriptor = readFile(file: file)

    result['group'] = ''
    result['packaging'] = ''
    result['artifact'] = matches(name, descriptor)
    result['version'] = matches(version, descriptor)

    if (result['version'] == '' || matches(method, result['version'])) {
        file = file.replace('setup.py', 'version.txt')
        result['version'] = getVersionFromFile(file)
    }

    echo "loaded ${result} from ${file}"
    return result
}

def getGoGAV(file = 'Gopkg.toml', URI repoUrl) {
    def name = "${repoUrl.getHost()}${repoUrl.getPath().replaceAll(/\.git/, '')}"
    def path = file.substring(0, file.lastIndexOf('/') + 1)
    def module = path?.replaceAll(/\./, '')?.replaceAll('/', '')
    def result = [:]

    result['group'] = ''
    result['packaging'] = ''
    result['artifact'] = "${name}${module?'.':''}${module?:''}".toString()
    file = path + 'version.txt'
    result['version'] = getVersionFromFile(file)

    if (!result['version']) {
        file = path + 'VERSION'
        result['version'] = getVersionFromFile(file)
    }

    echo "loaded ${result} from ${file}"
    return result
}

private getVersionFromFile(file) {
    try {
        def versionString = readFile(file: file)
        if (versionString) {
            return versionString.trim()
        }
    } catch (java.nio.file.NoSuchFileException e) {
        echo "Failed to load version string from file ${file} due to ${e}"
    }
    return ''
}

@NonCPS
private def matches(regex, input) {
    def m = new Matcher(regex, input)
    return m.matches() ? m.group(2) : ''
}
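The helpers above resolve group/artifact/version information per build tool; they call Jenkins pipeline steps (`readMavenPom`, `readJSON`, `readFile`, `sh`, `echo`), so they only work inside a running pipeline. A hedged usage sketch (the repository URL and the call site are illustrative, not taken from this change):

```groovy
def descriptorUtils = new com.sap.piper.DescriptorUtils()

// Maven: GAV from pom.xml, falling back to `mvn help:evaluate` for inherited values.
def gav = descriptorUtils.getMavenGAV('pom.xml')
echo "scanning ${gav.group}:${gav.artifact}:${gav.version}"

// Go: the artifact name is derived from the repository URL plus the module path,
// the version from version.txt or VERSION next to the build descriptor.
def goGav = descriptorUtils.getGoGAV('./Gopkg.toml', new URI('https://github.com/my-org/my-go-project.git'))
```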
@@ -3,6 +3,11 @@ package com.sap.piper

import com.cloudbees.groovy.cps.NonCPS

@NonCPS
String getPrettyJsonString(object) {
String groovyObjectToPrettyJsonString(object) {
    return groovy.json.JsonOutput.prettyPrint(groovy.json.JsonOutput.toJson(object))
}

@NonCPS
def jsonStringToGroovyObject(text) {
    return new groovy.json.JsonSlurperClassic().parseText(text)
}
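The renamed `groovyObjectToPrettyJsonString` and the new `jsonStringToGroovyObject` form a symmetric pair of converters used by the WhiteSource HTTP calls below; a small round-trip sketch with illustrative values:

```groovy
def jsonUtils = new com.sap.piper.JsonUtils()

def request = [requestType: 'getOrganizationProductVitals', orgToken: 'myWhitesourceOrganizationToken']

// Groovy object -> pretty-printed JSON string (e.g. an HTTP request body)
String body = jsonUtils.groovyObjectToPrettyJsonString(request)

// JSON string -> plain Groovy maps/lists (e.g. an HTTP response body)
def parsed = jsonUtils.jsonStringToGroovyObject(body)
assert parsed.requestType == 'getOrganizationProductVitals'
```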
src/com/sap/piper/WhitesourceConfigurationHelper.groovy (new file, 129 lines)
@@ -0,0 +1,129 @@
package com.sap.piper

import com.cloudbees.groovy.cps.NonCPS

class WhitesourceConfigurationHelper implements Serializable {

    static def extendUAConfigurationFile(script, utils, config, path) {
        def mapping = []
        def parsingClosure = { fileReadPath -> return script.readProperties (file: fileReadPath) }
        def serializationClosure = { configuration -> serializeUAConfig(configuration) }
        def inputFile = config.whitesource.configFilePath.replaceFirst('\\./', '')
        def suffix = utils.generateSha1(config.whitesource.configFilePath)
        def targetFile = "${inputFile}.${suffix}"
        if(config.whitesource.productName.startsWith('DIST - ')) {
            mapping += [
                [name: 'checkPolicies', value: false, force: true],
                [name: 'forceCheckAllDependencies', value: false, force: true]
            ]
        } else {
            mapping += [
                [name: 'checkPolicies', value: true, force: true],
                [name: 'forceCheckAllDependencies', value: true, force: true]
            ]
        }
        if(config.verbose)
            mapping += [name: 'log.level', value: 'debug']

        mapping += [
            [name: 'apiKey', value: config.whitesource.orgToken, force: true],
            [name: 'productName', value: config.whitesource.productName, force: true],
            [name: 'productVersion', value: config.whitesource.productVersion?:'', force: true],
            [name: 'projectName', value: config.whitesource.projectName, force: true],
            [name: 'projectVersion', value: config.whitesource.productVersion?:'', force: true],
            [name: 'productToken', value: config.whitesource.productToken, omitIfPresent: 'projectToken', force: true],
            [name: 'userKey', value: config.whitesource.userKey, force: true],
            [name: 'forceUpdate', value: true, force: true],
            [name: 'offline', value: false, force: true],
            [name: 'ignoreSourceFiles', value: true, force: true],
            [name: 'resolveAllDependencies', value: false, force: true],
            [name: 'failErrorLevel', value: 'ALL', force: true],
            [name: 'case.sensitive.glob', value: false],
            [name: 'followSymbolicLinks', value: true]
        ]

        switch (config.scanType) {
            case 'pip':
                mapping += [
                    [name: 'python.resolveDependencies', value: true, force: true],
                    [name: 'python.ignoreSourceFiles', value: true, force: true],
                    [name: 'python.ignorePipInstallErrors', value: false],
                    [name: 'python.installVirtualenv', value: true],
                    [name: 'python.resolveHierarchyTree', value: true],
                    [name: 'python.requirementsFileIncludes', value: 'requirements.txt'],
                    [name: 'python.resolveSetupPyFiles', value: true],
                    [name: 'python.runPipenvPreStep', value: true],
                    [name: 'python.pipenvDevDependencies', value: true],
                    [name: 'python.IgnorePipenvInstallErrors', value: false],
                    [name: 'includes', value: '**/*.py **/*.txt'],
                    [name: 'excludes', value: '**/*sources.jar **/*javadoc.jar']
                ]
                break
            case 'sbt':
                mapping += [
                    [name: 'sbt.resolveDependencies', value: true, force: true],
                    [name: 'sbt.ignoreSourceFiles', value: true, force: true],
                    [name: 'sbt.aggregateModules', value: false, force: true],
                    [name: 'sbt.runPreStep', value: true],
                    [name: 'includes', value: '**/*.jar'],
                    [name: 'excludes', value: '**/*sources.jar **/*javadoc.jar']
                ]
                break
            case 'golang':
                mapping += [
                    [name: 'go.resolveDependencies', value: true, force: true],
                    [name: 'go.ignoreSourceFiles', value: true, force: true],
                    [name: 'go.collectDependenciesAtRuntime', value: false],
                    [name: 'go.dependencyManager', value: 'dep'],
                    [name: 'includes', value: '**/*.lock'],
                    [name: 'excludes', value: '**/*sources.jar **/*javadoc.jar']
                ]
                break
            default:
                script.echo "[Warning][Whitesource] Configuration for scanType: '${config.scanType}' is not yet hardened, please do a quality assessment of your scan results."
        }

        rewriteConfiguration(script, utils, config, mapping, suffix, path, inputFile, targetFile, parsingClosure, serializationClosure)
    }

    static private def rewriteConfiguration(script, utils, config, mapping, suffix, path, inputFile, targetFile, parsingClosure, serializationClosure) {
        def inputFilePath = "${path}${inputFile}"
        def outputFilePath = "${path}${targetFile}"
        def moduleSpecificFile = parsingClosure(inputFilePath)
        if (!moduleSpecificFile && inputFilePath != config.whitesource.configFilePath)
            moduleSpecificFile = parsingClosure(config.whitesource.configFilePath)
        if (!moduleSpecificFile)
            moduleSpecificFile = [:]

        mapping.each {
            entry ->
                def dependentValue = entry.omitIfPresent ? moduleSpecificFile[entry.omitIfPresent] : null
                if ((entry.omitIfPresent && !dependentValue || !entry.omitIfPresent) && (entry.force || moduleSpecificFile[entry.name] == null) && entry.value != 'null')
                    moduleSpecificFile[entry.name] = entry.value.toString()
        }

        def output = serializationClosure(moduleSpecificFile)

        if(config.verbose)
            script.echo "Writing config file ${outputFilePath} with content:\n${output}"
        script.writeFile file: outputFilePath, text: output
        if(config.stashContent && config.stashContent.size() > 0) {
            def stashName = "modified whitesource config ${suffix}".toString()
            utils.stashWithMessage (
                stashName,
                "Stashing modified Whitesource configuration",
                outputFilePath.replaceFirst('\\./', '')
            )
            config.stashContent += [stashName]
        }
        config.whitesource.configFilePath = outputFilePath
    }

    @NonCPS
    static private def serializeUAConfig(configuration) {
        Properties p = new Properties()
        p.putAll(configuration)

        new StringWriter().with{ w -> p.store(w, null); w }.toString()
    }
}
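`serializeUAConfig` delegates escaping and formatting to `java.util.Properties`, so the rewritten configuration is an ordinary Java properties file that the WhiteSource Unified Agent can read. A stand-alone sketch of the same serialization idea, with purely illustrative values:

```groovy
// Same approach as serializeUAConfig above; values are illustrative.
def configuration = [
    apiKey       : 'myWhitesourceOrganizationToken',
    productName  : 'My Whitesource Product',
    checkPolicies: 'true'
]

Properties p = new Properties()
p.putAll(configuration)

// Properties.store() handles escaping; the first output line is a timestamp comment.
String fileContent = new StringWriter().with { w -> p.store(w, null); w }.toString()
println fileContent
```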
@@ -0,0 +1,108 @@
package com.sap.piper.integration

import com.cloudbees.groovy.cps.NonCPS
import com.sap.piper.JsonUtils

class WhitesourceOrgAdminRepository implements Serializable {

    final Script script
    final internalWhitesource
    final Map config

    WhitesourceOrgAdminRepository(Script script, Map config) {
        this.script = script
        this.config = config
        if(!this.config.whitesource?.serviceUrl && !this.config.whitesourceAccessor)
            script.error "Parameter 'whitesource.serviceUrl' must be provided as part of the configuration."
        if(this.config.whitesourceAccessor instanceof String) {
            def clazz = this.class.classLoader.loadClass(this.config.whitesourceAccessor)
            this.internalWhitesource = clazz?.newInstance(this.script, this.config)
        }
    }

    def fetchProductMetaInfo() {
        def requestBody = [
            requestType: "getOrganizationProductVitals",
            orgToken: config.whitesource.orgToken
        ]
        def parsedResponse = issueHttpRequest(requestBody)

        findProductMeta(parsedResponse)
    }

    def findProductMeta(parsedResponse) {
        def foundMetaProduct = null
        for (product in parsedResponse.productVitals) {
            if (product.name == config.whitesource.productName) {
                foundMetaProduct = product
                break
            }
        }

        return foundMetaProduct
    }

    def createProduct() {
        def requestBody = [
            requestType: "createProduct",
            orgToken: config.whitesource.orgToken,
            productName: config.whitesource.productName
        ]
        def parsedResponse = issueHttpRequest(requestBody)
        def metaInfo = parsedResponse

        def groups = []
        def users = []
        config.whitesource.emailAddressesOfInitialProductAdmins.each {
            email -> users.add(["email": email])
        }

        requestBody = [
            "requestType" : "setProductAssignments",
            "productToken" : metaInfo.productToken,
            "productMembership" : ["userAssignments":[], "groupAssignments":groups],
            "productAdmins" : ["userAssignments":users],
            "alertsEmailReceivers" : ["userAssignments":[]]
        ]
        issueHttpRequest(requestBody)

        return metaInfo
    }

    def issueHttpRequest(requestBody) {
        def response = internalWhitesource ? internalWhitesource.httpWhitesource(requestBody) : httpWhitesource(requestBody)
        def parsedResponse = new JsonUtils().jsonStringToGroovyObject(response.content)
        if(parsedResponse?.errorCode){
            script.error "[WhiteSource] Request failed with error message '${parsedResponse.errorMessage}' (${parsedResponse.errorCode})."
        }
        return parsedResponse
    }

    @NonCPS
    protected def httpWhitesource(requestBody) {
        requestBody["userKey"] = config.whitesource.orgAdminUserKey
        def serializedBody = new JsonUtils().groovyObjectToPrettyJsonString(requestBody)
        def params = [
            url        : config.whitesource.serviceUrl,
            httpMode   : 'POST',
            acceptType : 'APPLICATION_JSON',
            contentType: 'APPLICATION_JSON',
            requestBody: serializedBody,
            quiet      : !config.verbose,
            timeout    : config.whitesource.timeout
        ]

        if (script.env.HTTP_PROXY)
            params["httpProxy"] = script.env.HTTP_PROXY

        if (config.verbose)
            script.echo "Sending http request with parameters ${params}"

        def response = script.httpRequest(params)

        if (config.verbose)
            script.echo "Received response ${response}"

        return response
    }
}
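A brief sketch of how the step presumably drives this admin repository: look the product up by name and create it only when it is missing and `createProductFromPipeline` is enabled. The variable names are illustrative, and the credential handling around `orgAdminUserKey` is omitted:

```groovy
import com.sap.piper.integration.WhitesourceOrgAdminRepository

// 'config' is assumed to carry whitesource.serviceUrl, orgToken, productName,
// emailAddressesOfInitialProductAdmins, orgAdminUserKey and timeout.
def orgAdminRepository = new WhitesourceOrgAdminRepository(this, config)

def productMeta = orgAdminRepository.fetchProductMetaInfo()
if (!productMeta && config.whitesource.createProductFromPipeline) {
    // createProduct also assigns the configured initial product admins.
    productMeta = orgAdminRepository.createProduct()
}
```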
src/com/sap/piper/integration/WhitesourceRepository.groovy (new file, 206 lines)
@@ -0,0 +1,206 @@
package com.sap.piper.integration

import com.cloudbees.groovy.cps.NonCPS
import com.sap.piper.JsonUtils

class WhitesourceRepository implements Serializable {

    final Script script
    final Map config

    WhitesourceRepository(Script script, Map config) {
        this.script = script
        this.config = config

        if(!config?.whitesource?.serviceUrl)
            script.error "Parameter 'whitesource.serviceUrl' must be provided as part of the configuration."
    }

    List fetchVulnerabilities(whitesourceProjectsMetaInformation) {
        def fetchedVulnerabilities = []
        if (config.whitesource.projectNames) {
            for (int i = 0; i < whitesourceProjectsMetaInformation.size(); i++) {
                fetchSecurityAlertsPerItem(whitesourceProjectsMetaInformation[i].token, "getProjectAlertsByType", fetchedVulnerabilities)
            }
        } else {
            fetchSecurityAlertsPerItem(config.whitesource.productToken, "getProductAlertsByType", fetchedVulnerabilities)
        }

        sortVulnerabilitiesByScore(fetchedVulnerabilities)

        return fetchedVulnerabilities
    }

    private fetchSecurityAlertsPerItem(token, type, List<Object> fetchedVulnerabilities) {
        def requestBody = [
            requestType : type,
            alertType   : "SECURITY_VULNERABILITY",
            projectToken: token
        ]

        def response = fetchWhitesourceResource(requestBody)
        fetchedVulnerabilities.addAll(response.alerts)
    }

    protected def fetchWhitesourceResource(Map requestBody) {
        final def response = httpWhitesource(requestBody)
        def parsedResponse = new JsonUtils().jsonStringToGroovyObject(response.content)

        if(parsedResponse?.errorCode){
            script.error "[WhiteSource] Request failed with error message '${parsedResponse.errorMessage}' (${parsedResponse.errorCode})."
        }

        return parsedResponse
    }

    @NonCPS
    void sortLibrariesAlphabeticallyGAV(List libraries) {
        script.echo "found a total of ${libraries.size()} dependencies (direct and indirect)"
        libraries.sort { o1, o2 ->
            String groupID1 = o1.groupId
            String groupID2 = o2.groupId
            def comparisionResult = groupID1 <=> groupID2;

            if (comparisionResult != 0) {
                comparisionResult
            } else {
                String artifactID1 = o1.artifactId
                String artifactID2 = o2.artifactId

                artifactID1 <=> artifactID2
            }
        }
    }

    @NonCPS
    void sortVulnerabilitiesByScore(List vulnerabilities) {
        script.echo "${vulnerabilities.size() > 0 ? 'WARNING: ' : ''}found a total of ${vulnerabilities.size()} vulnerabilities"
        vulnerabilities.sort { o1, o2 ->
            def cvss3score1 = o1.vulnerability.cvss3_score == 0 ? o1.vulnerability.score : o1.vulnerability.cvss3_score
            def cvss3score2 = o2.vulnerability.cvss3_score == 0 ? o2.vulnerability.score : o2.vulnerability.cvss3_score

            def comparisionResult = cvss3score1 <=> cvss3score2

            if (comparisionResult != 0) {
                -comparisionResult
            } else {
                def score1 = o1.vulnerability.score
                def score2 = o2.vulnerability.score

                -(score1 <=> score2)
            }
        }
    }

    List fetchProjectsMetaInfo() {
        def projectsMetaInfo = []
        if(config.whitesource.projectNames){
            def requestBody = [
                requestType: "getProductProjectVitals",
                productToken: config.whitesource.productToken
            ]
            def response = fetchWhitesourceResource(requestBody)

            if(response?.projectVitals) {
                projectsMetaInfo.addAll(findProjectsMeta(response.projectVitals))
            } else {
                script.error "[WhiteSource] Could not fetch any projects for product '${config.whitesource.productName}' from backend, response was ${response}"
            }
        }
        return projectsMetaInfo
    }

    List findProjectsMeta(projectVitals) {
        def matchedProjects = []
        for (int i = 0; i < config.whitesource.projectNames?.size(); i++) {
            def requestedProjectName = config.whitesource.projectNames[i].trim()
            def matchedProjectInfo = null

            for (int j = 0; j < projectVitals.size(); j++) {
                def projectResponse = projectVitals[j]
                if (projectResponse.name == requestedProjectName) {
                    matchedProjectInfo = projectResponse
                    break
                }
            }

            if (matchedProjectInfo != null) {
                matchedProjects.add(matchedProjectInfo)
            } else {
                script.error "[WhiteSource] Could not fetch/find requested project '${requestedProjectName}' for product '${config.whitesource.productName}'"
            }
        }

        return matchedProjects
    }

    void fetchReportForProduct(reportName) {
        def headers = [[name: 'Cache-Control', value: 'no-cache, no-store, must-revalidate'], [name: 'Pragma', value: 'no-cache']]
        def requestContent = [
            requestType: "getProductRiskReport",
            productToken: config.whitesource.productToken
        ]

        //fetchFileFromWhiteSource(reportName, requestContent)
        httpWhitesource(requestContent, 'APPLICATION_OCTETSTREAM', headers, reportName)
    }

    def fetchProductLicenseAlerts() {
        def requestContent = [
            requestType: "getProductAlertsByType",
            alertType: "REJECTED_BY_POLICY_RESOURCE",
            productToken: config.whitesource.productToken
        ]
        def parsedResponse = fetchWhitesourceResource(requestContent)

        return parsedResponse
    }

    def fetchProjectLicenseAlerts(String projectToken) {
        def requestContent = [
            requestType: "getProjectAlertsByType",
            alertType: "REJECTED_BY_POLICY_RESOURCE",
            projectToken: projectToken
        ]
        def parsedResponse = fetchWhitesourceResource(requestContent)

        return parsedResponse
    }

    @NonCPS
    protected def httpWhitesource(requestBody, acceptType = 'APPLICATION_JSON', customHeaders = null, outputFile = null) {
        handleAdditionalRequestParameters(requestBody)
        def serializedBody = new JsonUtils().groovyObjectToPrettyJsonString(requestBody)
        def params = [
            url        : config.whitesource.serviceUrl,
            httpMode   : 'POST',
            acceptType : acceptType,
            contentType: 'APPLICATION_JSON',
            requestBody: serializedBody,
            quiet      : !config.verbose,
            timeout    : config.whitesource.timeout
        ]

        if(customHeaders) params["customHeaders"] = customHeaders

        if (outputFile) params["outputFile"] = outputFile

        if (script.env.HTTP_PROXY) params["httpProxy"] = script.env.HTTP_PROXY

        if(config.verbose)
            script.echo "Sending http request with parameters ${params}"

        def response = script.httpRequest(params)

        if(config.verbose)
            script.echo "Received response ${response}"

        return response
    }

    @NonCPS
    protected void handleAdditionalRequestParameters(params) {
        if(config.whitesource.userKey)
            params["userKey"] = config.whitesource.userKey
    }
}
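A short sketch of how these repository methods fit together when collecting the data for the vulnerability report. The report file name is illustrative and error handling is omitted:

```groovy
import com.sap.piper.integration.WhitesourceRepository

def repository = new WhitesourceRepository(this, config)

// Resolve configured project names (if any) to their project tokens ...
def projectsMeta = repository.fetchProjectsMetaInfo()

// ... fetch the security alerts, sorted by CVSS score in descending order ...
def vulnerabilities = repository.fetchVulnerabilities(projectsMeta)

// ... and optionally download the product risk report to the workspace.
repository.fetchReportForProduct('whitesource-riskReport.pdf')
```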
test/groovy/WhitesourceExecuteScanTest.groovy (new file, 1090 lines)
File diff suppressed because it is too large.
@ -1,12 +1,8 @@
|
||||
package com.sap.piper
|
||||
|
||||
import groovy.test.GroovyAssert
|
||||
|
||||
import static org.hamcrest.Matchers.*
|
||||
import static org.junit.Assert.assertEquals
|
||||
import static org.junit.Assert.assertThat
|
||||
|
||||
import org.hamcrest.Matchers
|
||||
import org.junit.Assert
|
||||
import org.junit.Rule
|
||||
import org.junit.Test
|
||||
@ -16,6 +12,8 @@ class ConfigurationHelperTest {
|
||||
|
||||
Script mockScript = new Script() {
|
||||
|
||||
def prepareDefaultValues() {}
|
||||
|
||||
def run() {
|
||||
// it never runs
|
||||
throw new UnsupportedOperationException()
|
||||
@ -92,6 +90,97 @@ class ConfigurationHelperTest {
|
||||
Assert.assertThat(config, not(hasKey('property3')))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testConfigurationHelperLoadingStepDefaults() {
|
||||
Set filter = ['property2']
|
||||
Map config = ConfigurationHelper.newInstance(mockScript, [property1: '27'])
|
||||
.loadStepDefaults()
|
||||
.mixinGeneralConfig([configuration:[general: ['general': 'test', 'oldGeneral': 'test2']]], null, [general2: 'oldGeneral'])
|
||||
.mixinStageConfig([configuration:[stages:[testStage:['stage': 'test', 'oldStage': 'test2']]]], 'testStage', null, [stage2: 'oldStage'])
|
||||
.mixinStepConfig([configuration:[steps:[mock: [step: 'test', 'oldStep': 'test2']]]], null, [step2: 'oldStep'])
|
||||
.mixin([property1: '41', property2: '28', property3: '29'], filter)
|
||||
.use()
|
||||
// asserts
|
||||
Assert.assertThat(config, not(hasEntry('property1', '27')))
|
||||
Assert.assertThat(config, hasEntry('property2', '28'))
|
||||
Assert.assertThat(config, hasEntry('general', 'test'))
|
||||
Assert.assertThat(config, hasEntry('general2', 'test2'))
|
||||
Assert.assertThat(config, hasEntry('stage', 'test'))
|
||||
Assert.assertThat(config, hasEntry('stage2', 'test2'))
|
||||
Assert.assertThat(config, hasEntry('step', 'test'))
|
||||
Assert.assertThat(config, hasEntry('step2', 'test2'))
|
||||
Assert.assertThat(config, not(hasKey('property3')))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testConfigurationHelperAddIfEmpty() {
|
||||
Map config = ConfigurationHelper.newInstance(mockScript, [:])
|
||||
.mixin([property1: '41', property2: '28', property3: '29', property4: ''])
|
||||
.addIfEmpty('property3', '30')
|
||||
.addIfEmpty('property4', '40')
|
||||
.addIfEmpty('property5', '50')
|
||||
.use()
|
||||
// asserts
|
||||
Assert.assertThat(config, hasEntry('property1', '41'))
|
||||
Assert.assertThat(config, hasEntry('property2', '28'))
|
||||
Assert.assertThat(config, hasEntry('property3', '29'))
|
||||
Assert.assertThat(config, hasEntry('property4', '40'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testConfigurationHelperAddIfNull() {
|
||||
Map config = ConfigurationHelper.newInstance(mockScript, [:])
|
||||
.mixin([property1: '29', property2: '', property3: null])
|
||||
.addIfNull('property1', '30')
|
||||
.addIfNull('property2', '30')
|
||||
.addIfNull('property3', '30')
|
||||
.addIfNull('property4', '30')
|
||||
.use()
|
||||
// asserts
|
||||
Assert.assertThat(config, hasEntry('property1', '29'))
|
||||
Assert.assertThat(config, hasEntry('property2', ''))
|
||||
Assert.assertThat(config, hasEntry('property3', '30'))
|
||||
Assert.assertThat(config, hasEntry('property4', '30'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testConfigurationHelperDependingOn() {
|
||||
Map config = ConfigurationHelper.newInstance(mockScript, [:])
|
||||
.mixin([deep: [deeper: 'test'], scanType: 'maven', maven: [path: 'test2']])
|
||||
.dependingOn('scanType').mixin('deep/path')
|
||||
.use()
|
||||
// asserts
|
||||
Assert.assertThat(config, hasKey('deep'))
|
||||
Assert.assertThat(config.deep, allOf(hasEntry('deeper', 'test'), hasEntry('path', 'test2')))
|
||||
Assert.assertThat(config, hasEntry('scanType', 'maven'))
|
||||
Assert.assertThat(config, hasKey('maven'))
|
||||
Assert.assertThat(config.maven, hasEntry('path', 'test2'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testConfigurationHelperWithPropertyInValues() {
|
||||
ConfigurationHelper.newInstance(mockScript, [:])
|
||||
.mixin([test: 'allowed'])
|
||||
.withPropertyInValues('test', ['allowed', 'allowed2'] as Set)
|
||||
.use()
|
||||
}
|
||||
|
||||
@Test
|
||||
void testConfigurationHelperWithPropertyInValuesException() {
|
||||
def errorCaught = false
|
||||
try {
|
||||
ConfigurationHelper.newInstance(mockScript, [:])
|
||||
.mixin([test: 'disallowed'])
|
||||
.withPropertyInValues('test', ['allowed', 'allowed2'] as Set)
|
||||
.use()
|
||||
} catch (e) {
|
||||
errorCaught = true
|
||||
assertThat(e, isA(IllegalArgumentException))
|
||||
assertThat(e.getMessage(), is('Invalid test = \'disallowed\'. Valid \'test\' values are: [allowed, allowed2].'))
|
||||
}
|
||||
assertThat(errorCaught, is(true))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testConfigurationLoaderWithBooleanValue() {
|
||||
Map config = ConfigurationHelper.newInstance(mockScript, [property1: '27'])
|
||||
@ -196,6 +285,17 @@ class ConfigurationHelperTest {
|
||||
Assert.assertThat(configuration.newStructure.new1, is(null))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testHandleCompatibilityPremigratedValues() {
|
||||
def configuration = ConfigurationHelper.newInstance(mockScript, [old1: null, test: 'testValue'])
|
||||
.mixin([someValueToMigrate: 'testValue2'], null, [someValueToMigrateSecondTime: 'someValueToMigrate', newStructure: [new1: 'old1', new2: 'someValueToMigrateSecondTime']])
|
||||
.use()
|
||||
|
||||
Assert.assertThat(configuration.size(), is(4))
|
||||
Assert.assertThat(configuration.newStructure.new1, is(null))
|
||||
Assert.assertThat(configuration.newStructure.new2, is('testValue2'))
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testWithMandoryParameterReturnDefaultFailureMessage() {
|
||||
|
||||
|
229
test/groovy/com/sap/piper/DescriptorUtilsTest.groovy
Normal file
229
test/groovy/com/sap/piper/DescriptorUtilsTest.groovy
Normal file
@ -0,0 +1,229 @@
|
||||
package com.sap.piper
|
||||
|
||||
import hudson.AbortException
|
||||
import org.junit.Before
|
||||
import org.junit.Rule
|
||||
import org.junit.Test
|
||||
import org.junit.rules.RuleChain
|
||||
import util.BasePiperTest
|
||||
import util.JenkinsEnvironmentRule
|
||||
import util.JenkinsErrorRule
|
||||
import util.JenkinsLoggingRule
|
||||
import util.JenkinsSetupRule
|
||||
import util.LibraryLoadingTestExecutionListener
|
||||
import util.Rules
|
||||
|
||||
import static org.hamcrest.Matchers.is
|
||||
import static org.junit.Assert.assertEquals
|
||||
import static org.junit.Assert.assertThat
|
||||
import static org.hamcrest.core.Is.*
|
||||
|
||||
class DescriptorUtilsTest extends BasePiperTest {
|
||||
|
||||
@Rule
|
||||
public JenkinsErrorRule errorRule = new JenkinsErrorRule(this)
|
||||
@Rule
|
||||
public JenkinsEnvironmentRule environmentRule = new JenkinsEnvironmentRule(this)
|
||||
@Rule
|
||||
public JenkinsSetupRule setUpRule = new JenkinsSetupRule(this)
|
||||
@Rule
|
||||
public JenkinsLoggingRule loggingRule = new JenkinsLoggingRule(this)
|
||||
|
||||
@Rule
|
||||
public RuleChain ruleChain = Rules.getCommonRules(this)
|
||||
.around(loggingRule)
|
||||
|
||||
DescriptorUtils descriptorUtils
|
||||
|
||||
@Before
|
||||
void init() throws Exception {
|
||||
descriptorUtils = new DescriptorUtils()
|
||||
LibraryLoadingTestExecutionListener.prepareObjectInterceptors(descriptorUtils)
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetNpmGAVSapArtifact() {
|
||||
|
||||
helper.registerAllowedMethod("readJSON", [Map.class], {
|
||||
searchConfig ->
|
||||
def packageJsonFile = new File("test/resources/DescriptorUtils/npm/${searchConfig.file}")
|
||||
return new JsonUtils().jsonStringToGroovyObject(packageJsonFile.text)
|
||||
})
|
||||
|
||||
def gav = descriptorUtils.getNpmGAV('package2.json')
|
||||
|
||||
assertEquals(gav.group, '')
|
||||
assertEquals(gav.artifact, 'some-test')
|
||||
assertEquals(gav.version, '1.2.3')
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetNpmGAV() {
|
||||
|
||||
helper.registerAllowedMethod("readJSON", [Map.class], {
|
||||
searchConfig ->
|
||||
def packageJsonFile = new File("test/resources/DescriptorUtils/npm/${searchConfig.file}")
|
||||
return new JsonUtils().jsonStringToGroovyObject(packageJsonFile.text)
|
||||
})
|
||||
|
||||
def gav = descriptorUtils.getNpmGAV('package.json')
|
||||
|
||||
assertEquals(gav.group, '@sap')
|
||||
assertEquals(gav.artifact, 'hdi-deploy')
|
||||
assertEquals(gav.version, '2.3.0')
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetNpmGAVSapArtifactError() {
|
||||
|
||||
helper.registerAllowedMethod("readJSON", [Map.class], {
|
||||
searchConfig ->
|
||||
def packageJsonFile = new File("test/resources/DescriptorUtils/npm/${searchConfig.file}")
|
||||
return new JsonUtils().jsonStringToGroovyObject(packageJsonFile.text)
|
||||
})
|
||||
|
||||
def errorCaught = false
|
||||
try {
|
||||
descriptorUtils.getNpmGAV('package3.json')
|
||||
} catch (e) {
|
||||
errorCaught = true
|
||||
assertThat(e, isA(AbortException.class))
|
||||
assertThat(e.getMessage(), is("Unable to parse package name '@someerror'"))
|
||||
}
|
||||
assertThat(errorCaught, is(true))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetSbtGAV() {
|
||||
|
||||
helper.registerAllowedMethod("readJSON", [Map.class], {
|
||||
searchConfig ->
|
||||
def packageJsonFile = new File("test/resources/DescriptorUtils/sbt/${searchConfig.file}")
|
||||
return new JsonUtils().jsonStringToGroovyObject(packageJsonFile.text)
|
||||
})
|
||||
|
||||
def gav = descriptorUtils.getSbtGAV('sbtDescriptor.json')
|
||||
|
||||
assertEquals(gav.group, 'sap')
|
||||
assertEquals(gav.artifact, 'hdi-deploy')
|
||||
assertEquals(gav.packaging, 'test')
|
||||
assertEquals(gav.version, '2.3.0')
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetDlangGAV() {
|
||||
|
||||
helper.registerAllowedMethod("readJSON", [Map.class], {
|
||||
searchConfig ->
|
||||
def packageJsonFile = new File("test/resources/DescriptorUtils/dlang/${searchConfig.file}")
|
||||
return new JsonUtils().jsonStringToGroovyObject(packageJsonFile.text)
|
||||
})
|
||||
|
||||
def gav = descriptorUtils.getDlangGAV('dub.json')
|
||||
|
||||
assertEquals(gav.group, 'com.sap.dlang')
|
||||
assertEquals(gav.artifact, 'hdi-deploy')
|
||||
assertEquals(gav.version, '2.3.0')
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetPipGAV() {
|
||||
|
||||
helper.registerAllowedMethod("readFile", [Map.class], {
|
||||
map ->
|
||||
def descriptorFile = new File("test/resources/utilsTest/${map.file}")
|
||||
return descriptorFile.text
|
||||
})
|
||||
|
||||
def gav = descriptorUtils.getPipGAV('setup.py')
|
||||
|
||||
assertEquals('', gav.group)
|
||||
assertEquals('py_connect', gav.artifact)
|
||||
assertEquals('1.0', gav.version)
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetPipGAVFromVersionTxt() {
|
||||
|
||||
helper.registerAllowedMethod("readFile", [Map.class], {
|
||||
map ->
|
||||
def descriptorFile = new File("test/resources/DescriptorUtils/pip/${map.file}")
|
||||
return descriptorFile.text
|
||||
})
|
||||
|
||||
def gav = descriptorUtils.getPipGAV('setup.py')
|
||||
|
||||
assertEquals('', gav.group)
|
||||
assertEquals('some-test', gav.artifact)
|
||||
assertEquals('1.0.0-SNAPSHOT', gav.version)
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetMavenGAVComplete() {
|
||||
|
||||
helper.registerAllowedMethod("readMavenPom", [Map.class], {
|
||||
searchConfig ->
|
||||
return new Object(){
|
||||
def groupId = 'test.group', artifactId = 'test-artifact', version = '1.2.4', packaging = 'jar'
|
||||
}
|
||||
})
|
||||
|
||||
def gav = descriptorUtils.getMavenGAV('pom.xml')
|
||||
|
||||
assertEquals(gav.group, 'test.group')
|
||||
assertEquals(gav.artifact, 'test-artifact')
|
||||
assertEquals(gav.version, '1.2.4')
|
||||
assertEquals(gav.packaging, 'jar')
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetMavenGAVPartial() {
|
||||
def parameters = []
|
||||
|
||||
helper.registerAllowedMethod("readMavenPom", [Map.class], {
|
||||
searchConfig ->
|
||||
return new Object(){
|
||||
def groupId = null, artifactId = null, version = null, packaging = 'jar'
|
||||
}
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod("sh", [Map.class], {
|
||||
mvnHelpCommand ->
|
||||
def scriptCommand = mvnHelpCommand['script']
|
||||
parameters.add(scriptCommand)
|
||||
if(scriptCommand.contains('project.groupId'))
|
||||
return 'test.group'
|
||||
if(scriptCommand.contains('project.artifactId'))
|
||||
return 'test-artifact'
|
||||
if(scriptCommand.contains('project.version'))
|
||||
return '1.2.4'
|
||||
})
|
||||
|
||||
def gav = descriptorUtils.getMavenGAV('pom.xml')
|
||||
|
||||
assertEquals(gav.group, 'test.group')
|
||||
assertEquals(gav.artifact, 'test-artifact')
|
||||
assertEquals(gav.version, '1.2.4')
|
||||
assertEquals(gav.packaging, 'jar')
|
||||
}
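The mocked sh call above suggests that missing POM coordinates are resolved by shelling out to Maven. A hedged sketch of such a fallback, assuming the maven-help-plugin is used; here script stands for the pipeline script object and pom for the result of readMavenPom, and the exact command line used by DescriptorUtils may differ:

def evaluateExpression = { script, String expression ->
    script.sh(returnStdout: true,
        script: "mvn -f pom.xml help:evaluate -Dexpression=${expression} -q -DforceStdout").trim()
}
def group = pom.groupId ?: evaluateExpression(script, 'project.groupId')
def artifact = pom.artifactId ?: evaluateExpression(script, 'project.artifactId')
def version = pom.version ?: evaluateExpression(script, 'project.version')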
|
||||
|
||||
@Test
|
||||
void testGetGoGAV() {
|
||||
|
||||
helper.registerAllowedMethod("readFile", [Map.class], {
|
||||
map ->
|
||||
def path = 'test/resources/DescriptorUtils/go/' + map.file.substring(map.file.lastIndexOf('/') + 1, map.file.length())
|
||||
def descriptorFile = new File(path)
|
||||
if(descriptorFile.exists())
|
||||
return descriptorFile.text
|
||||
else
|
||||
return null
|
||||
})
|
||||
|
||||
def gav = descriptorUtils.getGoGAV('./myProject/Gopkg.toml', new URI('https://github.wdf.sap.corp/test/golang'))
|
||||
|
||||
assertEquals('', gav.group)
|
||||
assertEquals('github.wdf.sap.corp/test/golang.myProject', gav.artifact)
|
||||
assertEquals('1.2.3', gav.version)
|
||||
}
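The expected artifact name in this test hints at how the Go coordinates are presumably derived from the git URI and the module folder. A rough sketch under that assumption, tailored to this test's input (the real implementation handles more cases):

def gitUri = new URI('https://github.wdf.sap.corp/test/golang')
def descriptorPath = './myProject/Gopkg.toml'
def moduleDir = descriptorPath.tokenize('/')[-2]             // 'myProject'
def artifact = "${gitUri.host}${gitUri.path}.${moduleDir}"   // host + repo path + module folder
assert artifact == 'github.wdf.sap.corp/test/golang.myProject'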
|
||||
}
|
@ -0,0 +1,190 @@
|
||||
package com.sap.piper
|
||||
|
||||
import org.junit.Before
|
||||
import org.junit.Rule
|
||||
import org.junit.Test
|
||||
import org.junit.rules.RuleChain
|
||||
import util.BasePiperTest
|
||||
import util.JenkinsLoggingRule
|
||||
import util.JenkinsReadFileRule
|
||||
import util.JenkinsWriteFileRule
|
||||
import util.Rules
|
||||
|
||||
import static org.hamcrest.Matchers.containsString
|
||||
import static org.hamcrest.Matchers.hasItem
|
||||
import static org.hamcrest.Matchers.not
|
||||
import static org.hamcrest.Matchers.allOf
|
||||
import static org.junit.Assert.assertThat
|
||||
|
||||
class WhitesourceConfigurationHelperTest extends BasePiperTest {
|
||||
JenkinsReadFileRule jrfr = new JenkinsReadFileRule(this, 'test/resources/utilsTest/')
|
||||
JenkinsWriteFileRule jwfr = new JenkinsWriteFileRule(this)
|
||||
JenkinsLoggingRule jlr = new JenkinsLoggingRule(this)
|
||||
|
||||
@Rule
|
||||
public RuleChain ruleChain = Rules
|
||||
.getCommonRules(this)
|
||||
.around(jrfr)
|
||||
.around(jwfr)
|
||||
.around(jlr)
|
||||
|
||||
@Before
|
||||
void init() {
|
||||
def p = new Properties()
|
||||
p.put("log.level", "debug")
|
||||
helper.registerAllowedMethod('readProperties', [Map], {return p})
|
||||
}
|
||||
|
||||
@Test
|
||||
void testExtendConfigurationFileUnifiedAgentEmptyConfig() {
|
||||
helper.registerAllowedMethod('readProperties', [Map], {return new Properties()})
|
||||
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'none', whitesource: [configFilePath: './config',serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'DIST - name1', productToken: '1234', userKey: '0000']], "./")
|
||||
assertThat(jwfr.files['./config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
|
||||
allOf(
|
||||
not(containsString("log.level=debug")),
|
||||
containsString("apiKey=abcd"),
|
||||
containsString("productName=DIST - name1"),
|
||||
containsString("productToken=1234"),
|
||||
containsString("userKey=0000")
|
||||
)
|
||||
)
|
||||
|
||||
assertThat(jlr.log, containsString("[Whitesource] Configuration for scanType: 'none' is not yet hardened, please do a quality assessment of your scan results."))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testExtendConfigurationFileUnifiedAgentConfigDeeper() {
|
||||
helper.registerAllowedMethod('readProperties', [Map], { m -> if (!m.file.contains('testModule')) return new Properties() else return null })
|
||||
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'none', whitesource: [configFilePath: './config',serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'DIST - name1', productToken: '1234', userKey: '0000']], "./testModule/")
|
||||
assertThat(jwfr.files['./testModule/config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
|
||||
allOf(
|
||||
not(containsString("log.level=debug")),
|
||||
containsString("apiKey=abcd"),
|
||||
containsString("productName=DIST - name1"),
|
||||
containsString("productToken=1234"),
|
||||
containsString("userKey=0000")
|
||||
)
|
||||
)
|
||||
|
||||
assertThat(jlr.log, containsString("[Whitesource] Configuration for scanType: 'none' is not yet hardened, please do a quality assessment of your scan results."))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testExtendConfigurationFileUnifiedAgentMaven() {
|
||||
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'none', whitesource: [configFilePath: './config',serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'DIST - name1', productToken: '1234', userKey: '0000']], "./")
|
||||
assertThat(jwfr.files['./config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
|
||||
allOf(
|
||||
containsString("apiKey=abcd"),
|
||||
containsString("productName=DIST - name1"),
|
||||
containsString("productToken=1234"),
|
||||
containsString("userKey=0000")
|
||||
)
|
||||
)
|
||||
|
||||
assertThat(jlr.log, containsString("[Whitesource] Configuration for scanType: 'none' is not yet hardened, please do a quality assessment of your scan results."))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testExtendConfigurationFileUnifiedAgentNpm() {
|
||||
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'npm', whitesource: [configFilePath: './config',serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'DIST - name1', productToken: '1234', userKey: '0000']], "./")
|
||||
assertThat(jwfr.files['./config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
|
||||
allOf(
|
||||
containsString("apiKey=abcd"),
|
||||
containsString("productName=DIST - name1"),
|
||||
containsString("productToken=1234"),
|
||||
containsString("userKey=0000")
|
||||
)
|
||||
)
|
||||
|
||||
assertThat(jlr.log, containsString("[Whitesource] Configuration for scanType: 'npm' is not yet hardened, please do a quality assessment of your scan results."))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testExtendConfigurationFileUnifiedAgentSbt() {
|
||||
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'sbt', whitesource: [configFilePath: './config',serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'DIST - name1', productToken: '1234', userKey: '0000']], "./")
|
||||
assertThat(jwfr.files['./config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
|
||||
allOf(
|
||||
containsString("apiKey=abcd"),
|
||||
containsString("productName=DIST - name1"),
|
||||
containsString("productToken=1234"),
|
||||
containsString("userKey=0000"),
|
||||
containsString("sbt.resolveDependencies=true"),
|
||||
containsString("log.level=debug")
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
@Test
|
||||
void testExtendConfigurationFileUnifiedAgentDlang() {
|
||||
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'dlang', whitesource: [configFilePath: './config',serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'DIST - name1', productToken: '1234', userKey: '0000']], "./")
|
||||
assertThat(jwfr.files['./config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
|
||||
allOf(
|
||||
containsString("apiKey=abcd"),
|
||||
containsString("productName=DIST - name1"),
|
||||
containsString("productToken=1234"),
|
||||
containsString("userKey=0000")
|
||||
)
|
||||
)
|
||||
|
||||
assertThat(jlr.log, containsString("[Whitesource] Configuration for scanType: 'dlang' is not yet hardened, please do a quality assessment of your scan results."))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testExtendConfigurationFileUnifiedAgentPip() {
|
||||
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'pip', whitesource: [configFilePath: './config',serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'DIST - name1', productToken: '1234', userKey: '0000']], "./")
|
||||
assertThat(jwfr.files['./config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
|
||||
allOf(
|
||||
containsString("apiKey=abcd"),
|
||||
containsString("productName=DIST - name1"),
|
||||
containsString("productToken=1234"),
|
||||
containsString("userKey=0000"),
|
||||
containsString("python.resolveDependencies=true")
|
||||
)
|
||||
)
|
||||
|
||||
assertThat(jlr.log, not(containsString("[Whitesource] Configuration for scanType: 'pip' is not yet hardened, please do a quality assessment of your scan results.")))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testExtendConfigurationFileUnifiedAgentGolangVerbose() {
|
||||
def config = [scanType: 'golang', whitesource: [configFilePath: './config', serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'SHC - name2', productToken: '1234', userKey: '0000'], stashContent: ['some', 'stashes'], verbose: true]
|
||||
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, config, "./")
|
||||
assertThat(jwfr.files['./config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
|
||||
allOf(
|
||||
containsString("apiKey=abcd"),
|
||||
containsString("productName=SHC - name2"),
|
||||
containsString("productToken=1234"),
|
||||
containsString("userKey=0000"),
|
||||
containsString("go.resolveDependencies=true"),
|
||||
containsString("log.level=debug")
|
||||
)
|
||||
)
|
||||
|
||||
assertThat(config.stashContent, hasItem(containsString('modified whitesource config ')))
|
||||
assertThat(jlr.log, not(containsString("[Warning][Whitesource] Configuration for scanType: 'golang' is not yet hardened, please do a quality assessment of your scan results.")))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testExtendConfigurationFileUnifiedAgentEnforcement() {
|
||||
def p = new Properties()
|
||||
p.putAll(['python.resolveDependencies': 'false', 'python.ignoreSourceFiles': 'false', 'python.ignorePipInstallErrors': 'true','python.installVirtualenv': 'false'])
|
||||
helper.registerAllowedMethod('readProperties', [Map], {return p})
|
||||
|
||||
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'pip', whitesource: [configFilePath: './config', serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'cdfg', productName: 'name', productToken: '1234', userKey: '0000'], verbose: true], "./")
|
||||
assertThat(jwfr.files['./config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
|
||||
allOf(
|
||||
containsString("apiKey=cdfg"),
|
||||
containsString("productName=name"),
|
||||
containsString("productToken=1234"),
|
||||
containsString("userKey=0000"),
|
||||
containsString("python.resolveDependencies=true"),
|
||||
containsString("log.level=debug"),
|
||||
containsString("python.resolveDependencies=true"),
|
||||
containsString("python.ignoreSourceFiles=true"),
|
||||
containsString("python.ignorePipInstallErrors=true"),
|
||||
containsString("python.installVirtualenv=false")
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,281 @@
|
||||
package com.sap.piper.integration
|
||||
|
||||
import hudson.AbortException
|
||||
import org.junit.After
|
||||
import org.junit.Before
|
||||
import org.junit.Rule
|
||||
import org.junit.Test
|
||||
import org.junit.rules.ExpectedException
|
||||
import org.junit.rules.RuleChain
|
||||
import util.BasePiperTest
|
||||
import util.JenkinsEnvironmentRule
|
||||
import util.JenkinsErrorRule
|
||||
import util.JenkinsLoggingRule
|
||||
import util.LibraryLoadingTestExecutionListener
|
||||
import util.Rules
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat
|
||||
import static org.hamcrest.Matchers.containsString
|
||||
import static org.hamcrest.Matchers.is
|
||||
import static org.hamcrest.Matchers.isA
|
||||
|
||||
class WhitesourceOrgAdminRepositoryTest extends BasePiperTest {
|
||||
|
||||
private ExpectedException expectedException = ExpectedException.none()
|
||||
private JenkinsLoggingRule loggingRule = new JenkinsLoggingRule(this)
|
||||
|
||||
@Rule
|
||||
public RuleChain ruleChain = Rules
|
||||
.getCommonRules(this)
|
||||
.around(expectedException)
|
||||
.around(loggingRule)
|
||||
|
||||
WhitesourceOrgAdminRepository repository
|
||||
|
||||
@Before
|
||||
void init() throws Exception {
|
||||
repository = new WhitesourceOrgAdminRepository(nullScript, [whitesource: [serviceUrl: "http://some.host.whitesource.com/api/"], verbose: true])
|
||||
LibraryLoadingTestExecutionListener.prepareObjectInterceptors(repository)
|
||||
}
|
||||
|
||||
@After
|
||||
void tearDown() {
|
||||
printCallStack()
|
||||
nullScript.env = [:]
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMissingConfig() {
|
||||
expectedException.expect(AbortException)
|
||||
expectedException.expectMessage("Parameter 'whitesource.serviceUrl' must be provided as part of the configuration.")
|
||||
new WhitesourceOrgAdminRepository(nullScript, [:])
|
||||
}
|
||||
|
||||
@Test
|
||||
void testAccessor() {
|
||||
new WhitesourceOrgAdminRepository(nullScript, [whitesourceAccessor: "com.sap.piper.integration.WhitesourceRepository", whitesource: [serviceUrl: "http://test.com"]])
|
||||
}
|
||||
|
||||
@Test
|
||||
void testResolveProductMeta() {
|
||||
|
||||
def whitesourceMetaResponse = [
|
||||
productVitals: [
|
||||
[
|
||||
token: '410389ae-0269-4719-9cbf-fb5e299c8415',
|
||||
name : 'NW'
|
||||
],
|
||||
[
|
||||
token: '2892f1db-4361-4e83-a89d-d28a262d65b9',
|
||||
name : 'XS UAA'
|
||||
],
|
||||
[
|
||||
token: '1111111-1111-1111-1111-111111111111',
|
||||
name : 'Correct Name Cloud'
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
repository.config.putAll([whitesource: [productName: "Correct Name Cloud"]])
|
||||
|
||||
def result = repository.findProductMeta(whitesourceMetaResponse)
|
||||
|
||||
assertThat(result).isEqualTo([
|
||||
token: '1111111-1111-1111-1111-111111111111',
|
||||
name : 'Correct Name Cloud'
|
||||
])
|
||||
}
|
||||
|
||||
@Test
|
||||
void testHttpWhitesourceInternalCallUserKey() {
|
||||
def config = [whitesource: [ serviceUrl: "http://some.host.whitesource.com/api/", orgAdminUserKey: "4711"], verbose: false]
|
||||
repository.config.putAll(config)
|
||||
def requestBody = ["someJson" : [ "someObject" : "abcdef" ]]
|
||||
|
||||
def requestParams
|
||||
helper.registerAllowedMethod('httpRequest', [Map], { p ->
|
||||
requestParams = p
|
||||
})
|
||||
|
||||
repository.httpWhitesource(requestBody)
|
||||
|
||||
assertThat(requestParams, is(
|
||||
[
|
||||
url : config.serviceUrl,
|
||||
httpMode : 'POST',
|
||||
acceptType : 'APPLICATION_JSON',
|
||||
contentType: 'APPLICATION_JSON',
|
||||
requestBody: requestBody,
|
||||
quiet : true,
|
||||
userKey : config.orgAdminUserKey
|
||||
]
|
||||
))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testHttpWhitesourceInternalCallUserKeyVerboseProxy() {
|
||||
def config = [whitesource: [ serviceUrl: "http://some.host.whitesource.com/api/", orgAdminUserKey: "4711"], verbose: true]
|
||||
nullScript.env['HTTP_PROXY'] = "http://test.sap.com:8080"
|
||||
repository.config.putAll(config)
|
||||
def requestBody = ["someJson" : [ "someObject" : "abcdef" ]]
|
||||
|
||||
def requestParams
|
||||
helper.registerAllowedMethod('httpRequest', [Map], { p ->
|
||||
requestParams = p
|
||||
})
|
||||
|
||||
repository.httpWhitesource(requestBody)
|
||||
|
||||
assertThat(requestParams, is(
|
||||
[
|
||||
url : config.serviceUrl,
|
||||
httpMode : 'POST',
|
||||
acceptType : 'APPLICATION_JSON',
|
||||
contentType: 'APPLICATION_JSON',
|
||||
requestBody: requestBody,
|
||||
quiet : false,
|
||||
userKey : config.orgAdminUserKey,
|
||||
httpProxy : "http://test.sap.com:8080"
|
||||
]
|
||||
))
|
||||
|
||||
assertThat(loggingRule.log, containsString("Sending http request with parameters"))
|
||||
assertThat(loggingRule.log, containsString("Received response"))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testCreateProduct() {
|
||||
def config = [
|
||||
whitesource: [
|
||||
serviceUrl: "http://some.host.whitesource.com/api/",
|
||||
verbose: false,
|
||||
orgAdminUserKey: "4711",
|
||||
orgToken: "abcd1234",
|
||||
productName: "testProduct",
|
||||
emailAddressesOfInitialProductAdmins: ['some@somewhere.com', 'some2@somewhere.com']
|
||||
]
|
||||
]
|
||||
repository.config.putAll(config)
|
||||
def requestBody1 = [
|
||||
requestType: "getOrganizationProductVitals",
|
||||
orgToken: config.orgToken,
|
||||
userKey: "4711"
|
||||
]
|
||||
|
||||
def requestBody2 = [
|
||||
"requestType" : "setProductAssignments",
|
||||
"productToken" : "54785",
|
||||
"productMembership" : ["userAssignments":[], "groupAssignments":[]],
|
||||
"productAdmins" : ["userAssignments":[[ "email": "some@somewhere.com" ], ["email": "some2@somewhere.com"]]],
|
||||
"alertsEmailReceivers" : ["userAssignments":[]],
|
||||
"userKey": "4711"
|
||||
]
|
||||
|
||||
def requestParams = []
|
||||
helper.registerAllowedMethod('httpRequest', [Map], { p ->
|
||||
requestParams.add(p)
|
||||
return [ content : "{ \"productToken\" : \"54785\" }" ]
|
||||
})
|
||||
|
||||
repository.createProduct()
|
||||
|
||||
assertThat(requestParams[0], is(
|
||||
[
|
||||
url : config.serviceUrl,
|
||||
httpMode : 'POST',
|
||||
acceptType : 'APPLICATION_JSON',
|
||||
contentType: 'APPLICATION_JSON',
|
||||
requestBody: requestBody1,
|
||||
quiet : false,
|
||||
userKey : config.orgAdminUserKey,
|
||||
httpProxy : "http://test.sap.com:8080"
|
||||
]
|
||||
))
|
||||
|
||||
assertThat(requestParams[1], is(
|
||||
[
|
||||
url : config.serviceUrl,
|
||||
httpMode : 'POST',
|
||||
acceptType : 'APPLICATION_JSON',
|
||||
contentType: 'APPLICATION_JSON',
|
||||
requestBody: requestBody2,
|
||||
quiet : false,
|
||||
userKey : config.orgAdminUserKey,
|
||||
httpProxy : "http://test.sap.com:8080"
|
||||
]
|
||||
))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testIssueHttpRequestError() {
|
||||
def config = [whitesource: [ serviceUrl: "http://some.host.whitesource.com/api/", orgAdminUserKey: "4711"], verbose: false]
|
||||
repository.config.putAll(config)
|
||||
def requestBody = ["someJson" : [ "someObject" : "abcdef" ]]
|
||||
|
||||
def requestParams
|
||||
helper.registerAllowedMethod('httpRequest', [Map], { p ->
|
||||
requestParams = p
|
||||
return [content: "{ \"errorCode\" : \"4546\", \"errorMessage\" : \"some text\" } }"]
|
||||
})
|
||||
|
||||
def errorCaught = false
|
||||
try {
|
||||
repository.issueHttpRequest(requestBody)
|
||||
} catch (e) {
|
||||
errorCaught = true
|
||||
assertThat(e, isA(AbortException.class))
|
||||
assertThat(e.getMessage(), is("[WhiteSource] Request failed with error message 'some text' (4546)."))
|
||||
}
|
||||
assertThat(errorCaught, is(true))
|
||||
|
||||
assertThat(requestParams, is(
|
||||
[
|
||||
url : config.serviceUrl,
|
||||
httpMode : 'POST',
|
||||
acceptType : 'APPLICATION_JSON',
|
||||
contentType: 'APPLICATION_JSON',
|
||||
requestBody: requestBody,
|
||||
quiet : true,
|
||||
userKey : config.orgAdminUserKey
|
||||
]
|
||||
))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchProductMetaInfo() {
|
||||
def config = [whitesource: [ serviceUrl: "http://some.host.whitesource.com/api/", orgAdminUserKey: "4711", orgToken: "12345", productName: "testProduct"], verbose: true]
|
||||
nullScript.env['HTTP_PROXY'] = "http://test.sap.com:8080"
|
||||
repository.config.putAll(config)
|
||||
|
||||
def requestBody = [
|
||||
requestType: "getOrganizationProductVitals",
|
||||
orgToken: config.orgToken,
|
||||
userKey: "4711"
|
||||
]
|
||||
|
||||
def requestParams
|
||||
helper.registerAllowedMethod('httpRequest', [Map], { p ->
|
||||
requestParams = p
|
||||
return [ content: "{ \"productVitals\" : [ { \"name\": \"testProduct\"} ] }"]
|
||||
})
|
||||
|
||||
def result = repository.fetchProductMetaInfo()
|
||||
|
||||
assertThat(requestParams, is(
|
||||
[
|
||||
url : config.serviceUrl,
|
||||
httpMode : 'POST',
|
||||
acceptType : 'APPLICATION_JSON',
|
||||
contentType: 'APPLICATION_JSON',
|
||||
requestBody: requestBody,
|
||||
quiet : false,
|
||||
userKey : config.orgAdminUserKey,
|
||||
httpProxy : "http://test.sap.com:8080"
|
||||
]
|
||||
))
|
||||
|
||||
assertThat(result, is([ name: "testProduct"]))
|
||||
assertThat(loggingRule.log, containsString("Sending http request with parameters"))
|
||||
assertThat(loggingRule.log, containsString("Received response"))
|
||||
}
|
||||
}
|
@ -0,0 +1,575 @@
|
||||
package com.sap.piper.integration
|
||||
|
||||
|
||||
import hudson.AbortException
|
||||
import org.junit.After
|
||||
import org.junit.Before
|
||||
import org.junit.Rule
|
||||
import org.junit.Test
|
||||
import org.junit.rules.ExpectedException
|
||||
import org.junit.rules.RuleChain
|
||||
import util.BasePiperTest
|
||||
import util.JenkinsLoggingRule
|
||||
import util.LibraryLoadingTestExecutionListener
|
||||
import util.Rules
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat
|
||||
import static org.hamcrest.Matchers.containsString
|
||||
import static org.hamcrest.Matchers.is
|
||||
import static org.hamcrest.Matchers.isA
|
||||
|
||||
class WhitesourceRepositoryTest extends BasePiperTest {
|
||||
|
||||
private ExpectedException exception = ExpectedException.none()
|
||||
private JenkinsLoggingRule loggingRule = new JenkinsLoggingRule(this)
|
||||
|
||||
@Rule
|
||||
public RuleChain ruleChain = Rules
|
||||
.getCommonRules(this)
|
||||
.around(exception)
|
||||
.around(loggingRule)
|
||||
|
||||
WhitesourceRepository repository
|
||||
|
||||
@Before
|
||||
void init() throws Exception {
|
||||
nullScript.env['HTTP_PROXY'] = "http://proxy.wdf.sap.corp:8080"
|
||||
|
||||
repository = new WhitesourceRepository(nullScript, [whitesource: [serviceUrl: "http://some.host.whitesource.com/api/"]])
|
||||
LibraryLoadingTestExecutionListener.prepareObjectInterceptors(repository)
|
||||
}
|
||||
|
||||
@After
|
||||
void tearDown() {
|
||||
printCallStack()
|
||||
nullScript.env = [:]
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMissingConfig() {
|
||||
exception.expect(AbortException)
|
||||
exception.expectMessage("Parameter 'whitesource.serviceUrl' must be provided as part of the configuration.")
|
||||
new WhitesourceRepository(nullScript, [:])
|
||||
}
|
||||
|
||||
@Test
|
||||
void testResolveProjectsMeta() {
|
||||
def whitesourceMetaResponse = [
|
||||
projectVitals: [
|
||||
[
|
||||
token: '410389ae-0269-4719-9cbf-fb5e299c8415',
|
||||
name : 'NW'
|
||||
],
|
||||
[
|
||||
token: '2892f1db-4361-4e83-a89d-d28a262d65b9',
|
||||
name : 'Correct Project Name2'
|
||||
],
|
||||
[
|
||||
token: '1111111-1111-1111-1111-111111111111',
|
||||
name : 'Correct Project Name'
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
repository.config.whitesource = [:]
|
||||
repository.config.whitesource['productName'] = "Correct Name Cloud"
|
||||
repository.config.whitesource['projectNames'] = ["Correct Project Name", "Correct Project Name2"]
|
||||
|
||||
def result = repository.findProjectsMeta(whitesourceMetaResponse.projectVitals)
|
||||
|
||||
assertThat(result, is(
    [
        [
            token: '1111111-1111-1111-1111-111111111111',
            name : 'Correct Project Name'
        ],
        [
            token: '2892f1db-4361-4e83-a89d-d28a262d65b9',
            name : 'Correct Project Name2'
        ]
    ]))

assertThat(result.size(), is(2))
}
|
||||
|
||||
@Test
|
||||
void testResolveProjectsMetaFailNotFound() {
|
||||
def whitesourceMetaResponse = [
|
||||
projectVitals: [
|
||||
[
|
||||
token: '410389ae-0269-4719-9cbf-fb5e299c8415',
|
||||
name : 'NW'
|
||||
],
|
||||
[
|
||||
token: '2892f1db-4361-4e83-a89d-d28a262d65b9',
|
||||
name : 'Product Name'
|
||||
],
|
||||
[
|
||||
token: '1111111-1111-1111-1111-111111111111',
|
||||
name : 'Product Name2'
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
exception.expect(AbortException.class)
|
||||
|
||||
exception.expectMessage("Correct Project Name")
|
||||
|
||||
repository.config.putAll([whitesource : [projectNames: ["Correct Project Name"]]])
|
||||
|
||||
repository.findProjectsMeta(whitesourceMetaResponse.projectVitals)
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSortLibrariesAlphabeticallyGAV() {
|
||||
|
||||
def librariesResponse = [
|
||||
[
|
||||
groupId : 'xyz',
|
||||
artifactId: 'abc'
|
||||
],
|
||||
[
|
||||
groupId : 'abc',
|
||||
artifactId: 'abc-def'
|
||||
],
|
||||
[
|
||||
groupId : 'abc',
|
||||
artifactId: 'def-abc'
|
||||
],
|
||||
[
|
||||
groupId : 'def',
|
||||
artifactId: 'test'
|
||||
]
|
||||
]
|
||||
|
||||
repository.sortLibrariesAlphabeticallyGAV(librariesResponse)
|
||||
|
||||
assertThat(librariesResponse, is(
    [
        [groupId: 'abc', artifactId: 'abc-def'],
        [groupId: 'abc', artifactId: 'def-abc'],
        [groupId: 'def', artifactId: 'test'],
        [groupId: 'xyz', artifactId: 'abc']
    ]))
}
|
||||
|
||||
@Test
|
||||
void testSortVulnerabilitiesByScore() {
|
||||
|
||||
def vulnerabilitiesResponse = [
|
||||
[
|
||||
vulnerability: [
|
||||
score : 6.9,
|
||||
cvss3_score: 8.5
|
||||
]
|
||||
],
|
||||
[
|
||||
vulnerability: [
|
||||
score : 7.5,
|
||||
cvss3_score: 9.8
|
||||
]
|
||||
],
|
||||
[
|
||||
vulnerability: [
|
||||
score : 4,
|
||||
cvss3_score: 0
|
||||
]
|
||||
],
|
||||
[
|
||||
vulnerability: [
|
||||
score : 9.8,
|
||||
cvss3_score: 0
|
||||
]
|
||||
],
|
||||
[
|
||||
vulnerability: [
|
||||
score : 0,
|
||||
cvss3_score: 5
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
repository.sortVulnerabilitiesByScore(vulnerabilitiesResponse)
|
||||
|
||||
assertThat(vulnerabilitiesResponse, is(
    [
        [vulnerability: [score: 9.8, cvss3_score: 0]],
        [vulnerability: [score: 7.5, cvss3_score: 9.8]],
        [vulnerability: [score: 6.9, cvss3_score: 8.5]],
        [vulnerability: [score: 0, cvss3_score: 5]],
        [vulnerability: [score: 4, cvss3_score: 0]]
    ]))
}
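A minimal sketch of the ordering this test implies, assuming the CVSS v3 score takes precedence when present and the CVSS v2 score is the fallback (the actual implementation and its tie-breaking may differ):

def sortBySeverity = { List alerts ->
    def effectiveScore = { item -> item.vulnerability.cvss3_score ?: item.vulnerability.score }
    alerts.sort { a, b -> effectiveScore(b) <=> effectiveScore(a) }   // highest effective score first
}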
|
||||
|
||||
@Test
|
||||
void testHttpWhitesourceExternalCallNoUserKey() {
|
||||
def config = [whitesource: [serviceUrl: "https://saas.whitesource.com/api"], verbose: true]
|
||||
repository.config.putAll(config)
|
||||
def requestBody = "{ \"someJson\" : { \"someObject\" : \"abcdef\" } }"
|
||||
|
||||
def requestParams
|
||||
helper.registerAllowedMethod('httpRequest', [Map], { p ->
|
||||
requestParams = p
|
||||
})
|
||||
|
||||
repository.httpWhitesource(requestBody)
|
||||
|
||||
assertThat(requestParams, is(
|
||||
[
|
||||
url : config.whitesource.serviceUrl,
|
||||
httpMode : 'POST',
|
||||
acceptType : 'APPLICATION_JSON',
|
||||
contentType: 'APPLICATION_JSON',
|
||||
requestBody: requestBody,
|
||||
quiet : false,
|
||||
proxy : "http://proxy.wdf.sap.corp:8080"
|
||||
]
|
||||
))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testHttpWhitesourceExternalCallUserKey() {
|
||||
def config = [whitesource: [ serviceUrl: "https://saas.whitesource.com/api", userKey: "4711"], verbose: true]
|
||||
def requestBody = "{ \"someJson\" : { \"someObject\" : \"abcdef\" } }"
|
||||
|
||||
def requestParams
|
||||
helper.registerAllowedMethod('httpRequest', [Map], { p ->
|
||||
requestParams = p
|
||||
})
|
||||
|
||||
repository.httpWhitesource(requestBody)
|
||||
|
||||
assertThat(requestParams, is(
|
||||
[
|
||||
url : config.whitesource.serviceUrl,
|
||||
httpMode : 'POST',
|
||||
acceptType : 'APPLICATION_JSON',
|
||||
contentType: 'APPLICATION_JSON',
|
||||
requestBody: requestBody,
|
||||
quiet : false,
|
||||
proxy : "http://proxy.wdf.sap.corp:8080",
|
||||
userKey : "4711"
|
||||
]
|
||||
))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testHttpWhitesourceInternalCallUserKey() {
|
||||
def config = [whitesource: [serviceUrl: "http://mo-323123123.sap.corp/some", userKey: "4711"], verbose: false]
|
||||
def requestBody = "{ \"someJson\" : { \"someObject\" : \"abcdef\" } }"
|
||||
|
||||
def requestParams
|
||||
helper.registerAllowedMethod('httpRequest', [Map], { p ->
|
||||
requestParams = p
|
||||
})
|
||||
|
||||
repository.httpWhitesource(requestBody)
|
||||
|
||||
assertThat(requestParams, is(
|
||||
[
|
||||
url : config.whitesource.serviceUrl,
|
||||
httpMode : 'POST',
|
||||
acceptType : 'APPLICATION_JSON',
|
||||
contentType: 'APPLICATION_JSON',
|
||||
requestBody: requestBody,
|
||||
quiet : true
|
||||
]
|
||||
))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testHttpCallWithError() {
|
||||
def responseBody = """{
|
||||
\"errorCode\": 5001,
|
||||
\"errorMessage\": \"User is not allowed to perform this action\"
|
||||
}"""
|
||||
|
||||
exception.expect(isA(AbortException.class))
|
||||
exception.expectMessage("[WhiteSource] Request failed with error message 'User is not allowed to perform this action' (5001)")
|
||||
|
||||
helper.registerAllowedMethod('httpRequest', [Map], { p ->
|
||||
return [content: responseBody]
|
||||
})
|
||||
|
||||
repository.fetchWhitesourceResource([httpMode: 'POST'])
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchReportForProduct() {
|
||||
repository.config.putAll([whitesource: [serviceUrl: "http://mo-323123123.sap.corp/some", productToken: "4712", userKey: "4711"], verbose: true])
|
||||
def requestBody = "{ \"requestType\": \"getProductRiskReport\", \"productToken\": \"${repository.config.whitesource.productToken}\" }"
|
||||
|
||||
def requestParams
|
||||
helper.registerAllowedMethod('httpRequest', [Map], { p ->
|
||||
requestParams = p
|
||||
})
|
||||
|
||||
repository.fetchReportForProduct("test.file")
|
||||
|
||||
assertThat(requestParams, is(
|
||||
[
|
||||
url : repository.config.whitesource.serviceUrl,
|
||||
httpMode : 'POST',
|
||||
acceptType : 'APPLICATION_OCTETSTREAM',
|
||||
contentType : 'APPLICATION_JSON',
|
||||
requestBody : requestBody,
|
||||
quiet : false,
|
||||
userKey : repository.config.whitesource.userKey,
|
||||
httpProxy : "http://test.sap.com:8080",
|
||||
outputFile : "test.file",
|
||||
customHeaders : [[name: 'Cache-Control', value: 'no-cache, no-store, must-revalidate'], [name: 'Pragma', value: 'no-cache']]
|
||||
]
|
||||
))
|
||||
|
||||
assertThat(loggingRule.log, containsString("Sending http request with parameters [requestType:getProductRiskReport, productToken:4711]"))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchProductLicenseAlerts() {
|
||||
def config = [whitesource: [serviceUrl: "http://some.host.whitesource.com/api/", userKey: "4711", productToken: "8547"]]
|
||||
nullScript.env['HTTP_PROXY'] = "http://test.sap.com:8080"
|
||||
repository.config.putAll(config)
|
||||
|
||||
def requestBody = [
|
||||
requestType: "getProductAlertsByType",
|
||||
alertType: "REJECTED_BY_POLICY_RESOURCE",
|
||||
productToken: config.productToken
|
||||
]
|
||||
|
||||
def requestParams
|
||||
helper.registerAllowedMethod('httpRequest', [Map], { p ->
|
||||
requestParams = p
|
||||
return [ content: "{ \"alerts\" : [] }"]
|
||||
})
|
||||
|
||||
repository.fetchProductLicenseAlerts()
|
||||
|
||||
assertThat(requestParams, is(
|
||||
[
|
||||
url : config.whitesource.serviceUrl,
|
||||
httpMode : 'POST',
|
||||
acceptType : 'APPLICATION_JSON',
|
||||
contentType: 'APPLICATION_JSON',
|
||||
requestBody: requestBody,
|
||||
quiet : false,
|
||||
userKey : config.whitesource.userKey,
|
||||
httpProxy : "http://test.sap.com:8080"
|
||||
]
|
||||
))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchProjectLicenseAlerts() {
|
||||
def projectToken = "8547"
|
||||
def config = [whitesource: [serviceUrl: "http://some.host.whitesource.com/api/", userKey: "4711"]]
|
||||
repository.config.putAll(config)
|
||||
|
||||
def requestBody = [
|
||||
requestType: "getProjectAlertsByType",
|
||||
alertType: "REJECTED_BY_POLICY_RESOURCE",
|
||||
projectToken: projectToken
|
||||
]
|
||||
|
||||
def requestParams
|
||||
helper.registerAllowedMethod('httpRequest', [Map], { p ->
|
||||
requestParams = p
|
||||
return [ content: "{ \"alerts\" : [] }"]
|
||||
})
|
||||
|
||||
repository.fetchProjectLicenseAlerts(projectToken)
|
||||
|
||||
assertThat(requestParams, is(
|
||||
[
|
||||
url : config.whitesource.serviceUrl,
|
||||
httpMode : 'POST',
|
||||
acceptType : 'APPLICATION_JSON',
|
||||
contentType: 'APPLICATION_JSON',
|
||||
requestBody: requestBody,
|
||||
quiet : false,
|
||||
userKey : config.whitesource.userKey,
|
||||
httpProxy : "http://test.sap.com:8080"
|
||||
]
|
||||
))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchProjectsMetaInfo() {
|
||||
def config = [whitesource: [serviceUrl: "http://some.host.whitesource.com/api/", userKey: "4711", productToken: '8475', projectNames: ['testProject1', 'testProject2']]]
|
||||
nullScript.env['HTTP_PROXY'] = "http://test.sap.com:8080"
|
||||
repository.config.putAll(config)
|
||||
|
||||
def requestBody = [
|
||||
requestType: "getProductProjectVitals",
|
||||
productToken: config.productToken
|
||||
]
|
||||
|
||||
def requestParams
|
||||
helper.registerAllowedMethod('httpRequest', [Map], { p ->
|
||||
requestParams = p
|
||||
return [ content: "{ \"projectVitals\" : [ { \"name\": \"testProject1\"}, { \"name\": \"testProject2\"} ] }"]
|
||||
})
|
||||
|
||||
def result = repository.fetchProjectsMetaInfo()
|
||||
|
||||
assertThat(requestParams, is(
|
||||
[
|
||||
url : config.whitesource.serviceUrl,
|
||||
httpMode : 'POST',
|
||||
acceptType : 'APPLICATION_JSON',
|
||||
contentType: 'APPLICATION_JSON',
|
||||
requestBody: requestBody,
|
||||
quiet : false,
|
||||
userKey : config.whitesource.userKey,
|
||||
httpProxy : "http://test.sap.com:8080"
|
||||
]
|
||||
))
|
||||
|
||||
assertThat(result, is([[ name: "testProject1"], [ name: "testProject2"]]))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchProjectsMetaInfoError() {
|
||||
def config = [whitesource: [serviceUrl: "http://some.host.whitesource.com/api/", userKey: "4711", productName: 'kjdkjkhd', productToken: '8475', projectNames: ['testProject1', 'testProject2']]]
|
||||
repository.config.putAll(config)
|
||||
|
||||
def requestParams
|
||||
helper.registerAllowedMethod('httpRequest', [Map], { p ->
|
||||
requestParams = p
|
||||
return [ content: "{ }"]
|
||||
})
|
||||
|
||||
def errorCaught = false
|
||||
try {
|
||||
repository.fetchProjectsMetaInfo()
|
||||
} catch (e) {
|
||||
errorCaught = true
|
||||
assertThat(e, isA(AbortException.class))
|
||||
assertThat(e.getMessage(), is("[WhiteSource] Could not fetch any projects for product '${config.productName}' from backend, response was {}"))
|
||||
}
|
||||
assertThat(errorCaught, is(true))
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void testFetchVulnerabilitiesOnProjects() {
|
||||
def config = [whitesource: [serviceUrl: "http://some.host.whitesource.com/api/", userKey: "4711", productToken: '8475', projectNames: ['testProject1', 'testProject2']]]
|
||||
nullScript.env['HTTP_PROXY'] = "http://test.sap.com:8080"
|
||||
repository.config.putAll(config)
|
||||
|
||||
def requestBody1 = [
|
||||
requestType : "getProjectAlertsByType",
|
||||
alertType : "SECURITY_VULNERABILITY",
|
||||
projectToken: "1234"
|
||||
]
|
||||
|
||||
def requestBody2 = [
|
||||
requestType : "getProjectAlertsByType",
|
||||
alertType : "SECURITY_VULNERABILITY",
|
||||
projectToken: "2345"
|
||||
]
|
||||
|
||||
def requestParams = []
|
||||
helper.registerAllowedMethod('httpRequest', [Map], { p ->
|
||||
requestParams.add(p)
|
||||
return [ content: "{ \"alerts\" : [ { \"vulnerability\" : { \"cvss3_score\" : \"7\"} } ] }"]
|
||||
})
|
||||
|
||||
def result = repository.fetchVulnerabilities([ [name: "testProject1", token: "1234"], [name: "testProject2", token: "2345"] ])
|
||||
|
||||
assertThat(requestParams[0], is(
|
||||
[
|
||||
url : config.whitesource.serviceUrl,
|
||||
httpMode : 'POST',
|
||||
acceptType : 'APPLICATION_JSON',
|
||||
contentType: 'APPLICATION_JSON',
|
||||
requestBody: requestBody1,
|
||||
quiet : false,
|
||||
userKey : config.whitesource.userKey,
|
||||
httpProxy : "http://test.sap.com:8080"
|
||||
]
|
||||
))
|
||||
|
||||
assertThat(requestParams[1], is(
|
||||
[
|
||||
url : config.whitesource.serviceUrl,
|
||||
httpMode : 'POST',
|
||||
acceptType : 'APPLICATION_JSON',
|
||||
contentType: 'APPLICATION_JSON',
|
||||
requestBody: requestBody2,
|
||||
quiet : false,
|
||||
userKey : config.whitesource.userKey,
|
||||
httpProxy : "http://test.sap.com:8080"
|
||||
]
|
||||
))
|
||||
|
||||
assertThat(result.size(), is(2))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFetchVulnerabilitiesOnProduct() {
|
||||
def config = [whitesource: [serviceUrl: "http://some.host.whitesource.com/api/", userKey: "4711", productToken: '8475', productName : 'testProduct']]
|
||||
nullScript.env['HTTP_PROXY'] = "http://test.sap.com:8080"
|
||||
repository.config.putAll(config)
|
||||
|
||||
def requestBody = [
|
||||
requestType : "getProductAlertsByType",
|
||||
alertType : "SECURITY_VULNERABILITY",
|
||||
productToken: config.productToken,
|
||||
]
|
||||
|
||||
def requestParams = []
|
||||
helper.registerAllowedMethod('httpRequest', [Map], { p ->
|
||||
requestParams.add(p)
|
||||
return [ content: "{ \"alerts\" : [ { \"vulnerability\" : { \"cvss3_score\" : \"7\"} } ] }"]
|
||||
})
|
||||
|
||||
def result = repository.fetchVulnerabilities([ [name: "testProject1", token: "1234"], [name: "testProject2", token: "2345"] ])
|
||||
|
||||
assertThat(requestParams[0], is(
|
||||
[
|
||||
url : config.whitesource.serviceUrl,
|
||||
httpMode : 'POST',
|
||||
acceptType : 'APPLICATION_JSON',
|
||||
contentType: 'APPLICATION_JSON',
|
||||
requestBody: requestBody,
|
||||
quiet : false,
|
||||
userKey : config.whitesource.userKey,
|
||||
httpProxy : "http://test.sap.com:8080"
|
||||
]
|
||||
))
|
||||
|
||||
assertThat(result.size(), is(1))
|
||||
}
|
||||
}
|
@ -2,6 +2,7 @@
|
||||
|
||||
package util
|
||||
|
||||
import com.sap.piper.DescriptorUtils
|
||||
import com.sap.piper.GitUtils
|
||||
import com.sap.piper.JenkinsUtils
|
||||
import com.sap.piper.Utils
|
||||
@ -45,4 +46,11 @@ class BasePiperTestContext {
|
||||
LibraryLoadingTestExecutionListener.prepareObjectInterceptors(mockJenkinsUtils)
|
||||
return mockJenkinsUtils
|
||||
}
|
||||
|
||||
@Bean
|
||||
DescriptorUtils mockDescriptorUtils() {
|
||||
def mockDescriptorUtils = new DescriptorUtils()
|
||||
LibraryLoadingTestExecutionListener.prepareObjectInterceptors(mockDescriptorUtils)
|
||||
return mockDescriptorUtils
|
||||
}
|
||||
}
|
||||
|
@ -14,6 +14,7 @@ import org.jenkinsci.plugins.credentialsbinding.impl.CredentialNotFoundException
|
||||
class JenkinsCredentialsRule implements TestRule {
|
||||
|
||||
Map credentials = [:]
|
||||
Map bindingTypes = [:]
|
||||
|
||||
final BasePipelineTest testInstance
|
||||
|
||||
@ -26,6 +27,11 @@ class JenkinsCredentialsRule implements TestRule {
|
||||
return this
|
||||
}
|
||||
|
||||
JenkinsCredentialsRule withCredentials(String credentialsId, String token) {
|
||||
credentials.put(credentialsId, [token: token])
|
||||
return this
|
||||
}
|
||||
|
||||
@Override
|
||||
Statement apply(Statement base, Description description) {
|
||||
return statement(base)
|
||||
@ -39,27 +45,58 @@ class JenkinsCredentialsRule implements TestRule {
|
||||
void evaluate() throws Throwable {
|
||||
|
||||
testInstance.helper.registerAllowedMethod('usernamePassword', [Map.class],
|
||||
{ m -> if (credentials.keySet().contains(m.credentialsId)) return m;
|
||||
// this is what really happens in case of an unknown credentials id,
|
||||
// checked with reality using credentials plugin 2.1.18.
|
||||
throw new CredentialNotFoundException(
|
||||
"Could not find credentials entry with ID '${m.credentialsId}'")
|
||||
{ m ->
|
||||
if (credentials.keySet().contains(m.credentialsId)) { bindingTypes[m.credentialsId] = 'usernamePassword'; return m }
|
||||
// this is what really happens in case of an unknown credentials id,
|
||||
// checked with reality using credentials plugin 2.1.18.
|
||||
throw new CredentialNotFoundException(
|
||||
"Could not find credentials entry with ID '${m.credentialsId}'")
|
||||
})
|
||||
|
||||
testInstance.helper.registerAllowedMethod('string', [Map.class],
|
||||
{ m ->
|
||||
if (credentials.keySet().contains(m.credentialsId)) { bindingTypes[m.credentialsId] = 'string'; return m }
|
||||
// this is what really happens in case of an unknown credentials id,
|
||||
// checked with reality using credentials plugin 2.1.18.
|
||||
throw new CredentialNotFoundException(
|
||||
"Could not find credentials entry with ID '${m.credentialsId}'")
|
||||
})
|
||||
|
||||
testInstance.helper.registerAllowedMethod('withCredentials', [List, Closure], { config, closure ->
|
||||
|
||||
def credsId = config[0].credentialsId
|
||||
def passwordVariable = config[0].passwordVariable
|
||||
def usernameVariable = config[0].usernameVariable
|
||||
def credentialsBindingType = bindingTypes.get(credsId)
|
||||
def creds = credentials.get(credsId)
|
||||
|
||||
binding.setProperty(usernameVariable, creds?.user)
|
||||
binding.setProperty(passwordVariable, creds?.passwd)
|
||||
def tokenVariable, usernameVariable, passwordVariable, prepare, destruct
|
||||
if(credentialsBindingType == "usernamePassword") {
|
||||
passwordVariable = config[0].passwordVariable
|
||||
usernameVariable = config[0].usernameVariable
|
||||
prepare = {
|
||||
binding.setProperty(usernameVariable, creds?.user)
|
||||
binding.setProperty(passwordVariable, creds?.passwd)
|
||||
}
|
||||
destruct = {
|
||||
binding.setProperty(usernameVariable, null)
|
||||
binding.setProperty(passwordVariable, null)
|
||||
}
|
||||
} else if(credentialsBindingType == "string") {
|
||||
tokenVariable = config[0].variable
|
||||
prepare = {
|
||||
binding.setProperty(tokenVariable, creds?.token)
|
||||
}
|
||||
destruct = {
|
||||
binding.setProperty(tokenVariable, null)
|
||||
}
|
||||
} else {
|
||||
throw new RuntimeException("Unknown binding type")
|
||||
}
|
||||
|
||||
prepare()
|
||||
try {
|
||||
closure()
|
||||
} finally {
|
||||
binding.setProperty(usernameVariable, null)
|
||||
binding.setProperty(passwordVariable, null)
|
||||
destruct()
|
||||
}
|
||||
})
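For illustration, a hedged sketch of how a test could use the new token registration together with a 'string' credentials binding; the credentials id, token value, and variable name are placeholders:

@Rule
public JenkinsCredentialsRule credentialsRule = new JenkinsCredentialsRule(this)
    .withCredentials('ws-org-admin-token-id', 'secret-token')   // registers a token credential

// code under test; the 'string' branch above binds the named variable:
withCredentials([string(credentialsId: 'ws-org-admin-token-id', variable: 'token')]) {
    // 'token' resolves to 'secret-token' inside this closure
}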
|
||||
|
||||
|
@ -1,9 +1,7 @@
|
||||
package util
|
||||
|
||||
import com.lesfurets.jenkins.unit.InterceptingGCL
|
||||
import com.lesfurets.jenkins.unit.MethodSignature
|
||||
import com.lesfurets.jenkins.unit.PipelineTestHelper
|
||||
import org.codehaus.groovy.control.CompilerConfiguration
|
||||
import org.springframework.test.context.TestContext
|
||||
import org.springframework.test.context.support.AbstractTestExecutionListener
|
||||
import org.springframework.test.context.support.DependencyInjectionTestExecutionListener
|
||||
@ -14,15 +12,6 @@ class LibraryLoadingTestExecutionListener extends AbstractTestExecutionListener
|
||||
|
||||
static PipelineTestHelper singletonInstance
|
||||
|
||||
static CompilerConfiguration configuration
|
||||
|
||||
static GroovyClassLoader cLoader
|
||||
|
||||
static {
|
||||
configuration = new CompilerConfiguration()
|
||||
cLoader = new InterceptingGCL(singletonInstance, LibraryLoadingTestExecutionListener.class.getClassLoader(), configuration)
|
||||
}
|
||||
|
||||
static List TRACKED_ON_CLASS = []
|
||||
static List TRACKED_ON_METHODS = []
|
||||
|
||||
|
4
test/resources/DescriptorUtils/dlang/dub.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"name": "hdi-deploy",
|
||||
"version": "2.3.0"
|
||||
}
|
1
test/resources/DescriptorUtils/go/VERSION
Normal file
@ -0,0 +1 @@
|
||||
1.2.3
|
7
test/resources/DescriptorUtils/go/glide.yaml
Normal file
@ -0,0 +1,7 @@
|
||||
package: github.wdf.sap.corp/TestOrg/GolangTest
|
||||
import:
|
||||
- package: github.com/julienschmidt/httprouter
|
||||
version: ^1.1.0
|
||||
- package: github.com/tebeka/go2xunit
|
||||
version: ^1.4.4
|
||||
- package: github.wdf.sap.corp/dtxmake-acceptance/golang-sample
|
4
test/resources/DescriptorUtils/npm/package.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"name": "@sap/hdi-deploy",
|
||||
"version": "2.3.0"
|
||||
}
|
4
test/resources/DescriptorUtils/npm/package2.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"name": "some-test",
|
||||
"version": "1.2.3"
|
||||
}
|
4
test/resources/DescriptorUtils/npm/package3.json
Normal file
@ -0,0 +1,4 @@
|
||||
{
|
||||
"name": "@someerror",
|
||||
"version": "1.2.3"
|
||||
}
|
22
test/resources/DescriptorUtils/pip/setup.py
Normal file
@ -0,0 +1,22 @@
|
||||
from setuptools import setup, find_packages
|
||||
from codecs import open
|
||||
from os import path
|
||||
|
||||
def get_version():
|
||||
with open('version.txt') as ver_file:
|
||||
version_str = ver_file.readline().rstrip()
|
||||
return version_str
|
||||
|
||||
|
||||
def get_install_requires():
|
||||
with open('requirements.txt') as reqs_file:
|
||||
reqs = [line.rstrip() for line in reqs_file.readlines()]
|
||||
return reqs
|
||||
|
||||
setup(name="some-test",
|
||||
version=get_version(),
|
||||
python_requires='>=3',
|
||||
packages=find_packages(exclude=['contrib', 'docs', 'tests*', 'coverage', 'bin']),
|
||||
description="test",
|
||||
install_requires=get_install_requires(),
|
||||
)
|
1
test/resources/DescriptorUtils/pip/version.txt
Normal file
@ -0,0 +1 @@
|
||||
1.0.0-SNAPSHOT
|
6
test/resources/DescriptorUtils/sbt/sbtDescriptor.json
Normal file
@ -0,0 +1,6 @@
|
||||
{
|
||||
"group": "sap",
|
||||
"artifactId": "hdi-deploy",
|
||||
"version": "2.3.0",
|
||||
"packaging": "test"
|
||||
}
|
53
test/resources/utilsTest/build.sbt
Normal file
@ -0,0 +1,53 @@
|
||||
import scala.io.Source
|
||||
|
||||
val buildDescriptorMap = JSON
|
||||
.parseFull(Source.fromFile("sbtDescriptor.json").mkString)
|
||||
.get
|
||||
.asInstanceOf[Map[String, String]]
|
||||
|
||||
lazy val buildSettings = Seq(
|
||||
scalaVersion := "2.11.11",
|
||||
)
|
||||
|
||||
lazy val root = (project in file("."))
|
||||
.settings(buildSettings)
|
||||
|
||||
libraryDependencies ++= Seq(
|
||||
jdbc,
|
||||
"org.scalatestplus.play" % "scalatestplus-play_2.11" % "2.0.0" % Test
|
||||
)
|
||||
|
||||
dependencyOverrides += "com.fasterxml.jackson.core" % "jackson-databind" % "2.8.11.2"
|
||||
|
||||
resolvers ++= Seq(
|
||||
Resolver.url("Typesafe Ivy releases",
|
||||
url("https://repo.typesafe.com/typesafe/ivy-releases"))(Resolver.ivyStylePatterns)
|
||||
)
|
||||
|
||||
// Play provides two styles of routers, one expects its actions to be injected, the
|
||||
// other, legacy style, accesses its actions statically.
|
||||
routesGenerator := InjectedRoutesGenerator
|
||||
|
||||
javaOptions in run ++= Seq(
|
||||
"-Xmx12G"
|
||||
)
|
||||
|
||||
javaOptions in Universal ++= Seq(
|
||||
"-Dpidfile.path=/dev/null"
|
||||
)
|
||||
|
||||
javaOptions in Test += "-Dconfig.file=conf/application.test.conf"
|
||||
|
||||
// Do not add API documentation into generated package
|
||||
sources in (Compile, doc) := Seq.empty
|
||||
publishArtifact in (Universal, packageBin) := true
|
||||
|
||||
// scala style
|
||||
scalastyleConfig := baseDirectory.value / "scalastyle-production-config.xml"
|
||||
|
||||
// Whitesource
|
||||
whitesourceProduct in ThisBuild := "PRODUCT VERSION"
|
||||
whitesourceOrgToken in ThisBuild := "org-token"
|
||||
whitesourceAggregateProjectName in ThisBuild := "project-name"
|
||||
whitesourceAggregateProjectToken in ThisBuild := "project-token"
|
||||
whitesourceFailOnError in ThisBuild := false
|
15
test/resources/utilsTest/setup.py
Normal file
@ -0,0 +1,15 @@
|
||||
from setuptools import setup
|
||||
|
||||
setup(
|
||||
name='py_connect',
|
||||
version='1.0',
|
||||
|
||||
description='This is a python package to handle some ci-connect payload parts',
|
||||
|
||||
url='https://github.wdf.sap.corp/sap-production/py_connect',
|
||||
|
||||
# Author details
|
||||
author='Some Author',
|
||||
author_email='some.author@sap.com',
|
||||
packages=['payload']
|
||||
)
|
@ -206,7 +206,7 @@ private String generatePodSpec(Map config) {
|
||||
]
|
||||
podSpec.spec.securityContext = getSecurityContext(config)
|
||||
|
||||
return new JsonUtils().getPrettyJsonString(podSpec)
|
||||
return new JsonUtils().groovyObjectToPrettyJsonString(podSpec)
|
||||
}
|
||||
|
||||
|
||||
|
@ -106,9 +106,9 @@ private void writeToInflux(config, script){
|
||||
|
||||
//write results into json files for archiving - also beneficial when no InfluxDB is available yet
|
||||
def jsonUtils = new JsonUtils()
|
||||
writeFile file: 'jenkins_data.json', text: jsonUtils.getPrettyJsonString(config.customData)
|
||||
writeFile file: 'influx_data.json', text: jsonUtils.getPrettyJsonString(config.customDataMap)
|
||||
writeFile file: 'jenkins_data_tags.json', text: jsonUtils.getPrettyJsonString(config.customDataTags)
|
||||
writeFile file: 'influx_data_tags.json', text: jsonUtils.getPrettyJsonString(config.customDataMapTags)
|
||||
writeFile file: 'jenkins_data.json', text: jsonUtils.groovyObjectToPrettyJsonString(config.customData)
|
||||
writeFile file: 'influx_data.json', text: jsonUtils.groovyObjectToPrettyJsonString(config.customDataMap)
|
||||
writeFile file: 'jenkins_data_tags.json', text: jsonUtils.groovyObjectToPrettyJsonString(config.customDataTags)
|
||||
writeFile file: 'influx_data_tags.json', text: jsonUtils.groovyObjectToPrettyJsonString(config.customDataMapTags)
|
||||
archiveArtifacts artifacts: '*data.json', allowEmptyArchive: true
|
||||
}
|
||||
|
584
vars/whitesourceExecuteScan.groovy
Normal file
@ -0,0 +1,584 @@
|
||||
import com.sap.piper.DescriptorUtils
|
||||
import com.sap.piper.GenerateDocumentation
|
||||
import com.sap.piper.JsonUtils
|
||||
import com.sap.piper.Utils
|
||||
import com.sap.piper.integration.WhitesourceOrgAdminRepository
|
||||
import com.sap.piper.integration.WhitesourceRepository
|
||||
import com.sap.piper.ConfigurationHelper
|
||||
import com.sap.piper.WhitesourceConfigurationHelper
|
||||
import com.sap.piper.mta.MtaMultiplexer
|
||||
import groovy.text.GStringTemplateEngine
|
||||
import groovy.transform.Field
|
||||
import groovy.text.SimpleTemplateEngine
|
||||
|
||||
import static com.sap.piper.Prerequisites.checkScript
|
||||
|
||||
@Field String STEP_NAME = getClass().getName()
|
||||
@Field Set GENERAL_CONFIG_KEYS = [
|
||||
'whitesource',
|
||||
/**
|
||||
* Jenkins credentials ID referring to the organization admin's token.
|
||||
* @parentConfigKey whitesource
|
||||
*/
|
||||
'orgAdminUserTokenCredentialsId',
|
||||
/**
|
||||
* WhiteSource token identifying your organization.
|
||||
* @parentConfigKey whitesource
|
||||
*/
|
||||
'orgToken',
|
||||
/**
|
||||
* Name of the WhiteSource product to be created and used for results aggregation.
|
||||
* @parentConfigKey whitesource
|
||||
*/
|
||||
'productName',
|
||||
/**
|
||||
* Version of the WhiteSource product to be created and used for results aggregation, usually determined automatically.
|
||||
* @parentConfigKey whitesource
|
||||
*/
|
||||
'productVersion',
|
||||
/**
|
||||
* Token of the WhiteSource product to be created and used for results aggregation, usually determined automatically.
|
||||
* @parentConfigKey whitesource
|
||||
*/
|
||||
'productToken',
|
||||
/**
|
||||
* List of WhiteSource projects to be included in the assessment part of the step, usually determined automatically.
|
||||
* @parentConfigKey whitesource
|
||||
*/
|
||||
'projectNames',
|
||||
/**
|
||||
* URL used for downloading the Java Runtime Environment (JRE) required to run the WhiteSource Unified Agent.
|
||||
* @parentConfigKey whitesource
|
||||
*/
|
||||
'jreDownloadUrl',
|
||||
/**
|
||||
* URL to the WhiteSource server API used for communication, defaults to `https://saas.whitesourcesoftware.com/api`.
|
||||
* @parentConfigKey whitesource
|
||||
*/
|
||||
'serviceUrl',
|
||||
/**
|
||||
* Jenkins credentials ID referring to the product admin's token.
|
||||
* @parentConfigKey whitesource
|
||||
*/
|
||||
'userTokenCredentialsId',
|
||||
/**
|
||||
* Type of development stack used to implement the solution.
|
||||
* @possibleValues `maven`, `mta`, `npm`, `pip`, `sbt`
|
||||
*/
|
||||
'scanType',
|
||||
/**
|
||||
* Whether verbose output should be produced.
|
||||
* @possibleValues `true`, `false`
|
||||
*/
|
||||
'verbose'
|
||||
]
|
||||
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS + [
|
||||
/**
|
||||
* Install command that can be used to populate the default docker image for some scenarios.
|
||||
*/
|
||||
'installCommand',
|
||||
/**
|
||||
* URL used to download the latest version of the WhiteSource Unified Agent.
|
||||
*/
|
||||
'agentDownloadUrl',
|
||||
/**
|
||||
* Locally used name for the Unified Agent jar file after download.
|
||||
*/
|
||||
'agentFileName',
|
||||
/**
|
||||
* Additional parameters passed to the Unified Agent command line.
|
||||
*/
|
||||
'agentParameters',
|
||||
/**
|
||||
* List of build descriptors and therefore modules to exclude from the scan and assessment activities.
|
||||
*/
|
||||
'buildDescriptorExcludeList',
|
||||
/**
|
||||
* Explicit path to the build descriptor file.
|
||||
*/
|
||||
'buildDescriptorFile',
|
||||
/**
|
||||
* Explicit path to the WhiteSource Unified Agent configuration file.
|
||||
*/
|
||||
'configFilePath',
|
||||
/**
|
||||
* Whether to create the related WhiteSource product on the fly based on the supplied pipeline configuration.
|
||||
*/
|
||||
'createProductFromPipeline',
|
||||
/**
|
||||
* The list of email addresses to assign as product admins for newly created WhiteSource products.
|
||||
*/
|
||||
'emailAddressesOfInitialProductAdmins',
|
||||
/**
|
||||
* Docker image to be used for scanning.
|
||||
*/
|
||||
'dockerImage',
|
||||
/**
|
||||
* Docker workspace to be used for scanning.
|
||||
*/
|
||||
'dockerWorkspace',
|
||||
/**
|
||||
* Whether license compliance is considered and reported as part of the assessment.
|
||||
* @possibleValues `true`, `false`
|
||||
*/
|
||||
'licensingVulnerabilities',
|
||||
/**
|
||||
* Limit of parallel jobs run at once in `scanType: 'mta'` based scenarios, defaults to `15`.
|
||||
*/
|
||||
'parallelLimit',
|
||||
/**
|
||||
* Whether assessment is being done at all, defaults to `true`.
|
||||
* @possibleValues `true`, `false`
|
||||
*/
|
||||
'reporting',
|
||||
/**
|
||||
* Whether security compliance is considered and reported as part of the assessment.
|
||||
* @possibleValues `true`, `false`
|
||||
*/
|
||||
'securityVulnerabilities',
|
||||
/**
|
||||
* Limit of tolerable CVSS v3 score; issues with this score or above fail the build during assessment, defaults to `-1`.
|
||||
* @possibleValues `-1` to switch failing off, any value between `0` and `10` to fail on issues with the specified score or above
|
||||
*/
|
||||
'cvssSeverityLimit',
|
||||
    /**
     * List of stashes to be unstashed into the workspace before performing the scan.
     */
    'stashContent',
    /**
     * Timeout in seconds until an HTTP call is forcefully terminated.
     */
    'timeout',
    /**
     * Name of the file the vulnerability report is written to.
     */
    'vulnerabilityReportFileName',
    /**
     * Title of the vulnerability report written during the assessment phase.
     */
    'vulnerabilityReportTitle'
]

@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS

@Field Map CONFIG_KEY_COMPATIBILITY = [
    productName            : 'whitesourceProductName',
    productToken           : 'whitesourceProductToken',
    projectNames           : 'whitesourceProjectNames',
    userTokenCredentialsId : 'whitesourceUserTokenCredentialsId',
    serviceUrl             : 'whitesourceServiceUrl',
    agentDownloadUrl       : 'fileAgentDownloadUrl',
    agentParameters        : 'fileAgentParameters',
    whitesource            : [
        orgAdminUserTokenCredentialsId       : 'orgAdminUserTokenCredentialsId',
        orgToken                             : 'orgToken',
        productName                          : 'productName',
        productToken                         : 'productToken',
        projectNames                         : 'projectNames',
        serviceUrl                           : 'serviceUrl',
        configFilePath                       : 'configFilePath',
        userTokenCredentialsId               : 'userTokenCredentialsId',
        agentDownloadUrl                     : 'agentDownloadUrl',
        agentFileName                        : 'agentFileName',
        agentParameters                      : 'agentParameters',
        buildDescriptorExcludeList           : 'buildDescriptorExcludeList',
        buildDescriptorFile                  : 'buildDescriptorFile',
        createProductFromPipeline            : 'createProductFromPipeline',
        emailAddressesOfInitialProductAdmins : 'emailAddressesOfInitialProductAdmins',
        jreDownloadUrl                       : 'jreDownloadUrl',
        licensingVulnerabilities             : 'licensingVulnerabilities',
        parallelLimit                        : 'parallelLimit',
        reporting                            : 'reporting',
        securityVulnerabilities              : 'securityVulnerabilities',
        cvssSeverityLimit                    : 'cvssSeverityLimit',
        timeout                              : 'timeout',
        vulnerabilityReportFileName          : 'vulnerabilityReportFileName',
        vulnerabilityReportTitle             : 'vulnerabilityReportTitle',
        installCommand                       : 'installCommand'
    ]
]

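// Note: the map above provides backward compatibility for legacy configuration keys, e.g. a flat
// `whitesourceProductName` entry is mapped via `productName` onto the nested `whitesource/productName` key
// (resolution is done by ConfigurationHelper; key names are taken from the map above, example is illustrative).
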
/**
 * BETA
 *
 * With this step, [WhiteSource](https://www.whitesourcesoftware.com) security and license compliance scans can be executed and assessed.
 *
 * WhiteSource is a Software as a Service offering based on a so-called Unified Agent that locally determines the dependency
 * tree of a Node.js, Java, Python, Ruby, or Scala based solution and sends it to the WhiteSource server for a policy-based license compliance
 * check and detection of publicly known vulnerabilities in Free and Open Source Software.
 *
 * !!! note "Docker Images"
 *     The underlying Docker images are public and specific to the solution's programming language(s) and therefore may have to be exchanged
 *     to fit and support the relevant scenario. The default Python environment, for example, is based on Python 3.
 *
 * !!! warning "Restrictions"
 *     Currently the step only contains hardened scan configurations for `scanType` `'pip'` and `'go'`. Other environments are still being elaborated,
 *     so please check your results thoroughly and do not take them for granted by default.
 *     Also, not all environments have been thoroughly tested yet, so you might need to adjust the default containers used or
 *     create your own to adequately support your scenario. To do so, please modify the `dockerImage` and `dockerWorkspace` parameters.
 *     The step expects an environment containing the programming-language-related compiler/interpreter as well as the related build tool. For a list
 *     of the supported build tools per environment please refer to the [WhiteSource Unified Agent Documentation](https://whitesource.atlassian.net/wiki/spaces/WD/pages/33718339/Unified+Agent).
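 *
 * A minimal usage sketch (the values shown are placeholders, not shipped defaults; adjust them to your own WhiteSource organization and product):
 *
 * ```groovy
 * whitesourceExecuteScan script: this,
 *     scanType: 'maven',
 *     whitesource: [
 *         serviceUrl: 'https://saas.whitesourcesoftware.com/api',
 *         orgToken: 'myWhitesourceOrgToken',
 *         userTokenCredentialsId: 'whitesource-user-token',
 *         productName: 'My Product'
 *     ]
 * ```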
*/
@GenerateDocumentation
void call(Map parameters = [:]) {
    handlePipelineStepErrors(stepName: STEP_NAME, stepParameters: parameters) {
        def script = checkScript(this, parameters) ?: this
        def utils = parameters.juStabUtils ?: new Utils()
        def descriptorUtils = parameters.descriptorUtilsStub ?: new DescriptorUtils()
        def statusCode = 1

        // load default & individual configuration
        Map config = ConfigurationHelper.newInstance(this)
            .loadStepDefaults(CONFIG_KEY_COMPATIBILITY)
            .mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS, CONFIG_KEY_COMPATIBILITY)
            .mixinStepConfig(script.commonPipelineEnvironment, STEP_CONFIG_KEYS, CONFIG_KEY_COMPATIBILITY)
            .mixinStageConfig(script.commonPipelineEnvironment, parameters.stageName ?: env.STAGE_NAME, STEP_CONFIG_KEYS, CONFIG_KEY_COMPATIBILITY)
            .mixin([
                style : libraryResource('piper-os.css')
            ])
            .mixin(parameters, PARAMETER_KEYS, CONFIG_KEY_COMPATIBILITY)
            .dependingOn('scanType').mixin('buildDescriptorFile')
            .dependingOn('scanType').mixin('dockerImage')
            .dependingOn('scanType').mixin('dockerWorkspace')
            .dependingOn('scanType').mixin('stashContent')
            .dependingOn('scanType').mixin('whitesource/configFilePath')
            .dependingOn('scanType').mixin('whitesource/installCommand')
            .withMandatoryProperty('whitesource/serviceUrl')
            .withMandatoryProperty('whitesource/orgToken')
            .withMandatoryProperty('whitesource/userTokenCredentialsId')
            .withMandatoryProperty('whitesource/productName')
            .use()

        // normalize the severity limit to an Integer; keep it unset if it has not been configured
        config.whitesource.cvssSeverityLimit = config.whitesource.cvssSeverityLimit == null ? null : Integer.valueOf(config.whitesource.cvssSeverityLimit)
        config.stashContent = utils.unstashAll(config.stashContent)
        config.whitesource['projectNames'] = (config.whitesource['projectNames'] instanceof List) ? config.whitesource['projectNames'] : config.whitesource['projectNames']?.tokenize(',')
        parameters.whitesource = parameters.whitesource ?: [:]
        parameters.whitesource['projectNames'] = config.whitesource['projectNames']

        script.commonPipelineEnvironment.setInfluxStepData('whitesource', false)

        utils.pushToSWA([
            step: STEP_NAME,
            stepParamKey1: 'scanType',
            stepParam1: config.scanType
        ], config)

        echo "Parameters: scanType: ${config.scanType}"

        def whitesourceRepository = parameters.whitesourceRepositoryStub ?: new WhitesourceRepository(this, config)
        def whitesourceOrgAdminRepository = parameters.whitesourceOrgAdminRepositoryStub ?: new WhitesourceOrgAdminRepository(this, config)

        if (config.whitesource.orgAdminUserTokenCredentialsId) {
            statusCode = triggerWhitesourceScanWithOrgAdminUserKey(script, config, utils, descriptorUtils, parameters, whitesourceRepository, whitesourceOrgAdminRepository)
        } else {
            statusCode = triggerWhitesourceScanWithUserKey(script, config, utils, descriptorUtils, parameters, whitesourceRepository, whitesourceOrgAdminRepository)
        }
        checkStatus(statusCode, config)

        script.commonPipelineEnvironment.setInfluxStepData('whitesource', true)
    }
}

private def triggerWhitesourceScanWithOrgAdminUserKey(script, config, utils, descriptorUtils, parameters, repository, orgAdminRepository) {
    withCredentials ([script.string(
        credentialsId: config.whitesource.orgAdminUserTokenCredentialsId,
        variable: 'orgAdminUserKey'
    )]) {
        config.whitesource.orgAdminUserKey = orgAdminUserKey
        triggerWhitesourceScanWithUserKey(script, config, utils, descriptorUtils, parameters, repository, orgAdminRepository)
    }
}

private def triggerWhitesourceScanWithUserKey(script, config, utils, descriptorUtils, parameters, repository, orgAdminRepository) {
    withCredentials ([string(
        credentialsId: config.whitesource.userTokenCredentialsId,
        variable: 'userKey'
    )]) {
        config.whitesource.userKey = userKey
        def statusCode = 1
        echo "Triggering Whitesource scan on product '${config.whitesource.productName}'${config.whitesource.productToken ? ' with token \'' + config.whitesource.productToken + '\'' : ''} using product admin credentials with ID '${config.whitesource.userTokenCredentialsId}'${config.whitesource.orgAdminUserTokenCredentialsId ? ' and organization admin credentials with ID \'' + config.whitesource.orgAdminUserTokenCredentialsId + '\'' : ''}"

        if (!config.whitesource.productToken) {
            def metaInfo = orgAdminRepository.fetchProductMetaInfo()
            def key = "token"
            if ((null == metaInfo || !metaInfo[key]) && config.whitesource.createProductFromPipeline) {
                metaInfo = orgAdminRepository.createProduct()
                key = "productToken"
            } else if (null == metaInfo || !metaInfo[key]) {
                error "[WhiteSource] Could not fetch/find requested product '${config.whitesource.productName}' and automatic creation has been disabled"
            }
            echo "Meta Info: ${metaInfo}"
            config.whitesource.productToken = metaInfo[key]
        }

        switch (config.scanType) {
            case 'mta':
                def scanJobs = [:]
                def mtaParameters = [:] + parameters + [reporting: false]
                // harmonize buildDescriptorExcludeList
                config.buildDescriptorExcludeList = config.buildDescriptorExcludeList instanceof List ? config.buildDescriptorExcludeList : config.buildDescriptorExcludeList?.replaceAll(', ', ',').replaceAll(' ,', ',').tokenize(',')
                // create job for each pom.xml with scanType: 'maven'
                scanJobs.putAll(MtaMultiplexer.createJobs(
                    this, mtaParameters, config.buildDescriptorExcludeList, 'Whitesource', 'pom.xml', 'maven'
                ) { options -> return whitesourceExecuteScan(options) })
                // create job for each package.json with scanType: 'npm'
                scanJobs.putAll(MtaMultiplexer.createJobs(
                    this, mtaParameters, config.buildDescriptorExcludeList, 'Whitesource', 'package.json', 'npm'
                ) { options -> whitesourceExecuteScan(options) })
                // create job for each setup.py with scanType: 'pip'
                scanJobs.putAll(MtaMultiplexer.createJobs(
                    this, mtaParameters, config.buildDescriptorExcludeList, 'Whitesource', 'setup.py', 'pip'
                ) { options -> whitesourceExecuteScan(options) })
                // execute scan jobs
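                // Note: if parallelLimit caps concurrency (e.g. parallelLimit = 2 for jobs [A, B, C], illustrative names),
                // the jobs run block-wise as {A, B} followed by {C}; otherwise all jobs run in a single parallel block.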
                if (config.whitesource.parallelLimit > 0 && config.whitesource.parallelLimit < scanJobs.keySet().size()) {
                    // block wise
                    def scanJobsAll = scanJobs
                    scanJobs = [failFast: false]
                    for (int i = 1; i <= scanJobsAll.keySet().size(); i++) {
                        def index = i - 1
                        def key = scanJobsAll.keySet()[index]
                        scanJobs[key] = scanJobsAll[key]
                        if (i % config.whitesource.parallelLimit == 0 || i == scanJobsAll.keySet().size()) {
                            parallel scanJobs
                            scanJobs = [failFast: false]
                        }
                    }
                } else {
                    // in parallel
                    scanJobs += [failFast: false]
                    parallel scanJobs
                }
                statusCode = 0
                break
            default:
                def path = config.buildDescriptorFile.substring(0, config.buildDescriptorFile.lastIndexOf('/') + 1)
                resolveProjectIdentifiers(script, descriptorUtils, config)

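                // The WhiteSource project name combines projectName and productVersion,
                // e.g. 'com.sap.piper.demo' with productVersion '1' becomes 'com.sap.piper.demo - 1' (illustrative values).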
                def projectName = "${config.whitesource.projectName}${config.whitesource.productVersion?' - ':''}${config.whitesource.productVersion?:''}".toString()
                if (!config.whitesource['projectNames'].contains(projectName))
                    config.whitesource['projectNames'].add(projectName)

                WhitesourceConfigurationHelper.extendUAConfigurationFile(script, utils, config, path)
                dockerExecute(script: script, dockerImage: config.dockerImage, dockerWorkspace: config.dockerWorkspace, stashContent: config.stashContent) {
                    if (config.whitesource.agentDownloadUrl) {
                        def agentDownloadUrl = new GStringTemplateEngine().createTemplate(config.whitesource.agentDownloadUrl).make([config: config]).toString()
                        // if agentDownloadUrl is empty, rely on the dockerImage to contain the Unified Agent correctly set up and available
                        sh "curl ${script.env.HTTP_PROXY ? '--proxy ' + script.env.HTTP_PROXY + ' ' : ''}--location --output ${config.whitesource.agentFileName} ${agentDownloadUrl}".toString()
                    }

                    def javaCmd = 'java'
                    if (config.whitesource.jreDownloadUrl) {
                        // if jreDownloadUrl is empty, rely on the dockerImage to contain java correctly set up and available on the path
                        sh "curl ${script.env.HTTP_PROXY ? '--proxy ' + script.env.HTTP_PROXY + ' ' : ''}--location --output jvm.tar.gz ${config.whitesource.jreDownloadUrl} && tar --strip-components=1 -xzf jvm.tar.gz".toString()
                        javaCmd = './bin/java'
                    }

                    if (config.whitesource.installCommand)
                        sh new GStringTemplateEngine().createTemplate(config.whitesource.installCommand).make([config: config]).toString()

                    def options = ["-jar ${config.whitesource.agentFileName} -c \'${config.whitesource.configFilePath}\'"]
                    if (config.whitesource.orgToken) options.push("-apiKey '${config.whitesource.orgToken}'")
                    if (config.whitesource.userKey) options.push("-userKey '${config.whitesource.userKey}'")
                    if (config.whitesource.productName) options.push("-product '${config.whitesource.productName}'")

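                    // The assembled Unified Agent call has the shape (illustrative values only):
                    //   java -jar wss-unified-agent.jar -c './wss-unified-agent.config' -apiKey '***' -userKey '***' -product 'My Product' <agentParameters>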
                    statusCode = sh(script: "${javaCmd} ${options.join(' ')} ${config.whitesource.agentParameters}", returnStatus: true)

                    if (config.whitesource.agentDownloadUrl) {
                        sh "rm -f ${config.whitesource.agentFileName}"
                    }

                    if (config.whitesource.jreDownloadUrl) {
                        sh "rm -rf ./bin ./conf ./legal ./lib ./man"
                        sh "rm -f jvm.tar.gz"
                    }

                    // archive whitesource result files for UA
                    archiveArtifacts artifacts: "whitesource/*.*", allowEmptyArchive: true

                    // archive whitesource debug files, if available
                    archiveArtifacts artifacts: "**/ws-l*", allowEmptyArchive: true
                }
                break
        }

        if (config.reporting) {
            analyseWhitesourceResults(config, repository)
        }

        return statusCode
    }
}

private resolveProjectIdentifiers(script, descriptorUtils, config) {
    if (!config.whitesource.projectName || !config.whitesource.productVersion) {
        def gav
        switch (config.scanType) {
            case 'npm':
                gav = descriptorUtils.getNpmGAV(config.buildDescriptorFile)
                break
            case 'sbt':
                gav = descriptorUtils.getSbtGAV(config.buildDescriptorFile)
                break
            case 'pip':
                gav = descriptorUtils.getPipGAV(config.buildDescriptorFile)
                break
            case 'golang':
                gav = descriptorUtils.getGoGAV(config.buildDescriptorFile, new URI(script.commonPipelineEnvironment.getGitHttpsUrl()))
                break
            case 'dlang':
                break
            case 'maven':
                gav = descriptorUtils.getMavenGAV(config.buildDescriptorFile)
                break
        }

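        // Illustrative example: group 'com.sap.piper', artifact 'demo' and version '1.2.3'
        // resolve to projectName 'com.sap.piper.demo' and productVersion '1' (major version only).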
        if (!config.whitesource.projectName)
            config.whitesource.projectName = "${gav.group?:''}${gav.group?'.':''}${gav.artifact}"

        def versionFragments = gav.version?.tokenize('.')
        def version = versionFragments.size() > 0 ? versionFragments.head() : null
        if (version && !config.whitesource.productVersion)
            config.whitesource.productVersion = version
    }
}

void analyseWhitesourceResults(Map config, WhitesourceRepository repository) {
    def pdfName = "whitesource-riskReport.pdf"
    repository.fetchReportForProduct(pdfName)
    archiveArtifacts artifacts: pdfName
    echo "A summary of the Whitesource findings was stored as artifact under the name ${pdfName}"

    if (config.whitesource.licensingVulnerabilities) {
        def violationCount = fetchViolationCount(config, repository)
        checkViolationStatus(violationCount)
    }

    if (config.whitesource.securityVulnerabilities)
        config.whitesource.severeVulnerabilities = checkSecurityViolations(config, repository)
}

int fetchViolationCount(Map config, WhitesourceRepository repository) {
    int violationCount = 0
    if (config.whitesource?.projectNames) {
        def projectsMeta = repository.fetchProjectsMetaInfo()
        for (int i = 0; i < projectsMeta.size(); i++) {
            def project = projectsMeta[i]
            def responseAlertsProject = repository.fetchProjectLicenseAlerts(project.token)
            violationCount += responseAlertsProject.alerts.size()
        }
    } else {
        def responseAlerts = repository.fetchProductLicenseAlerts()
        violationCount += responseAlerts.alerts.size()
    }
    return violationCount
}

void checkViolationStatus(int violationCount) {
    if (violationCount == 0) {
        echo "[${STEP_NAME}] No policy violations found"
    } else {
        error "[${STEP_NAME}] Whitesource found ${violationCount} policy violations for your product"
    }
}

int checkSecurityViolations(Map config, WhitesourceRepository repository) {
    def projectsMetaInformation = repository.fetchProjectsMetaInfo()
    def vulnerabilities = repository.fetchVulnerabilities(projectsMetaInformation)
    def severeVulnerabilities = 0
    vulnerabilities.each {
        item ->
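            // A finding counts as severe when its CVSS v2 or v3 score reaches the configured limit,
            // e.g. with cvssSeverityLimit = 7 a score of 7.5 counts while 6.9 does not (illustrative scores).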
            if ((item.vulnerability.score >= config.whitesource.cvssSeverityLimit || item.vulnerability.cvss3_score >= config.whitesource.cvssSeverityLimit) && config.whitesource.cvssSeverityLimit >= 0)
                severeVulnerabilities++
    }

    writeFile(file: "${config.vulnerabilityReportFileName}.json", text: new JsonUtils().groovyObjectToPrettyJsonString(vulnerabilities))
    writeFile(file: "${config.vulnerabilityReportFileName}.html", text: getReportHtml(config, vulnerabilities, severeVulnerabilities))
    archiveArtifacts(artifacts: "${config.vulnerabilityReportFileName}.*")

    if (vulnerabilities.size() - severeVulnerabilities > 0)
        echo "[${STEP_NAME}] WARNING: ${vulnerabilities.size() - severeVulnerabilities} Open Source Software Security vulnerabilities with CVSS score below ${config.whitesource.cvssSeverityLimit} detected."
    if (vulnerabilities.size() == 0)
        echo "[${STEP_NAME}] No Open Source Software Security vulnerabilities detected."

    return severeVulnerabilities
}

// ExitCodes: https://whitesource.atlassian.net/wiki/spaces/WD/pages/34209870/NPM+Plugin#NPMPlugin-ExitCode
void checkStatus(int statusCode, config) {
    def errorMessage = ""
    if (config.whitesource.securityVulnerabilities && config.whitesource.severeVulnerabilities > 0)
        errorMessage += "${config.whitesource.severeVulnerabilities} Open Source Software Security vulnerabilities with CVSS score greater or equal ${config.whitesource.cvssSeverityLimit} detected. - "
    if (config.whitesource.licensingVulnerabilities)
        switch (statusCode) {
            case 0:
                break
            case 255:
                errorMessage += "The scan resulted in an error"
                break
            case 254:
                errorMessage += "Whitesource found one or multiple policy violations"
                break
            case 253:
                errorMessage += "The local scan client failed to execute the scan"
                break
            case 252:
                errorMessage += "There was a failure in the connection to the WhiteSource servers"
                break
            case 251:
                errorMessage += "The server failed to analyze the scan"
                break
            case 250:
                errorMessage += "Pre-step failure"
                break
            default:
                errorMessage += "Whitesource scan failed with unknown error code '${statusCode}'"
        }

    if (errorMessage)
        error "[${STEP_NAME}] " + errorMessage
}

def getReportHtml(config, vulnerabilityList, numSevereVulns) {
    def now = new Date().format('MMM dd, yyyy - HH:mm:ss z', TimeZone.getTimeZone('UTC'))
    def vulnerabilityTable = ''
    if (vulnerabilityList.size() == 0) {
        vulnerabilityTable += '''
            <tr>
                <td colspan=12> No publicly known vulnerabilities detected </td>
            </tr>'''
    } else {
        for (int i = 0; i < vulnerabilityList.size(); i++) {
            def item = vulnerabilityList[i]
            def score = item.vulnerability.cvss3_score > 0 ? item.vulnerability.cvss3_score : item.vulnerability.score
            def topFix = item.vulnerability.topFix ? "${item.vulnerability.topFix?.message}<br>${item.vulnerability.topFix?.fixResolution}<br><a href=\"${item.vulnerability.topFix?.url}\">${item.vulnerability.topFix?.url}</a>" : ''
            vulnerabilityTable += """
            <tr>
                <td>${i + 1}</td>
                <td>${item.date}</td>
                <td><a href=\"${item.vulnerability.url}\">${item.vulnerability.name}</a></td>
                <td class=\"${score < config.whitesource.cvssSeverityLimit ? 'warn' : 'notok'}\">${score}</td>
                <td>${item.vulnerability.cvss3_score > 0 ? 'v3' : 'v2'}</td>
                <td>${item.project}</td>
                <td>${item.library.filename}</td>
                <td>${item.library.groupId}</td>
                <td>${item.library.artifactId}</td>
                <td>${item.library.version}</td>
                <td>${item.vulnerability.description}</td>
                <td>${topFix}</td>
            </tr>"""
        }
    }

    return SimpleTemplateEngine.newInstance().createTemplate(libraryResource('com.sap.piper/templates/whitesourceVulnerabilities.html')).make(
        [
            now                        : now,
            reportTitle                : config.whitesource.vulnerabilityReportTitle,
            style                      : config.style,
            cvssSeverityLimit          : config.whitesource.cvssSeverityLimit,
            totalSevereVulnerabilities : numSevereVulns,
            totalVulnerabilities       : vulnerabilityList.size(),
            vulnerabilityTable         : vulnerabilityTable,
            whitesourceProductName     : config.whitesource.productName,
            whitesourceProjectNames    : config.whitesource.projectNames
        ]).toString()
}