
Merge branch 'master' of github.com:SAP/jenkins-library into revdoc

Roland Stengel 2019-06-12 11:47:37 +02:00
commit 2313a83dd6
115 changed files with 1232 additions and 99 deletions

View File

@ -1,8 +1,10 @@
import groovy.io.FileType
import groovy.json.JsonOutput
import groovy.json.JsonSlurper
import org.yaml.snakeyaml.Yaml
import org.codehaus.groovy.control.CompilerConfiguration
import com.sap.piper.GenerateDocumentation
import com.sap.piper.GenerateStageDocumentation
import java.util.regex.Matcher
import groovy.text.StreamingTemplateEngine
@ -13,6 +15,42 @@ import com.sap.piper.MapUtils
//
class TemplateHelper {
static createDependencyList(Set deps) {
def t = ''
t += 'The step depends on the following Jenkins plugins\n\n'
def filteredDeps = deps.findAll { dep -> dep != 'UNIDENTIFIED' }
if(filteredDeps.contains('kubernetes')) {
// The docker plugin is not detected by the tests since it is not
// handled via step call, but it is added to the environment.
// However, the kubernetes plugin and the docker plugin are closely related,
// hence adding docker if kubernetes is present.
filteredDeps.add('docker')
}
if(filteredDeps.isEmpty()) {
t += '* <none>\n'
} else {
filteredDeps
.sort()
.each { dep -> t += "* [${dep}](https://plugins.jenkins.io/${dep})\n" }
}
if(filteredDeps.contains('kubernetes')) {
t += "\nThe kubernetes plugin is only used if running in a kubernetes environment."
}
t += '''|
|Transitive dependencies are omitted.
|
|The list might be incomplete.
|
|Consider using the [ppiper/jenkins-master](https://cloud.docker.com/u/ppiper/repository/docker/ppiper/jenkins-master)
|docker image. This image comes with preinstalled plugins.
|'''.stripMargin()
return t
}
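// Usage sketch (hypothetical input): 'UNIDENTIFIED' entries are filtered out and each remaining
// plugin becomes a markdown bullet linking to https://plugins.jenkins.io/<plugin>, e.g.
//   def md = TemplateHelper.createDependencyList(['credentials-binding', 'UNIDENTIFIED'] as Set)
//   assert md.contains('* [credentials-binding](https://plugins.jenkins.io/credentials-binding)')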
static createParametersTable(Map parameters) {
def t = ''
@ -23,8 +61,7 @@ class TemplateHelper {
def props = parameters.get(it)
def defaultValue = isComplexDefault(props.defaultValue) ? renderComplexDefaultValue(props.defaultValue) :
props.defaultValue != null ? "`${props.defaultValue}`" : ''
def defaultValue = isComplexDefault(props.defaultValue) ? renderComplexDefaultValue(props.defaultValue) : renderSimpleDefaultValue(props.defaultValue)
t += "| `${it}` | ${props.mandatory ?: props.required ? 'yes' : 'no'} | ${defaultValue} | ${props.value ?: ''} |\n"
}
@ -49,6 +86,11 @@ class TemplateHelper {
.join('<br />')
}
private static renderSimpleDefaultValue(def _default) {
if (_default == null) return ''
return "`${_default}`"
}
static createParameterDescriptionSection(Map parameters) {
def t = ''
parameters.keySet().toSorted().each {
@ -79,6 +121,98 @@ class TemplateHelper {
t.trim()
}
static createStageContentSection(Map stageDescriptions) {
def t = 'This stage comprises the following steps which are activated depending on your use-case/configuration:\n\n'
t += '| step | step description |\n'
t += '| ---- | ---------------- |\n'
stageDescriptions.each {step, description ->
t += "| [${step}](../steps/${step}.md) | ${description.trim()} |\n"
}
return t
}
static createStageActivationSection() {
def t = '''This stage will be active if any one of the following conditions is met:
* Stage configuration in [config.yml file](../configuration.md) contains entries for this stage.
* Any of the conditions are met which are explained in the section [Step Activation](#step-activation).
'''
return t.trim()
}
static createStepActivationSection(Map configConditions) {
if (!configConditions) return 'For this stage no conditions are assigned to steps.'
def t = 'Certain steps will be activated automatically depending on the following conditions:\n\n'
t += '| step | config key | config value | file pattern |\n'
t += '| ---- | ---------- | ------------ | ------------ |\n'
configConditions?.each {stepName, conditions ->
t += "| ${stepName} "
t += "| ${renderValueList(conditions?.configKeys)} "
t += "| ${renderValueList(mapToValueList(conditions?.config))} "
List filePatterns = []
if (conditions?.filePattern) filePatterns.add(conditions?.filePattern)
if (conditions?.filePatternFromConfig) filePatterns.add(conditions?.filePatternFromConfig)
t += "| ${renderValueList(filePatterns)} |\n"
}
t += '''
!!! info "Step condition details"
There are currently several conditions which can be checked.<br /> This is done in the [Init stage](init.md) of the pipeline shortly after checkout of the source code repository.<br />
**Important: It is sufficient that any one condition per step is met.**
* `config key`: Checks if a defined configuration parameter is set.
* `config value`: Checks if a configuration parameter has a defined value.
* `file pattern`: Checks if files matching a defined pattern exist in the project. Either the pattern is specified directly or it is retrieved from a configuration parameter.
!!! note "Overruling step activation conditions"
It is possible to overrule the automatically detected step activation status.<br />
Just add to your stage configuration `<stepName>: false`, for example `deployToKubernetes: false`.
For details about the configuration options, please see [Configuration of Piper](../configuration.md).
'''
return t
}
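// Illustration (hypothetical configuration): the configConditions map passed in above mirrors the
// parsed stepConditions section of the stage configuration, e.g.
//   [deployToKubernetes: [configKeys: ['kubernetesDeploy'], config: [deployTool: 'helm'], filePattern: '**/Dockerfile']]
// and is rendered as one table row per step, listing config keys, config values and file patterns.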
private static renderValueList(List valueList) {
if (!valueList) return ''
if (valueList.size() > 1) {
List quotedList = []
valueList.each {listItem ->
quotedList.add("-`${listItem}`")
}
return quotedList.join('<br />')
} else {
return "`${valueList[0]}`"
}
}
private static mapToValueList(Map map) {
List valueList = []
map?.each {key, value ->
if (value instanceof List) {
value.each {listItem ->
valueList.add("${key}: ${listItem}")
}
} else {
valueList.add("${key}: ${value}")
}
}
return valueList
}
static createStageConfigurationSection() {
return 'The stage parameters need to be defined in the section `stages` of [config.yml file](../configuration.md).'
}
}
//
@ -119,6 +253,11 @@ class Helper {
prepareDefaultValuesStep
}
static Map getYamlResource(String resource) {
def ymlContent = new File(projectRoot,"resources/${resource}").text
return new Yaml().load(ymlContent)
}
static getDummyScript(def prepareDefaultValuesStep, def stepName, Map prepareDefaultValuesStepParams) {
def _prepareDefaultValuesStep = prepareDefaultValuesStep
@ -347,6 +486,15 @@ class Helper {
return params
}
static getStageStepKeys(def script) {
try {
return script.STAGE_STEP_KEYS ?: []
} catch (groovy.lang.MissingPropertyException ex) {
System.err << "[INFO] STAGE_STEP_KEYS not set for: ${script.STEP_NAME}.\n"
return []
}
}
static getRequiredParameters(File f) {
def params = [] as Set
f.eachLine {
@ -387,7 +535,7 @@ class Helper {
def scriptName = (it =~ /vars\${File.separator}(.*)\.groovy/)[0][1]
def stepScript = gse.createScript("${scriptName}.groovy", new Binding())
for (def method in stepScript.getClass().getMethods()) {
if(method.getName() == 'call' && method.getAnnotation(GenerateDocumentation) != null) {
if(method.getName() == 'call' && (method.getAnnotation(GenerateDocumentation) != null || method.getAnnotation(GenerateStageDocumentation) != null)) {
docuRelevantSteps << scriptName
break
}
@ -396,6 +544,26 @@ class Helper {
}
docuRelevantSteps
}
static resolveDocuRelevantStages(GroovyScriptEngine gse, File stepsDir) {
def docuRelevantStages = [:]
stepsDir.traverse(type: FileType.FILES, maxDepth: 0) {
if(it.getName().endsWith('.groovy')) {
def scriptName = (it =~ /vars\${File.separator}(.*)\.groovy/)[0][1]
def stepScript = gse.createScript("${scriptName}.groovy", new Binding())
for (def method in stepScript.getClass().getMethods()) {
GenerateStageDocumentation stageDocsAnnotation = method.getAnnotation(GenerateStageDocumentation)
if(method.getName() == 'call' && stageDocsAnnotation != null) {
docuRelevantStages[scriptName] = stageDocsAnnotation.defaultStageName()
break
}
}
}
}
docuRelevantStages
}
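// A step script under vars/ is treated as stage documentation when its call method carries the
// @GenerateStageDocumentation annotation; a minimal, hypothetical vars/somePipelineStageBuild.groovy:
//   @GenerateStageDocumentation(defaultStageName = 'Build')
//   void call(Map parameters = [:]) { ... }
// The annotation's defaultStageName value becomes the stage name stored in docuRelevantStages.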
}
roots = [
@ -405,7 +573,8 @@ roots = [
stepsDir = null
stepsDocuDir = null
String customDefaults = null
stagesDocuDir = null
customDefaults = null
steps = []
@ -421,7 +590,9 @@ def cli = new CliBuilder(
cli.with {
s longOpt: 'stepsDir', args: 1, argName: 'dir', 'The directory containing the steps. Defaults to \'vars\'.'
d longOpt: 'docuDir', args: 1, argName: 'dir', 'The directory containing the docu stubs. Defaults to \'documentation/docs/steps\'.'
p longOpt: 'docuDirStages', args: 1, argName: 'dir', 'The directory containing the docu stubs for pipeline stages. Defaults to \'documentation/docs/stages\'.'
c longOpt: 'customDefaults', args: 1, argName: 'file', 'Additional custom default configuration'
i longOpt: 'stageInitFile', args: 1, argName: 'file', 'The file containing initialization data for step piperInitRunStageConfiguration'
h longOpt: 'help', 'Prints this help.'
}
@ -433,17 +604,30 @@ if(options.h) {
return
}
if(options.s)
if(options.s){
System.err << "[INFO] Using custom step root: ${options.s}.\n"
stepsDir = new File(Helper.projectRoot, options.s)
}
stepsDir = stepsDir ?: new File(Helper.projectRoot, "vars")
if(options.d)
if(options.d) {
System.err << "[INFO] Using custom doc dir for steps: ${options.d}.\n"
stepsDocuDir = new File(Helper.projectRoot, options.d)
}
stepsDocuDir = stepsDocuDir ?: new File(Helper.projectRoot, "documentation/docs/steps")
if(options.p) {
System.err << "[INFO] Using custom doc dir for stages: ${options.p}.\n"
stagesDocuDir = new File(Helper.projectRoot, options.p)
}
stagesDocuDir = stagesDocuDir ?: new File(Helper.projectRoot, "documentation/docs/stages")
if(options.c) {
System.err << "[INFO] Using custom defaults: ${options.c}.\n"
customDefaults = options.c
}
@ -478,6 +662,16 @@ if( ! steps) {
System.err << "[INFO] Generating docu only for step ${steps.size > 1 ? 's' : ''} ${steps}.\n"
}
// find all the stages that we have to document
Map stages = Helper.resolveDocuRelevantStages(gse, stepsDir)
// retrieve default conditions for steps
//ToDo: allow passing config file name via parameter
Map stageConfig
if (options.i) {
stageConfig = Helper.getYamlResource(options.i)
}
def prepareDefaultValuesStep = Helper.getPrepareDefaultValuesStep(gse)
boolean exceptionCaught = false
@ -507,6 +701,39 @@ for(step in stepDescriptors) {
}
}
//update stepDescriptors: remove stages and put into separate stageDescriptors map
def stageDescriptors = [:]
stages.each {key, value ->
System.err << "[INFO] Processing stage '${key}' ...\n"
stageDescriptors."${key}" = [:] << stepDescriptors."${key}"
stepDescriptors.remove(key)
//add stage name to stageDescriptors
stageDescriptors."${key}".name = value
//add stepCondition information to stageDescriptors
stageDescriptors."${key}".configConditions = stageConfig?.stages?.get(value)?.stepConditions
//identify step keys in stages
def stageStepKeys = Helper.getStageStepKeys(gse.createScript( "${key}.groovy", new Binding() ))
// prepare step descriptions
stageDescriptors."${key}".stepDescriptions = [:]
stageDescriptors."${key}".parameters.each {paramKey, paramValue ->
if (paramKey in stageStepKeys) {
stageDescriptors."${key}".stepDescriptions."${paramKey}" = "${paramValue.docu ?: ''}\n"
}
}
//remove details from parameter map
stageStepKeys.each {stepKey ->
stageDescriptors."${key}".parameters.remove(stepKey)
}
}
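// After this loop each stageDescriptors entry has roughly the following (hypothetical) shape:
//   [name: 'Build', description: '...', configConditions: [...],
//    stepDescriptions: [someStep: '...'], parameters: [:]]
// i.e. the step-typed keys have been moved out of 'parameters' into 'stepDescriptions'.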
for(step in stepDescriptors) {
try {
renderStep(step.key, step.value)
@ -517,6 +744,16 @@ for(step in stepDescriptors) {
}
}
for (stage in stageDescriptors) {
try {
renderStage(stage.key, stage.value)
System.err << "[INFO] Stage '${stage.key}' has been rendered.\n"
} catch(Exception e) {
exceptionCaught = true
System.err << "${e.getClass().getName()} caught while rendering stage '${stage}': ${e.getMessage()}.\n"
}
}
if(exceptionCaught) {
System.err << "[ERROR] Exception caught during generating documentation. Check earlier log for details.\n"
System.exit(1)
@ -541,14 +778,41 @@ void renderStep(stepName, stepProperties) {
docGenStepName : stepName,
docGenDescription : 'Description\n\n' + stepProperties.description,
docGenParameters : 'Parameters\n\n' + TemplateHelper.createParametersSection(stepProperties.parameters),
docGenConfiguration : 'Step configuration\n\n' + TemplateHelper.createStepConfigurationSection(stepProperties.parameters)
docGenConfiguration : 'Step configuration\n\n' + TemplateHelper.createStepConfigurationSection(stepProperties.parameters),
docJenkinsPluginDependencies : 'Dependencies\n\n' + TemplateHelper.createDependencyList(stepProperties.dependencies)
]
def template = new StreamingTemplateEngine().createTemplate(theStepDocu.text)
String text = template.make(binding)
theStepDocu.withWriter { w -> w.write text }
}
void renderStage(stageName, stageProperties) {
def stageFileName = stageName.indexOf('Stage') != -1 ? stageName.split('Stage')[1].toLowerCase() : stageName
File theStageDocu = new File(stagesDocuDir, "${stageFileName}.md")
if(!theStageDocu.exists()) {
System.err << "[WARNING] stage docu input file for stage '${stageName}' is missing.\n"
return
}
def binding = [
docGenStageName : stageProperties.name,
docGenDescription : stageProperties.description,
docGenStageContent : 'Stage Content\n\n' + TemplateHelper.createStageContentSection(stageProperties.stepDescriptions),
docGenStageActivation: 'Stage Activation\n\n' + TemplateHelper.createStageActivationSection(),
docGenStepActivation: 'Step Activation\n\n' + TemplateHelper.createStepActivationSection(stageProperties.configConditions),
docGenStageParameters : 'Additional Stage Parameters\n\n' + TemplateHelper.createParametersSection(stageProperties.parameters),
docGenStageConfiguration : 'Configuration of Additional Stage Parameters\n\n' + TemplateHelper.createStageConfigurationSection()
]
def template = new StreamingTemplateEngine().createTemplate(theStageDocu.text)
String text = template.make(binding)
theStageDocu.withWriter { w -> w.write text }
}
def fetchTextFrom(def step, def parameterName, def steps) {
try {
def docuFromOtherStep = steps[step]?.parameters[parameterName]?.docu
@ -577,6 +841,13 @@ def handleStep(stepName, prepareDefaultValuesStep, gse, customDefaults) {
File theStep = new File(stepsDir, "${stepName}.groovy")
File theStepDocu = new File(stepsDocuDir, "${stepName}.md")
File theStepDeps = new File('documentation/jenkins_workspace/plugin_mapping.json')
if (!theStepDocu.exists() && stepName.indexOf('Stage') != -1) {
//try to get a corresponding stage documentation
def stageName = stepName.split('Stage')[1].toLowerCase()
theStepDocu = new File(stagesDocuDir,"${stageName}.md" )
}
if(!theStepDocu.exists()) {
System.err << "[WARNING] step docu input file for step '${stepName}' is missing.\n"
@ -628,7 +899,18 @@ def handleStep(stepName, prepareDefaultValuesStep, gse, customDefaults) {
// 'dependentConfig' is only present here for internal reasons and that entry is removed at
// end of method.
def step = [parameters:[:], dependentConfig: [:]]
def step = [
parameters:[:],
dependencies: (Set)[],
dependentConfig: [:]
]
//
// provide dependencies to Jenkins plugins
if(theStepDeps.exists()) {
def pluginDependencies = new JsonSlurper().parse(theStepDeps)
step.dependencies.addAll(pluginDependencies[stepName].collect { k, v -> k })
}
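// For illustration (hypothetical step name), plugin_mapping.json maps each step to the Jenkins
// plugins resolved for it, together with the calls that led to each plugin, e.g.
//   [someStep: ['credentials-binding': ['usernamePassword', 'string'], 'kubernetes': ['container']]]
// Only the plugin names (the map keys) are collected into step.dependencies above.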
//
// START special handling for 'script' parameter

View File

@ -3,6 +3,44 @@
d=$(dirname "$0")
[ ! -z "$d" ] && d="$d/"
WS_OUT="$(pwd)/documentation/jenkins_workspace"
WS_IN=/workspace
STEP_CALL_MAPPING_FILE_NAME=step_calls_mapping.json
PLUGIN_MAPPING_FILE_NAME=plugin_mapping.json
CALLS="${WS_OUT}/${STEP_CALL_MAPPING_FILE_NAME}"
PLUGIN_MAPPING="${WS_OUT}/${PLUGIN_MAPPING_FILE_NAME}"
for f in ${CALLS} ${PLUGIN_MAPPING}
do
[ -e "${f}" ] && rm -rf "${f}"
done
export CLASSPATH_FILE='target/cp.txt'
mvn compile dependency:build-classpath -Dmdep.outputFile=${CLASSPATH_FILE} > /dev/null 2>&1
mvn clean test dependency:build-classpath -Dmdep.outputFile=${CLASSPATH_FILE} > /dev/null 2>&1
# --in: is created by the unit tests. It contains a mapping between the test case (name is
# already adjusted) and the pipeline step calls it performs.
# --out: Contains a transformed version. The calls to other pipeline steps are resolved in a
# transitive manner. This allows us to report all Jenkins plugin calls (including the calls which
# are performed by other pipeline steps. E.g.: each step basically includes a call to
# handlePipelineStepErrors. The plugin calls issued by handlePipelineStepErrors are also
# reported for the step calling that auxiliary step).
groovy "${d}resolveTransitiveCalls" -in target/trackedCalls.json --out "${CALLS}"
[ -f "${CALLS}" ] || { echo "File \"${CALLS}\" does not exist." ; exit 1; }
docker run \
-w "${WS_IN}" \
--env calls="${WS_IN}/${STEP_CALL_MAPPING_FILE_NAME}" \
--env result="${WS_IN}/${PLUGIN_MAPPING_FILE_NAME}" \
-v "${WS_OUT}:${WS_IN}" \
ppiper/jenkinsfile-runner \
-ns \
-f Jenkinsfile \
--runWorkspace /workspace
[ -f "${PLUGIN_MAPPING}" ] || { echo "Result file containing step to plugin mapping not found (${PLUGIN_MAPPING})."; exit 1; }
groovy -cp "target/classes:$(cat $CLASSPATH_FILE)" "${d}createDocu" "${@}"

View File

@ -0,0 +1,181 @@
import groovy.json.JsonSlurper
def cli = new CliBuilder(
usage: 'groovy createDocu [<options>]',
header: 'Options:',
footer: 'Copyright: SAP SE')
cli.with {
i longOpt: 'in', args: 1, argName: 'file', 'The file containing the mapping as created by the unit tests.'
o longOpt: 'out', args: 1, argName: 'file', 'The file containing the condensed mappings.'
h longOpt: 'help', 'Prints this help.'
}
def options = cli.parse(args)
if(options.h) {
System.err << "Printing help.\n"
cli.usage()
return
}
if(! options.i) {
System.err << "No input file"
cli.usage()
return
}
if(! options.o) {
System.err << "No output file"
cli.usage()
return
}
def steps = new JsonSlurper().parseText(new File(options.i).text)
def piperSteps = steps.piperSteps
def calls = steps.calls
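// For illustration, the input is expected to provide the piper step names plus, per test class,
// the calls tracked for it (hypothetical content):
//   [piperSteps: ['handlePipelineStepErrors', 'checkChangeInDevelopment'],
//    calls: [CheckChangeInDevelopmentTest: ['handlePipelineStepErrors', 'usernamePassword']]]
// The 'Test' suffix is stripped and the first character lower-cased below (see retrieveStepName)
// before the transitive resolution starts.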
// only temporary, in order to avoid manipulating the map
// while iterating over it.
def tmpCalls = [:]
// Adjust naming
calls.each { c ->
tmpCalls.put(retrieveStepName(c.key), c.value as Set)
}
calls = tmpCalls
tmpCalls = null
// Remove self references
calls.each { c ->
c.value.remove(c.key)
}
int counter=0
def alreadyHandled = []
//
// in case we exceed the value we assume some cyclic call
// between pipeline steps.
int MAX_LOOP = 1600
boolean done = false
while(counter < MAX_LOOP) {
def hereWeNeedToReplace = null
def toBeReplaced = null
if(alreadyHandled.size() == calls.size()) {
done = true
break
}
for (def call in calls.entrySet()) {
stepName = call.key
calledSteps = call.value
if(alreadyHandled.contains(stepName)) {
continue
}
for (def calledStep in calledSteps) {
if(! ( calledStep in Map)) {
// in case the calledStep is a map the map
// was introduced in an earlier loop.
// This means this entry is already handled.
if(calledStep in piperSteps) {
toBeReplaced = calledStep
hereWeNeedToReplace = calledSteps
break
}
}
}
if(toBeReplaced) {
def replacement = [:]
replacement[toBeReplaced] = calls[toBeReplaced] as Set
def removed = hereWeNeedToReplace.remove(toBeReplaced)
hereWeNeedToReplace.add(replacement)
counter++
} else {
alreadyHandled << stepName
}
break
}
}
if(! done) {
throw new Exception('Unable to resolve transitive plugin calls.')
}
piperStepCallMappings = [:]
for(def entry : calls.entrySet()) {
def performedCalls = flatten(entry, (Set)[])
piperStepCallMappings.put(entry.key, performedCalls)
}
//
// special handling since the changeManagement util class
// is separated from the steps themselves
//
// should be improved in the future in order not to have
// these bells and whistles here.
def cm = piperStepCallMappings.get('changeManagement')
for (cmStepName in [
'checkChangeInDevelopment',
'transportRequestCreate',
'transportRequestUploadFile',
'transportRequestRelease',
]) {
piperStepCallMappings.get(cmStepName).addAll(cm)
}
// end of special handling
//
File performedCalls = new File(options.o)
if (performedCalls.exists()) performedCalls.delete()
performedCalls << groovy.json.JsonOutput.toJson(piperStepCallMappings)
def flatten(def entry, Set result) {
for(def e : entry.value) {
if(e in Map) { // the map here is expected to hold one entry always
for(def steps : e.entrySet().value) {
for(def step : steps) {
if (step in Map) {
flatten(step, result)
} else {
result << step
}
}
}
} else {
result << e.value.toString()
}
}
result
}
static retrieveStepName(String s) {
firstCharToLowerCase(removeTrailing(s, 'Test'))
}
static removeTrailing(String s, String trail) {
return s.replaceAll(trail + '$', '')
}
static firstCharToLowerCase(CharSequence cs) {
char[] c = cs.getChars()
c[0] = Character.toLowerCase(c[0])
new String(c)
}

View File

@ -0,0 +1,13 @@
# ${docGenStageName}
${docGenDescription}
## ${docGenStageContent}
## ${docGenStageActivation}
## ${docGenStepActivation}
## ${docGenStageParameters}
## ${docGenStageConfiguration}

View File

@ -10,6 +10,8 @@ none
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Example
```groovy

View File

@ -10,6 +10,8 @@ You need to have a Bats test file. By default you would put this into directory
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Example
```groovy

View File

@ -10,6 +10,8 @@
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Exceptions
* `AbortException`:

View File

@ -82,6 +82,8 @@
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
### Thresholds
It is possible to define thresholds to fail the build on a certain count of findings. To achieve this, just define your thresholds as follows for the specific check tool:

View File

@ -13,6 +13,8 @@
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Example
```groovy

View File

@ -10,6 +10,8 @@ Test configuration is available.
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Example
```

View File

@ -9,6 +9,8 @@ You need to store the API token for the Detect service as _'Secret text'_ creden
!!! note "minimum plugin requirement"
This step requires [synopsys-detect-plugin](https://github.com/jenkinsci/synopsys-detect-plugin) with at least version `2.0.0`.
## ${docJenkinsPluginDependencies}
## Example
```groovy

View File

@ -10,6 +10,8 @@ If the Jenkins is setup on a Kubernetes cluster, then you can execute the closur
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Side effects
none

View File

@ -13,6 +13,8 @@
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Side effects
none

View File

@ -6,6 +6,8 @@
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Example
```groovy

View File

@ -12,6 +12,8 @@ none
We recommend to define values of step parameters via [config.yml file](../configuration.md).
## ${docJenkinsPluginDependencies}
## Example
Pipeline step:

View File

@ -1,17 +1,19 @@
# ${docGenStepName}
## ${docGenDescription}
## Prerequisites
You need to create a personal access token within GitHub and add this to the Jenkins credentials store.
Please see [GitHub documentation for details about creating the personal access token](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/).
## ${docJenkinsPluginDependencies}
## ${docGenParameters}
## ${docGenConfiguration}
## ${docGenDescription}
## Example
Usage of pipeline step:

View File

@ -0,0 +1,17 @@
# ${docGenStepName}
## ${docGenDescription}
## ${docGenParameters}
## ${docGenConfiguration}
## Exceptions
None
## Examples
```groovy
hadolintExecute script: this
```

View File

@ -10,6 +10,8 @@ none
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Example
```groovy

View File

@ -16,6 +16,8 @@ Endpoint for health check is configured.
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Example
Pipeline step:

View File

@ -67,6 +67,8 @@ influxDBServer=jenkins
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Example
```groovy

View File

@ -16,6 +16,8 @@ via _Jenkins_ -> _Credentials_ -> _System_ -> _Global credentials (unrestricted)
* File: upload your `config.json` file
* ID: specify id which you then use for the configuration of `dockerConfigJsonCredentialsId` (see below)
## ${docJenkinsPluginDependencies}
## Example
```groovy

View File

@ -7,6 +7,8 @@
* **running Karma tests** - have a NPM module with running tests executed with Karma
* **configured WebDriver** - have the [`karma-webdriver-launcher`](https://github.com/karma-runner/karma-webdriver-launcher) package installed and a custom, WebDriver-based browser configured in Karma
## ${docJenkinsPluginDependencies}
## ${docGenParameters}
## ${docGenConfiguration}

View File

@ -18,6 +18,8 @@ mailSendNotification script: this
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Side effects
none

View File

@ -6,6 +6,8 @@
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Exceptions
None

View File

@ -14,6 +14,8 @@ While using a custom docker file, ensure that the following tools are installed:
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Side effects
1. The file name of the resulting archive is written to the `commonPipelineEnvironment` with variable name `mtarFileName`.

View File

@ -1,11 +1,11 @@
# ${docGenStepName}
## ${docGenDescription}
## ${docGenParameters}
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Examples
```groovy

View File

@ -18,6 +18,8 @@
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Side effects
none

View File

@ -14,6 +14,8 @@
Step uses `dockerExecute` inside.
## ${docJenkinsPluginDependencies}
## Exceptions
none

View File

@ -1,12 +1,12 @@
# ${docGenStepName}
## ${docGenDescription}
## ${docGenParameters}
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Exceptions
None

View File

@ -10,6 +10,8 @@ none
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Side effects
none

View File

@ -31,6 +31,8 @@ pipelineRestartSteps (script: this) {
none
## ${docJenkinsPluginDependencies}
## Exceptions
none

View File

@ -34,6 +34,8 @@ The step is stashing files before and after the build. This is due to the fact,
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Explanation of pipeline step
Usage of pipeline step:

View File

@ -1,7 +1,5 @@
# ${docGenStepName}
## ${docGenDescription}
## Prerequisites
none
@ -9,3 +7,5 @@ none
## ${docGenParameters}
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}

View File

@ -9,3 +9,5 @@ none
## ${docGenParameters}
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}

View File

@ -5,3 +5,5 @@
## ${docGenParameters}
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}

View File

@ -10,6 +10,8 @@
None
## ${docJenkinsPluginDependencies}
## Example
```groovy

View File

@ -68,6 +68,8 @@ webdriverio
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Side effects
none

View File

@ -10,6 +10,8 @@
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Side effects
none

View File

@ -6,12 +6,14 @@
* Installed and configured [Slack JenkinsCI integration](https://my.slack.com/services/new/jenkins-ci)
* *secret text* Jenkins credentials with the Slack token
* Installed and configured [Jenkins Slack plugin](https://github.com/jenkinsci/slack-plugin#install-instructions-for-slack).
* Installed and configured [Jenkins Slack plugin](https://github.com/jenkinsci/slack-plugin#install-instructions-for-slack)
## ${docGenParameters}
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Example
Usage of pipeline step:

View File

@ -11,6 +11,8 @@
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Side effects
Step uses `dockerExecute` inside.

View File

@ -11,6 +11,8 @@
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Exceptions
none

View File

@ -79,6 +79,8 @@ testsPublishResults(
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Side effects
none

View File

@ -11,6 +11,8 @@
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
The step is configured using a customer configuration file provided as a
resource in a custom shared library.

View File

@ -10,6 +10,8 @@
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
The step is configured using a customer configuration file provided as a
resource in a custom shared library.

View File

@ -10,6 +10,8 @@
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
The step is configured using a customer configuration file provided as a
resource in a custom shared library.

View File

@ -8,6 +8,8 @@
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Exceptions
If you see an error like `fatal: Not a git repository (or any parent up to mount point /home/jenkins)` it is likely that your test description cannot be found.<br />

View File

@ -12,6 +12,8 @@ access protection imposed on the WhiteSource backend would simply allow access b
## ${docGenConfiguration}
## ${docJenkinsPluginDependencies}
## Exceptions
None

View File

@ -0,0 +1,71 @@
import groovy.json.JsonOutput
import groovy.json.JsonSlurper
import jenkins.model.Jenkins
unresolvableCalls = [
podTemplate:'kubernetes',
container: 'kubernetes',
docker: 'docker-plugin',
usernamePassword: 'credentials-binding',
string: 'credentials-binding',
file: 'credentials-binding',
]
node() {
stage('Resolve Plugins') {
try {
resolvePlugins()
} catch(Exception e) {
def result = System.getenv()['result']
new File(new File(result).getParentFile(), 'FAILURE').text = "${e.getMessage()}"
throw e
}
}
}
def resolvePlugins() {
def stepCallMapping = new JsonSlurper().parseText(new File(System.getenv()['calls']).text)
def stepPluginMapping = [:]
println "[INFO] Resolving plugins ..."
for(def step in stepCallMapping) {
def resolvedPlugins = [:]
for(def call in step.value) {
def resolvedPlugin = resolvePlugin(call)
if (! resolvedPlugin) {
resolvedPlugin = unresolvableCalls[call]
if(! resolvedPlugin) resolvedPlugin = 'UNIDENTIFIED'
}
if(resolvedPlugins[resolvedPlugin] == null)
resolvedPlugins[resolvedPlugin] = (Set)[]
resolvedPlugins[resolvedPlugin] << call
stepPluginMapping.put(step.key,resolvedPlugins)
}
}
def result = System.getenv()['result']
new File(result).write(new JsonOutput().toJson(stepPluginMapping))
println "[INFO] plugins resolved. Result: ${result}."
}
def resolvePlugin(call) {
def plugins = Jenkins.get().pluginManager.getPlugins()
def s = new org.jenkinsci.plugins.workflow.cps.Snippetizer()
def pDescs = s.getQuasiDescriptors(false)
for(def pd in pDescs) {
if(pd.getSymbol() == call)
return pd.real.plugin?.shortName
}
return null
}

View File

@ -17,6 +17,7 @@ nav:
- durationMeasure: steps/durationMeasure.md
- gaugeExecuteTests: steps/gaugeExecuteTests.md
- githubPublishRelease: steps/githubPublishRelease.md
- hadolintExecute: steps/hadolintExecute.md
- handlePipelineStepErrors: steps/handlePipelineStepErrors.md
- healthExecuteCheck: steps/healthExecuteCheck.md
- influxWriteData: steps/influxWriteData.md

View File

@ -12,7 +12,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>com.sap.cp.jenkins</groupId>
<artifactId>jenkins-library</artifactId>
<version>0.10</version>
<version>0.11</version>
<name>SAP CP Piper Library</name>
<description>Shared library containing steps and utilities to set up continuous deployment processes for SAP technologies.</description>

View File

@ -251,6 +251,19 @@ steps:
languageRunner: 'ruby'
runCommand: 'bundle install && bundle exec gauge run'
testOptions: 'specs'
hadolintExecute:
configurationFile: '.hadolint.yaml'
configurationUrl: ''
dockerFile: './Dockerfile'
dockerImage: 'hadolint/hadolint:latest-debian'
qualityGates:
- threshold: 1
type: 'TOTAL_ERROR'
unstable: false
reportFile: 'hadolint.xml'
reportName: 'HaDoLint'
stashContent:
- 'buildDescriptor'
handlePipelineStepErrors:
echoDetails: true
failOnError: true
@ -308,6 +321,7 @@ steps:
dockerImage: 's4sdk/docker-neo-cli'
deployMode: 'mta'
warAction: 'deploy'
extensions: []
neo:
size: 'lite'
credentialsId: 'CI_CREDENTIALS_ID'
@ -407,7 +421,7 @@ steps:
noDefaultExludes: []
pipelineStashFilesBeforeBuild:
stashIncludes:
buildDescriptor: '**/pom.xml, **/.mvn/**, **/assembly.xml, **/.swagger-codegen-ignore, **/package.json, **/requirements.txt, **/setup.py, **/mta*.y*ml, **/.npmrc, Dockerfile, **/VERSION, **/version.txt, **/Gopkg.*, **/build.sbt, **/sbtDescriptor.json, **/project/*'
buildDescriptor: '**/pom.xml, **/.mvn/**, **/assembly.xml, **/.swagger-codegen-ignore, **/package.json, **/requirements.txt, **/setup.py, **/mta*.y*ml, **/.npmrc, Dockerfile, .hadolint.yaml, **/VERSION, **/version.txt, **/Gopkg.*, **/build.sbt, **/sbtDescriptor.json, **/project/*'
deployDescriptor: '**/manifest*.y*ml, **/*.mtaext.y*ml, **/*.mtaext, **/xs-app.json, helm/**, *.y*ml'
git: '.git/**'
opa5: '**/*.*'

View File

@ -1,4 +1,4 @@
package com.sap.piper.cm;
package com.sap.piper.cm
public enum BackendType {
SOLMAN, CTS, RFC, NONE

View File

@ -1,4 +1,4 @@
package com.sap.piper.cm;
package com.sap.piper.cm
public class ChangeManagementException extends RuntimeException {

View File

@ -1,4 +1,4 @@
package com.sap.piper.cm;
package com.sap.piper.cm
import com.cloudbees.groovy.cps.NonCPS

View File

@ -24,6 +24,6 @@ enum DeployMode {
throw new IllegalArgumentException("${value} is not in the list of possible values ${stringValues()}")
}
return enumValue;
return enumValue
}
}

View File

@ -8,12 +8,14 @@ class NeoCommandHelper {
private Script step
private DeployMode deployMode
private Map deploymentConfiguration
private Set extensions
private String user
private String password
private String source
//Warning: Commands generated with this class can contain passwords and should only be used within the step withCredentials
NeoCommandHelper(Script step, DeployMode deployMode, Map deploymentConfiguration,
Set extensions,
String user, String password, String source) {
this.step = step
this.deployMode = deployMode
@ -21,6 +23,7 @@ class NeoCommandHelper {
this.user = user
this.password = password
this.source = source
this.extensions = extensions ?: []
}
private String prolog() {
@ -47,7 +50,7 @@ class NeoCommandHelper {
}
String deployMta() {
return "${prolog()} deploy-mta --synchronous ${mainArgs()} ${source()}"
return "${prolog()} deploy-mta --synchronous ${mainArgs()}${extensions()} ${source()}"
}
String cloudCockpitLink() {
@ -87,6 +90,11 @@ class NeoCommandHelper {
return "--source ${BashUtils.quoteAndEscape(source)}"
}
private String extensions() {
if(! this.extensions) return ''
' --extensions ' + ((Iterable)this.extensions.collect({ "'${it}'" })).join(',')
}
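// For example, for extensions ['myExtension1.yml', 'myExtension2.yml'] this renders
// " --extensions 'myExtension1.yml','myExtension2.yml'" (compare NeoCommandHelperTest.deployMta()).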
private String mainArgs() {
String usernamePassword = "--user ${BashUtils.quoteAndEscape(user)} --password ${BashUtils.quoteAndEscape(password)}"
@ -123,7 +131,7 @@ class NeoCommandHelper {
def environment = deploymentConfiguration.environment
if (!(environment in Map)) {
step.error("The environment variables for the deployment to Neo have to be defined as a map.");
step.error("The environment variables for the deployment to Neo have to be defined as a map.")
}
def keys = environment.keySet()

View File

@ -21,6 +21,6 @@ enum WarAction {
throw new IllegalArgumentException("${value} is not in the list of possible values ${stringValues()}")
}
return enumValue;
return enumValue
}
}

View File

@ -1,4 +1,3 @@
#!groovy
import com.lesfurets.jenkins.unit.BasePipelineTest
import static org.junit.Assert.assertEquals

View File

@ -1,5 +1,3 @@
#!groovy
import org.junit.Before
import org.junit.Rule
import org.junit.Test

View File

@ -1,4 +1,3 @@
#!groovy
import com.sap.piper.JenkinsUtils
import org.junit.Before
import org.junit.Rule

View File

@ -0,0 +1,37 @@
import org.junit.Rule
import org.junit.Test
import org.junit.rules.RuleChain
import util.BasePiperTest
import util.JenkinsReadYamlRule
import util.Rules
import static org.hamcrest.CoreMatchers.is
import static org.hamcrest.Matchers.hasItem
import static org.junit.Assert.assertThat
class CommonPipelineEnvironmentTest extends BasePiperTest {
@Rule
public RuleChain rules = Rules
.getCommonRules(this)
.around(new JenkinsReadYamlRule(this)
)
@Test
void testCustomValueList() {
nullScript.commonPipelineEnvironment.setValue('myList', [])
nullScript.commonPipelineEnvironment.getValue('myList').add('item1')
nullScript.commonPipelineEnvironment.getValue('myList').add('item2')
assertThat(nullScript.commonPipelineEnvironment.getValue('myList'), hasItem('item1'))
assertThat(nullScript.commonPipelineEnvironment.getValue('myList'), hasItem('item2'))
}
@Test
void testCustomValueMap() {
nullScript.commonPipelineEnvironment.setValue('myList', [:])
nullScript.commonPipelineEnvironment.getValue('myList').key1 = 'val1'
nullScript.commonPipelineEnvironment.getValue('myList').key2 = 'val2'
assertThat(nullScript.commonPipelineEnvironment.getValue('myList').key1, is('val1'))
assertThat(nullScript.commonPipelineEnvironment.getValue('myList').key2, is('val2'))
}
}

View File

@ -6,7 +6,7 @@ import static org.junit.Assert.assertThat
import static org.junit.Assert.fail
import static util.StepHelper.getSteps
import java.io.File;
import java.io.File
import java.util.stream.Collectors
import java.lang.reflect.Field
@ -193,7 +193,7 @@ public class CommonStepsTest extends BasePiperTest{
continue
}
boolean notAccessible = false;
boolean notAccessible = false
def fieldName
if(!stepNameField.isAccessible()) {

View File

@ -1,4 +1,3 @@
#!groovy
import org.junit.Before
import org.junit.Rule
import org.junit.Test

View File

@ -57,6 +57,7 @@ class DockerExecuteOnKubernetesTest extends BasePiperTest {
def pullImageMap = [:]
def namespace
def securityContext
Map stashMap
@Before
void init() {
@ -92,6 +93,10 @@ class DockerExecuteOnKubernetesTest extends BasePiperTest {
}
body()
})
helper.registerAllowedMethod('stash', [Map.class], {m ->
stashMap = m
})
}
@Test
@ -207,7 +212,7 @@ class DockerExecuteOnKubernetesTest extends BasePiperTest {
@Test
void testDockerExecuteOnKubernetesEmptyContainerMapNoDockerImage() throws Exception {
exception.expect(IllegalArgumentException.class);
exception.expect(IllegalArgumentException.class)
stepRule.step.dockerExecuteOnKubernetes(
script: nullScript,
juStabUtils: utils,
@ -368,6 +373,21 @@ class DockerExecuteOnKubernetesTest extends BasePiperTest {
assertThat(securityContext, is(equalTo(expectedSecurityContext)))
}
/*
Due to negative side-effect of full git stashing
@Test
void testDockerExecuteOnKubernetesWorkspaceStashing() {
stepRule.step.dockerExecuteOnKubernetes(
script: nullScript,
juStabUtils: utils,
dockerImage: 'maven:3.5-jdk-8-alpine',
) { bodyExecuted = true }
assertTrue(bodyExecuted)
assertThat(stashMap.useDefaultExcludes, is(false))
}
*/
private container(options, body) {
containerName = options.name

View File

@ -1,5 +1,3 @@
#!groovy
import com.sap.piper.analytics.InfluxData
import org.junit.Rule

View File

@ -1,4 +1,3 @@
#!groovy
import org.junit.Before
import org.junit.Rule
import org.junit.Test

View File

@ -1,4 +1,3 @@
#!groovy
import groovy.json.JsonSlurperClassic
import org.junit.Before
import org.junit.Rule

View File

@ -0,0 +1,67 @@
import hudson.AbortException
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.ExpectedException
import org.junit.rules.RuleChain
import util.BasePiperTest
import util.JenkinsDockerExecuteRule
import util.JenkinsLoggingRule
import util.JenkinsReadYamlRule
import util.JenkinsShellCallRule
import util.JenkinsStepRule
import util.Rules
import static org.junit.Assert.assertThat
import static org.hamcrest.Matchers.*
class HadolintExecuteTest extends BasePiperTest {
private ExpectedException thrown = new ExpectedException().none()
private JenkinsShellCallRule shellRule = new JenkinsShellCallRule(this)
private JenkinsDockerExecuteRule dockerExecuteRule = new JenkinsDockerExecuteRule(this)
private JenkinsStepRule stepRule = new JenkinsStepRule(this)
private JenkinsReadYamlRule yamlRule = new JenkinsReadYamlRule(this)
private JenkinsLoggingRule loggingRule = new JenkinsLoggingRule(this)
@Rule
public RuleChain ruleChain = Rules
.getCommonRules(this)
.around(thrown)
.around(yamlRule)
.around(dockerExecuteRule)
.around(shellRule)
.around(stepRule)
.around(loggingRule)
@Before
void init() {
helper.registerAllowedMethod 'stash', [String, String], { name, includes -> assertThat(name, is('hadolintConfiguration')); assertThat(includes, is('.hadolint.yaml')) }
helper.registerAllowedMethod 'fileExists', [String], { s -> s == './Dockerfile' }
helper.registerAllowedMethod 'checkStyle', [Map], { m -> assertThat(m.pattern, is('hadolint.xml')); return 'checkstyle' }
helper.registerAllowedMethod 'recordIssues', [Map], { m -> assertThat(m.tools, hasItem('checkstyle')) }
helper.registerAllowedMethod 'archiveArtifacts', [String], { String p -> assertThat('hadolint.xml', is(p)) }
}
@Test
void testHadolintExecute() {
stepRule.step.hadolintExecute(script: nullScript, juStabUtils: utils, dockerImage: 'hadolint/hadolint:latest-debian', configurationUrl: 'https://github.wdf.sap.corp/raw/SGS/Hadolint-Dockerfile/master/.hadolint.yaml')
assertThat(dockerExecuteRule.dockerParams.dockerImage, is('hadolint/hadolint:latest-debian'))
assertThat(loggingRule.log, containsString("Unstash content: buildDescriptor"))
assertThat(shellRule.shell,
hasItems(
"curl --fail --location --output .hadolint.yaml https://github.wdf.sap.corp/raw/SGS/Hadolint-Dockerfile/master/.hadolint.yaml",
"hadolint ./Dockerfile --config .hadolint.yaml --format checkstyle > hadolint.xml"
)
)
}
@Test
void testNoDockerfile() {
helper.registerAllowedMethod 'fileExists', [String], { false }
thrown.expect AbortException
thrown.expectMessage '[hadolintExecute] Dockerfile \'./Dockerfile\' is not found.'
stepRule.step.hadolintExecute(script: nullScript, juStabUtils: utils, dockerImage: 'hadolint/hadolint:latest-debian')
}
}

View File

@ -1,4 +1,3 @@
#!groovy
import hudson.AbortException
import static org.hamcrest.Matchers.is

View File

@ -1,4 +1,3 @@
#!groovy
import org.junit.Before
import org.junit.Rule
import org.junit.Test

View File

@ -1,4 +1,3 @@
#!groovy
import com.sap.piper.DefaultValueCache
import com.sap.piper.analytics.InfluxData

View File

@ -1,4 +1,3 @@
#!groovy
import org.junit.Before
import org.junit.Rule
import org.junit.Test

View File

@ -1,4 +1,3 @@
#!groovy
import org.junit.Before
import org.junit.Rule
import org.junit.Test

View File

@ -1,4 +1,3 @@
#!groovy
import org.junit.Before
import org.junit.Rule
import org.junit.Test

View File

@ -1,14 +1,19 @@
import com.sap.piper.StepAssertions
import com.sap.piper.Utils
import groovy.lang.Script
import hudson.AbortException
import static org.hamcrest.Matchers.allOf
import static org.hamcrest.Matchers.containsString
import static org.hamcrest.Matchers.not
import static org.junit.Assert.assertThat
import org.hamcrest.Matchers
import org.hamcrest.BaseMatcher
import org.hamcrest.Description
import org.jenkinsci.plugins.credentialsbinding.impl.CredentialNotFoundException
import org.junit.After
import org.junit.Assert
import org.junit.Before
import org.junit.Rule
@ -73,6 +78,11 @@ class NeoDeployTest extends BasePiperTest {
nullScript.commonPipelineEnvironment.configuration = [steps: [neoDeploy: [neo: [host: 'test.deploy.host.com', account: 'trialuser123']]]]
}
@After
void tearDown() {
GroovySystem.metaClassRegistry.removeMetaClass(StepAssertions)
}
@Test
void straightForwardTestConfigViaParameters() {
@ -119,6 +129,158 @@ class NeoDeployTest extends BasePiperTest {
.hasSingleQuotedOption('source', archiveName))
}
@Test
void extensionsAsStringTest() {
def checkedExtensionFiles = []
StepAssertions.metaClass.static.assertFileExists =
getFileExistsCheck(checkedExtensionFiles, [archiveName, 'myExtension.yml'])
stepRule.step.neoDeploy(
script: nullScript,
source: archiveName,
extensions: 'myExtension.yml'
)
assert checkedExtensionFiles.contains('myExtension.yml')
assertThat(shellRule.shell,
new CommandLineMatcher()
.hasProlog('neo.sh deploy-mta')
.hasSingleQuotedOption('extensions', 'myExtension.yml'))
}
@Test
void extensionsAsEmptyString() {
thrown.expect(AbortException)
thrown.expectMessage('extension file name was null or empty')
stepRule.step.neoDeploy(
script: nullScript,
source: archiveName,
extensions: ''
)
}
@Test
void extensionsAsSetTest() {
Set extensions= ['myExtension1.yml' ,'myExtension2.yml']
extensionsAsCollectionTest(extensions)
}
@Test
void extensionsAsCollectionWithEmptyStringTest() {
thrown.expect(AbortException)
thrown.expectMessage('extension file name was null or empty')
stepRule.step.neoDeploy(
script: nullScript,
source: archiveName,
extensions: ['myExtension1.yml' ,''])
}
@Test
void extensionsNullTest() {
stepRule.step.neoDeploy(
script: nullScript,
source: archiveName,
extensions: null)
assert shellRule.shell.find { c -> c.startsWith('neo.sh deploy-mta') && ! c.contains('--extensions') }
}
@Test
void extensionsAsEmptyCollectionTest() {
stepRule.step.neoDeploy(
script: nullScript,
source: archiveName,
extensions: [])
assert shellRule.shell.find { c -> c.startsWith('neo.sh deploy-mta') && ! c.contains('--extensions') }
}
@Test
void extensionsAsCollectionsWithNullEntrySetTest() {
thrown.expect(AbortException)
thrown.expectMessage('extension file name was null or empty')
stepRule.step.neoDeploy(
script: nullScript,
source: archiveName,
extensions: [null])
}
@Test
void extensionsAsListTest() {
List extensions= ['myExtension1.yml' ,'myExtension2.yml']
extensionsAsCollectionTest(extensions)
}
@Test
void sameExtensionProvidedTwiceTest() {
List extensions= ['myExtension1.yml' ,'myExtension2.yml', 'myExtension1.yml']
extensionsAsCollectionTest(extensions)
}
void extensionsAsCollectionTest(def extensions) {
def checkedExtensionFiles = []
StepAssertions.metaClass.static.assertFileExists =
getFileExistsCheck(checkedExtensionFiles, [archiveName, 'myExtension1.yml', 'myExtension2.yml'])
stepRule.step.neoDeploy(
script: nullScript,
source: archiveName,
extensions: extensions
)
assert checkedExtensionFiles.contains('myExtension1.yml')
assert checkedExtensionFiles.contains('myExtension2.yml')
assertThat(shellRule.shell,
new CommandLineMatcher()
.hasProlog('neo.sh deploy-mta')
// some kind of creative usage for the single quotation check (... single quotes inside)
.hasSingleQuotedOption('extensions', 'myExtension1.yml\',\'myExtension2.yml'))
}
private static getFileExistsCheck(def checkedExtensionFiles, def fileNames) {
{ Script step, String filePath ->
checkedExtensionFiles << filePath
if( ! fileNames.contains(filePath) )
step.error("File ${filePath} cannot be found.")
}
}
@Test
void extensionsForWrongDeployModeTest() {
thrown.expect(AbortException)
thrown.expectMessage('Extensions are only supported for deploy mode \'MTA\'')
stepRule.step.neoDeploy(
script: nullScript,
source: archiveName,
deployMode: 'warParams',
extensions: 'myExtension.yml',
neo:
[
application: 'does',
runtime: 'not',
runtimeVersion: 'matter'
]
)
}
@Test
void archivePathFromCPETest() {

View File

@ -1,4 +1,3 @@
#!groovy
import org.jenkinsci.plugins.workflow.steps.FlowInterruptedException
import org.junit.Rule
import org.junit.Test

View File

@ -1,4 +1,3 @@
#!groovy
package stages
import org.junit.Before

View File

@ -1,4 +1,3 @@
#!groovy
package stages
import org.junit.Before

View File

@ -1,4 +1,3 @@
#!groovy
import org.junit.Before
import org.junit.Rule
import org.junit.Test

View File

@ -1,15 +1,15 @@
import org.junit.Before
import org.junit.Rule;
import org.junit.Rule
import org.junit.Test
import org.junit.rules.ExpectedException
import org.junit.rules.RuleChain;
import org.junit.rules.RuleChain
import com.sap.piper.DefaultValueCache
import util.BasePiperTest
import util.JenkinsLoggingRule
import util.JenkinsReadYamlRule
import util.JenkinsShellCallRule
import util.JenkinsStepRule;
import util.JenkinsStepRule
import util.Rules

View File

@ -1,4 +1,3 @@
#!groovy
import org.junit.Before
import org.junit.Rule
import org.junit.Test

View File

@ -1,5 +1,3 @@
#!groovy
import static org.hamcrest.Matchers.*
import org.junit.Before

View File

@ -1,4 +1,3 @@
#!groovy
import com.sap.piper.DescriptorUtils
import com.sap.piper.JsonUtils
import com.sap.piper.integration.WhitesourceOrgAdminRepository
@ -747,6 +746,50 @@ class WhitesourceExecuteScanTest extends BasePiperTest {
}
@Test
void testPassProjectNamesToCPE() {
helper.registerAllowedMethod("findFiles", [Map.class], { map ->
if (map.glob == "**${File.separator}pom.xml") {
return [new File('maven1/pom.xml'), new File('maven2/pom.xml')].toArray()
}
if (map.glob == "**${File.separator}package.json") {
return [new File('npm1/package.json'), new File('npm2/package.json'), new File('npm3/package.json'), new File('npm4/package.json')].toArray()
}
return [].toArray()
})
helper.registerAllowedMethod("parallel", [Map.class], { map ->
map.each {m ->
if (m.key != 'failFast') {
m.value()
}
}
})
helper.registerAllowedMethod("readProperties", [Map], {
def result = new Properties()
return result
})
//need to use call due to mock above
stepRule.step.call([
script : nullScript,
descriptorUtilsStub : descriptorUtilsStub,
whitesourceRepositoryStub : whitesourceStub,
whitesourceOrgAdminRepositoryStub: whitesourceOrgAdminRepositoryStub,
scanType : 'mta',
productName : 'SHC - Piper',
buildDescriptorExcludeList : ["maven2${File.separator}pom.xml".toString()],
juStabUtils : utils,
orgToken : 'b39d1328-52e2-42e3-98f0-932709daf3f0'
])
assertThat(nullScript.commonPipelineEnvironment.getValue('whitesourceProjectNames'), hasItem('com.sap.maven.test-java - 1'))
assertThat(nullScript.commonPipelineEnvironment.getValue('whitesourceProjectNames'), hasItem('com.sap.node.test-node - 1'))
}
@Test
void testNPMStatusCheckScanException() {
thrown.expect(AbortException.class)
stepRule.step.checkStatus(-1 & 0xFF, [whitesource:[licensingVulnerabilities: true]])

View File

@ -22,7 +22,7 @@ class MapUtilsTest {
c: [d: '1',
e: '2']],
b = [b: '2',
c: [d: 'x']];
c: [d: 'x']]
Map merged = MapUtils.merge(a, b)

View File

@ -25,7 +25,7 @@ class MtaUtilsTest extends BasePiperTest {
private File badJson
private mtaUtils
private ExpectedException thrown= ExpectedException.none();
private ExpectedException thrown= ExpectedException.none()
@ClassRule
public static TemporaryFolder tmp = new TemporaryFolder()

View File

@ -8,7 +8,7 @@ import static org.junit.Assert.assertEquals
import static org.junit.Assert.assertNotNull
class SystemEnvTest {
SystemEnv env = null;
SystemEnv env = null
Map systemEnvironmentMock = [:]
@Before
void setUp() {

View File

@ -17,7 +17,7 @@ class NeoCommandHelperTest extends BasePiperTest {
.getCommonRules(this)
.around(fileExistsRule)
NeoCommandHelper getTestFixture(DeployMode deployMode) {
NeoCommandHelper getTestFixture(DeployMode deployMode, Set extensions = []) {
Map deploymentConfiguration = [
host : 'host_value',
@ -41,6 +41,7 @@ class NeoCommandHelperTest extends BasePiperTest {
nullScript,
deployMode,
deploymentConfiguration,
extensions,
username,
password,
source
@ -121,9 +122,9 @@ class NeoCommandHelperTest extends BasePiperTest {
@Test
void deployMta() {
String actual = getTestFixture(DeployMode.MTA).deployMta()
String actual = getTestFixture(DeployMode.MTA, (Set)['myExtension1.yml', 'myExtension2.yml']).deployMta()
String expected = "neo.sh deploy-mta --synchronous --host 'host_value' --account 'account_value' " +
"--user 'username' --password 'password' --source 'file.mta'"
"--user 'username' --password 'password' --extensions 'myExtension1.yml','myExtension2.yml' --source 'file.mta'"
Assert.assertEquals(expected, actual)
}
}

View File

@ -1,4 +1,3 @@
#!groovy
package templates
import org.junit.Before

View File

@ -1,4 +1,3 @@
#!groovy
package templates
import org.junit.Before

View File

@ -1,4 +1,3 @@
#!groovy
package templates
import org.junit.Before

View File

@ -1,4 +1,3 @@
#!groovy
package templates
import org.junit.Before

View File

@ -1,4 +1,3 @@
#!groovy
package templates
import org.junit.Before

View File

@ -1,4 +1,3 @@
#!groovy
package templates
import org.junit.Before

View File

@ -1,4 +1,3 @@
#!groovy
package templates
import org.junit.Before

View File

@ -1,4 +1,3 @@
#!groovy
package templates
import org.junit.Before

View File

@ -1,4 +1,3 @@
#!groovy
package templates
import org.junit.Before

View File

@ -1,4 +1,3 @@
#!groovy
package templates
import org.junit.Before

View File

@ -1,4 +1,3 @@
#!groovy
package templates
import org.junit.Before

View File

@ -1,4 +1,3 @@
#!groovy
package templates
import org.junit.Before

View File

@ -1,4 +1,3 @@
#!groovy
package templates
import org.junit.Before

Some files were not shown because too many files have changed in this diff.