1
0
mirror of https://github.com/SAP/jenkins-library.git synced 2025-01-18 05:18:24 +02:00

Merge remote-tracking branch 'github/master' into HEAD

This commit is contained in:
Marcus Holl 2019-05-03 11:22:05 +02:00
commit 350ec78c9b
51 changed files with 1627 additions and 470 deletions

View File

@ -46,7 +46,7 @@ Code shall contain comments to explain the intention of the code when it is uncl
#### EditorConfig
To ensure a common file format, there is a `.editorConfig` file [in place](.editorconfig). To respect this file, [check](http://editorconfig.org/#download) if your editor does support it natively or you need to download a plugin.
To ensure a common file format, there is a `.editorConfig` file [in place](../.editorconfig). To respect this file, [check](http://editorconfig.org/#download) if your editor does support it natively or you need to download a plugin.
### Commit Message Style

2
.gitignore vendored
View File

@ -1,5 +1,5 @@
.idea/
bin/
/bin
.settings
logs
reports

View File

@ -1,4 +1,5 @@
import groovy.io.FileType
import groovy.json.JsonOutput
import org.yaml.snakeyaml.Yaml
import org.codehaus.groovy.control.CompilerConfiguration
import com.sap.piper.GenerateDocumentation
@ -19,12 +20,32 @@ class TemplateHelper {
parameters.keySet().toSorted().each {
def props = parameters.get(it)
t += "| `${it}` | ${props.mandatory ?: props.required ? 'yes' : 'no'} | ${(props.defaultValue ? '`' + props.defaultValue + '`' : '') } | ${props.value ?: ''} |\n"
def defaultValue = isComplexDefault(props.defaultValue) ? renderComplexDefaultValue(props.defaultValue) : "`${props.defaultValue}`"
t += "| `${it}` | ${props.mandatory ?: props.required ? 'yes' : 'no'} | ${defaultValue} | ${props.value ?: ''} |\n"
}
t
}
// A default value is rendered as "complex" when it is a non-empty collection of
// maps that each provide a truthy 'dependentParameterKey' and 'key' entry.
private static boolean isComplexDefault(def _default) {
    if (!(_default in Collection) || _default.size() == 0) {
        return false
    }
    return _default.every { entry ->
        (entry in Map) && entry.dependentParameterKey && entry.key
    }
}
// Renders a conditional default as one entry per dependent-parameter mapping,
// e.g. "deployTool=`cf_native`:`someValue`", joined by HTML line breaks.
private static renderComplexDefaultValue(def _default) {
    def rows = []
    for (def entry in _default) {
        rows << "${entry.dependentParameterKey}=`${entry.key ?: '<empty>'}`:`${entry.value ?: '<empty>'}`"
    }
    return rows.join('<br />')
}
static createParameterDescriptionSection(Map parameters) {
def t = ''
parameters.keySet().toSorted().each {
@ -87,11 +108,15 @@ class Helper {
prepareDefaultValuesStep.metaClass.readYaml {
m -> new Yaml().load(m.text)
}
prepareDefaultValuesStep.metaClass.echo {
m -> println(m)
}
prepareDefaultValuesStep
}
static getDummyScript(def prepareDefaultValuesStep, def stepName) {
static getDummyScript(def prepareDefaultValuesStep, def stepName, Map prepareDefaultValuesStepParams) {
def _prepareDefaultValuesStep = prepareDefaultValuesStep
def _stepName = stepName
@ -101,7 +126,7 @@ class Helper {
def STEP_NAME = _stepName
def prepareDefaultValues() {
_prepareDefaultValuesStep()
_prepareDefaultValuesStep(prepareDefaultValuesStepParams)
}
@ -185,6 +210,15 @@ class Helper {
f.eachLine {
line ->
if(line ==~ /.*dependingOn.*/) {
def dependentConfigKey = (line =~ /.*dependingOn\('(.*)'\).mixin\('(.*)'/)[0][1]
def configKey = (line =~ /.*dependingOn\('(.*)'\).mixin\('(.*)'/)[0][2]
if(! step.dependentConfig[configKey]) {
step.dependentConfig[configKey] = []
}
step.dependentConfig[configKey] << dependentConfigKey
}
if(docuEnd) {
docuEnd = false
@ -375,6 +409,7 @@ roots = [
stepsDir = null
stepsDocuDir = null
String customDefaults = null
steps = []
@ -391,9 +426,14 @@ if(args.length >= 2)
stepsDocuDir = stepsDocuDir ?: new File(Helper.projectRoot, "documentation/docs/steps")
def argsDrop = 2
if(args.length >= 3 && args[2].contains('.yml')) {
customDefaults = args[2]
argsDrop ++
}
if(args.length >= 3)
steps = (args as List).drop(2) // the first two entries are stepsDir and docuDir
steps = (args as List).drop(argsDrop) // the first two entries are stepsDir and docuDir
// the other parts are considered as step names
@ -433,7 +473,7 @@ boolean exceptionCaught = false
def stepDescriptors = [:]
for (step in steps) {
try {
stepDescriptors."${step}" = handleStep(step, prepareDefaultValuesStep, gse)
stepDescriptors."${step}" = handleStep(step, prepareDefaultValuesStep, gse, customDefaults)
} catch(Exception e) {
exceptionCaught = true
System.err << "${e.getClass().getName()} caught while handling step '${step}': ${e.getMessage()}.\n"
@ -448,6 +488,8 @@ for(step in stepDescriptors) {
def otherStep = param.value.docu.replaceAll('@see', '').trim()
param.value.docu = fetchTextFrom(otherStep, param.key, stepDescriptors)
param.value.mandatory = fetchMandatoryFrom(otherStep, param.key, stepDescriptors)
if(! param.value.value)
param.value.value = fetchPossibleValuesFrom(otherStep, param.key, stepDescriptors)
}
}
}
@ -468,6 +510,10 @@ if(exceptionCaught) {
System.exit(1)
}
File docuMetaData = new File('target/docuMetaData.json')
if(docuMetaData.exists()) docuMetaData.delete()
docuMetaData << new JsonOutput().toJson(stepDescriptors)
System.err << "[INFO] done.\n"
void renderStep(stepName, stepProperties) {
@ -511,7 +557,11 @@ def fetchMandatoryFrom(def step, def parameterName, def steps) {
}
}
def handleStep(stepName, prepareDefaultValuesStep, gse) {
// Looks up the documented possible values of the same parameter on the
// referenced step; falls back to an empty string when nothing is documented.
def fetchPossibleValuesFrom(def step, def parameterName, def steps) {
    def possibleValues = steps[step]?.parameters[parameterName]?.value
    return possibleValues ?: ''
}
def handleStep(stepName, prepareDefaultValuesStep, gse, customDefaults) {
File theStep = new File(stepsDir, "${stepName}.groovy")
File theStepDocu = new File(stepsDocuDir, "${stepName}.md")
@ -523,9 +573,13 @@ def handleStep(stepName, prepareDefaultValuesStep, gse) {
System.err << "[INFO] Handling step '${stepName}'.\n"
Map prepareDefaultValuesStepParams = [:]
if (customDefaults)
prepareDefaultValuesStepParams.customDefaults = customDefaults
def defaultConfig = Helper.getConfigHelper(getClass().getClassLoader(),
roots,
Helper.getDummyScript(prepareDefaultValuesStep, stepName)).use()
Helper.getDummyScript(prepareDefaultValuesStep, stepName, prepareDefaultValuesStepParams)).use()
def params = [] as Set
@ -560,7 +614,9 @@ def handleStep(stepName, prepareDefaultValuesStep, gse) {
params = compatibleParams
}
def step = [parameters:[:]]
// 'dependentConfig' is only present here for internal reasons and that entry is removed at
// end of method.
def step = [parameters:[:], dependentConfig: [:]]
//
// START special handling for 'script' parameter
@ -603,5 +659,34 @@ def handleStep(stepName, prepareDefaultValuesStep, gse) {
Helper.scanDocu(theStep, step)
step.parameters.each { k, v ->
if(step.dependentConfig.get(k)) {
def dependentParameterKey = step.dependentConfig.get(k)[0]
def dependentValues = step.parameters.get(dependentParameterKey)?.value
if (dependentValues) {
def the_defaults = []
dependentValues
.replaceAll('[\'"` ]', '')
.split(',').each {possibleValue ->
if (!possibleValue instanceof Boolean && defaultConfig.get(possibleValue)) {
the_defaults <<
[
dependentParameterKey: dependentParameterKey,
key: possibleValue,
value: Helper.getValue(defaultConfig.get(possibleValue), k.split('/'))
]
}
}
v.defaultValue = the_defaults
}
}
}
//
// 'dependentConfig' is only present for internal purposes and must not be used outside.
step.remove('dependentConfig')
step
}

View File

@ -15,7 +15,8 @@ Set up an agile development process with Jenkins CI, which automatically feeds c
In many SAP development scenarios, it is vital to synchronize both backend and frontend deliveries. These deliveries are typically an SAP UI5 application and an ABAP backend from which it is served. The SAP UI5 parts are often developed using agile practices and use Continuous Integration pipelines that automatically build, test, and deploy the application.
**Note:** This scenario description is an example. You can apply the process to other scenarios and component sets, as well.
!!! note
This scenario description is an example. You can apply the process to other scenarios and component sets, as well.
In this scenario, we want to show how an agile development process with Jenkins CI can automatically feed changes into SAP Solution Manager. In SAP Solution Manager, all parts of the application stack come together and can be subject to classic change and transport management.

View File

@ -1,6 +1,6 @@
# Build and Deploy SAP UI5 or SAP Fiori Applications on SAP Cloud Platform with Jenkins
# Build and Deploy SAPUI5 or SAP Fiori Applications on SAP Cloud Platform with Jenkins
Build an application based on SAP UI5 or SAP Fiori with Jenkins and deploy the build result into an SAP Cloud Platform account in the Neo environment.
Build an application based on SAPUI5 or SAP Fiori with Jenkins and deploy the build result into an SAP Cloud Platform account in the Neo environment.
## Prerequisites
@ -32,7 +32,7 @@ On the project level, provide and adjust the following template:
This scenario combines various different steps to create a complete pipeline.
In this scenario, we want to show how to build an application based on SAP UI5 or SAP Fiori by using the multi-target application (MTA) concept and how to deploy the build result into an SAP Cloud Platform account in the Neo environment. This document comprises the [mtaBuild](https://sap.github.io/jenkins-library/steps/mtaBuild/) and the [neoDeploy](https://sap.github.io/jenkins-library/steps/neoDeploy/) steps.
In this scenario, we want to show how to build an application based on SAPUI5 or SAP Fiori by using the multi-target application (MTA) concept and how to deploy the build result into an SAP Cloud Platform account in the Neo environment. This document comprises the [mtaBuild](https://sap.github.io/jenkins-library/steps/mtaBuild/) and the [neoDeploy](https://sap.github.io/jenkins-library/steps/neoDeploy/) steps.
![This pipeline in Jenkins Blue Ocean](images/pipeline.jpg)
###### Screenshot: Build and Deploy Process in Jenkins

View File

@ -1,18 +1,6 @@
# cloudFoundryDeploy
# ${docGenStepName}
## Description
The application will be deployed to a test or production space within Cloud Foundry.
Deployment can be done
* in a standard way
* in a zero downtime manner (using a [blue-green deployment approach](https://martinfowler.com/bliki/BlueGreenDeployment.html))
!!! note "Deployment supports multiple deployment tools"
Currently the following are supported:
* Standard `cf push` and [Bluemix blue-green plugin](https://github.com/bluemixgaragelondon/cf-blue-green-deploy#how-to-use)
* [MTA CF CLI Plugin](https://github.com/cloudfoundry-incubator/multiapps-cli-plugin)
## ${docGenDescription}
## Prerequisites
@ -21,90 +9,9 @@ Deployment can be done
![Jenkins credentials configuration](../images/cf_credentials.png)
## Parameters
## ${docGenParameters}
| parameter | mandatory | default | possible values |
| ----------|-----------|---------|-----------------|
| script | yes | | |
| cloudFoundry | yes | | |
| deployTool | no | cf_native | cf_native, mtaDeployPlugin |
| deployType | no | standard | standard, blue-green |
| keepOldInstance | no | false | true, false |
| dockerImage | no | s4sdk/docker-cf-cli | |
| dockerWorkspace | no | /home/piper | |
| mtaDeployParameters | | for _deployType:standard_ `-f`<br />for _deployType:blue-green_ `-f --no-confirm` | |
| mtaExtensionDescriptor | no | '' | |
| mtaPath | no | '' | |
| smokeTestScript | no | blueGreenCheckScript.sh (provided by library). <br />Can be overwritten using config property 'smokeTestScript' | |
| smokeTestStatusCode | no | 200 | |
| stashContent | no | [] | |
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for retrieving e.g. configuration parameters.
* `cloudFoundry` defines a map containing following properties:
* `apiEndpoint`: Cloud Foundry API endpoint (default: `https://api.cf.eu10.hana.ondemand.com`)
* `appName`: App name of application to be deployed (optional)
* `credentialsId`: Credentials to be used for deployment (mandatory)
* `manifest`: Manifest to be used for deployment
* `org`: Cloud Foundry target organization (mandatory)
* `space`: Cloud Foundry target space (mandatory)
Example: `cloudFoundry: [apiEndpoint: 'https://test.server.com', appName:'cfAppName', credentialsId: 'cfCredentialsId', manifest: 'cfManifest', org: 'cfOrg', space: 'cfSpace']`
!!! note
It is also possible to use following configuration parameters instead of `cloudFoundry` map:
- cfApiEndpoint
- cfAppName
- cfCredentialsId
- cfManifest
- cfOrg
- cfSpace
!!! note
Due to [an incompatible change](https://github.com/cloudfoundry/cli/issues/1445) in the Cloud Foundry CLI, multiple buildpacks are not supported by this step.
If your `application` contains a list of `buildpacks` instead a single `buildpack`, this will be automatically re-written by the step when blue-green deployment is used.
* `deployTool` defines the tool which should be used for deployment.
* `deployType` defines the type of deployment, either `standard` deployment which results in a system downtime or a zero-downtime `blue-green` deployment.
* `keepOldInstance` in case of a `blue-green` deployment the old instance will be deleted by default. If this option is set to true the old instance will remain stopped in the Cloud Foundry space.
* `dockerImage` defines the Docker image containing the deployment tools (like cf cli, ...) and `dockerWorkspace` defines the home directory of the default user of the `dockerImage`
* `smokeTestScript` allows to specify a script which performs a check during blue-green deployment. The script gets the FQDN as parameter and returns `exit code 0` in case check returned `smokeTestStatusCode`. More details can be found [here](https://github.com/bluemixgaragelondon/cf-blue-green-deploy#how-to-use) <br /> Currently this option is only considered for deployTool `cf_native`.
* `stashContent` defines the stash names which should be unstashed at the beginning of the step. This makes the files available in case the step is started on an empty node.
### Deployment with cf_native
* `appName` in `cloudFoundry` map (or `cfAppName`) defines the name of the application which will be deployed to the Cloud Foundry space.
* `manifest` in `cloudFoundry` maps (or `cfManifest`) defines the manifest to be used for Cloud Foundry deployment.
!!! note
Cloud Foundry supports the deployment of multiple applications using a single manifest file.
This option is supported with Piper.
In this case define `appName: ''` since the app name for the individual applications have to be defined via the manifest.
You can find details in the [Cloud Foundry Documentation](https://docs.cloudfoundry.org/devguide/deploy-apps/manifest.html#multi-apps)
### Deployment with mtaDeployPlugin
* `mtaPath` define path to *.mtar for deployment.
* `mtaExtensionDescriptor` defines additional extension descriptor file for deployment.
* `mtaDeployParameters` defines additional parameters passed to mta deployment.
## Step configuration
The following parameters can also be specified as step/stage/general parameters using the [global configuration](../configuration.md):
* cloudFoundry
* deployUser
* deployTool
* deployType
* dockerImage
* dockerWorkspace
* mtaDeployParameters
* mtaExtensionDescriptor
* mtaPath
* smokeTestScript
* smokeTestStatusCode
* stashContent
## ${docGenConfiguration}
## Example

View File

@ -1,22 +1,6 @@
# influxWriteData
# ${docGenStepName}
## Description
Since your Continuous Delivery Pipeline in Jenkins provides your productive development and delivery infrastructure you should monitor the pipeline to ensure it runs as expected. How to setup this monitoring is described in the following.
You basically need three components:
- The [InfluxDB Jenkins plugin](https://wiki.jenkins-ci.org/display/JENKINS/InfluxDB+Plugin) which allows you to send build metrics to InfluxDB servers
- The [InfluxDB](https://www.influxdata.com/time-series-platform/influxdb/) to store this data (Docker available)
- A [Grafana](http://grafana.org/) dashboard to visualize the data stored in InfluxDB (Docker available)
!!! note "no InfluxDB available?"
If you don't have an InfluxDB available yet this step will still provide you some benefit.
It will create following files for you and archive them into your build:
* `jenkins_data.json`: This file gives you build-specific information, like e.g. build result, stage where the build failed
* `influx_data.json`: This file gives you detailed information about your pipeline, e.g. stage durations, steps executed, ...
## ${docGenDescription}
## Prerequisites
@ -79,37 +63,9 @@ You need to define the influxDB server in your pipeline as it is defined in the
influxDBServer=jenkins
```
## Parameters
## ${docGenParameters}
| parameter | mandatory | default | possible values |
| ----------|-----------|---------|-----------------|
|script|yes|||
|artifactVersion|no|`commonPipelineEnvironment.getArtifactVersion()`||
|customData|no|`InfluxData.getInstance().getFields().jenkins_custom_data`||
|customDataMap|no|`InfluxData.getInstance().getFields()`||
|customDataMapTags|no|`InfluxData.getInstance().getTags()`||
|customDataTags|no|`InfluxData.getInstance().getTags().jenkins_custom_data`||
|influxPrefix|no|||
|influxServer|no|`''`||
|wrapInNode|no|`false`||
## Step configuration
We recommend to define values of step parameters via [config.yml file](../configuration.md).
In following sections the configuration is possible:
| parameter | general | step | stage |
| ----------|-----------|---------|-----------------|
|script||||
|artifactVersion||X|X|
|customData||X|X|
|customDataMap||X|X|
|customDataMapTags||X|X|
|customDataTags||X|X|
|influxPrefix||X|X|
|influxServer||X|X|
|wrapInNode||X|X|
## ${docGenConfiguration}
## Example

View File

@ -0,0 +1,27 @@
# ${docGenStepName}
## ${docGenDescription}
## Prerequisites
When pushing to a container registry, you need to maintain the respective credentials in your Jenkins credentials store:
Kaniko expects a Docker `config.json` file containing the credential information for registries.
You can create it like explained in the Docker Success Center in the article about [How to generate a new auth in the config.json file](https://success.docker.com/article/generate-new-auth-in-config-json-file).
Please copy this file and upload it to your Jenkins for example<br />
via _Jenkins_ -> _Credentials_ -> _System_ -> _Global credentials (unrestricted)_ -> _ Add Credentials_ ->
* Kind: _Secret file_
* File: upload your `config.json` file
* ID: specify id which you then use for the configuration of `dockerConfigJsonCredentialsId` (see below)
## Example
```groovy
kanikoExecute script:this
```
## ${docGenParameters}
## ${docGenConfiguration}

View File

@ -0,0 +1,18 @@
# ${docGenStepName}
## ${docGenDescription}
## ${docGenParameters}
## ${docGenConfiguration}
## Examples
```groovy
multicloudDeploy(
script: script,
cfTargets: [[apiEndpoint: 'https://test.server.com', appName:'cfAppName', credentialsId: 'cfCredentialsId', manifest: 'cfManifest', org: 'cfOrg', space: 'cfSpace']],
neoTargets: [[credentialsId: 'my-credentials-id', host: 'hana.example.org', account: 'trialuser1']],
enableZeroDowntimeDeployment: 'true'
)
```

View File

@ -1,12 +1,6 @@
# neoDeploy
# ${docGenStepName}
## Description
Deploys an Application to SAP Cloud Platform (SAP CP) using the SAP Cloud Platform Console Client (Neo Java Web SDK).
Before doing this, validates that SAP Cloud Platform Console Client is installed and the version is compatible.
Note that a version is formed by `major.minor.patch`, and a version is compatible to another version if the minor and patch versions are higher, but the major version is not, e.g. if 3.39.10 is the expected version, 3.39.11 and 3.40.1 would be compatible versions, but 4.0.1 would not be a compatible version.
## ${docGenDescription}
## Prerequisites
@ -20,97 +14,9 @@ Note that a version is formed by `major.minor.patch`, and a version is compatibl
* **Java 8 or compatible version** - needed by the *Neo-Java-Web-SDK*. Java environment needs to be properly configured (JAVA_HOME, java executable contained in path).
## Parameters when using MTA deployment method (default - MTA)
## ${docGenParameters}
| parameter | mandatory | default | possible values |
| -------------------|-----------|-------------------------------|-------------------------------------------------|
| `script` | yes | | |
| `neo` | no | | |
| `deployMode` | yes | `'mta'` | `'mta'`, `'warParams'`, `'warPropertiesFile'` |
| `neoHome` | no | | |
| `source` | no | | |
The parameter `neo` is a map which contains the following parameters:
| parameter | mandatory | default | possible values |
| -------------------|-----------|-------------------------------|-------------------------------------------------|
| `account` | no | | |
| `credentialsId` | no | `'CI_CREDENTIALS_ID'` | |
| `host` | no | | |
## Parameters when using WAR file deployment method with .properties file (WAR_PROPERTIESFILE)
| parameter | mandatory | default | possible values |
| -------------------|-----------|-------------------------------|-------------------------------------------------|
| `script` | yes | | |
| `neo` | no | | |
| `deployMode` | yes | `'mta'` | `'mta'`, `'warParams'`, `'warPropertiesFile'` |
| `neoHome` | no | | |
| `source` | no | | |
| `warAction` | yes | `'deploy'` | `'deploy'`, `'rolling-update'` |
The parameter `neo` is a map which contains the following parameters:
| parameter | mandatory | default | possible values |
| -------------------|-----------|-------------------------------|-------------------------------------------------|
| `credentialsId` | no | `'CI_CREDENTIALS_ID'` | |
| `propertiesFile` | yes | | |
## Parameters when using WAR file deployment method without .properties file - with parameters (WAR_PARAMS)
| parameter | mandatory | default | possible values |
| -------------------|-----------|-------------------------------|-------------------------------------------------|
| `script` | yes | | |
| `neo` | no | | |
| `deployMode` | yes | `'mta'` | `'mta'`, `'warParams'`, `'warPropertiesFile'` |
| `neoHome` | no | | |
| `source` | no | | |
| `warAction` | yes | `'deploy'` | `'deploy'`, `'rolling-update'` |
The parameter `neo` is a map which contains the following parameters:
| parameter | mandatory | default | possible values |
| -------------------|-----------|-------------------------------|-------------------------------------------------|
| `account` | yes | | |
| `application` | yes | | |
| `credentialsId` | no | `'CI_CREDENTIALS_ID'` | |
| `environment` | | | |
| `host` | yes | | |
| `runtime` | yes | | |
| `runtimeVersion` | yes | | |
| `size` | no | `'lite'` | `'lite'`, `'pro'`, `'prem'`, `'prem-plus'` |
| `vmArguments` | | | |
* `script` - The common script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for retrieving e.g. configuration parameters.
* `deployMode` - The deployment mode which should be used. Available options are `'mta'` (default), `'warParams'` (deploying WAR file and passing all the deployment parameters via the function call) and `'warPropertiesFile'` (deploying WAR file and putting all the deployment parameters in a .properties file)
* `neoHome` - The path to the `neo-java-web-sdk` tool used for SAP CP deployment. If no parameter is provided, the path is retrieved from the environment variables using the environment variable `NEO_HOME`. If no parameter and no environment variable is provided, the path is retrieved from the step configuration using the step configuration key `neoHome`. If the previous configurations are not provided, the tool is expected on the `PATH`, and if it is not available on the `PATH` an AbortException is thrown.
* `source` - The path to the archive for deployment to SAP CP. If not provided `mtarFilePath` from common pipeline environment is used instead.
* `warAction` - Action mode when using WAR file mode. Available options are `deploy` (default) and `rolling-update` which performs update of an application without downtime in one go.
The parameters for `neo`:
* `account` - The SAP Cloud Platform account to deploy to.
* `application` - Name of the application you want to manage, configure, or deploy
* `credentialsId` - The Jenkins credentials containing user and password used for SAP CP deployment.
* `environment` - Map of environment variables in the form of KEY: VALUE
* `host` - The SAP Cloud Platform host to deploy to.
* `propertiesFile` - The path to the .properties file in which all necessary deployment properties for the application are defined.
* `runtime` - Name of SAP Cloud Platform application runtime
* `runtimeVersion` - Version of SAP Cloud Platform application runtime
* `size` - Compute unit (VM) size. Acceptable values: lite, pro, prem, prem-plus.
* `vmArguments` - String of VM arguments passed to the JVM
The step is prepared for being executed in docker. The corresponding parameters can be applied. See step `dockerExecute` for details.
## Step configuration
The parameter `neo` including all options can also be specified as a global parameter using the global configuration file.
The following parameters can also be specified as step parameters using the global configuration file:
* `dockerImage`
* `neoHome`
* `source`
## ${docGenConfiguration}
## Side effects

View File

@ -1,22 +1,12 @@
# pipelineStashFiles
# ${docGenStepName}
## Description
This step stashes files that are needed in other build steps (on other nodes).
## ${docGenDescription}
## Prerequisites
none
## Parameters
| parameter | mandatory | default | possible values |
| ----------|-----------|---------|-----------------|
| script | yes | | |
| runCheckmarx | no | false | |
| runOpaTests | no | false | |
| stashIncludes | no | see details | |
| stashExcludes | no | see details | |
## ${docGenParameters}
Details:
@ -42,14 +32,7 @@ The step is stashing files before and after the build. This is due to the fact,
* `stashIncludes: [buildDescriptor: '**/mybuild.yml]`
* `stashExcludes: [tests: '**/NOTRELEVANT.*]`
## Step configuration
The following parameters can also be specified as step parameters using the global configuration file:
* runOpaTests
* runCheckmarx
* stashExcludes
* stashIncludes
## ${docGenConfiguration}
## Explanation of pipeline step

View File

@ -0,0 +1,11 @@
# ${docGenStepName}
## ${docGenDescription}
## Prerequisites
none
## ${docGenParameters}
## ${docGenConfiguration}

View File

@ -0,0 +1,11 @@
# ${docGenStepName}
## ${docGenDescription}
## Prerequisites
none
## ${docGenParameters}
## ${docGenConfiguration}

View File

@ -0,0 +1,7 @@
# ${docGenStepName}
## ${docGenDescription}
## ${docGenParameters}
## ${docGenConfiguration}

View File

@ -13,8 +13,8 @@ nav:
- dockerExecute: steps/dockerExecute.md
- dockerExecuteOnKubernetes: steps/dockerExecuteOnKubernetes.md
- durationMeasure: steps/durationMeasure.md
- githubPublishRelease: steps/githubPublishRelease.md
- gaugeExecuteTests: steps/gaugeExecuteTests.md
- githubPublishRelease: steps/githubPublishRelease.md
- handlePipelineStepErrors: steps/handlePipelineStepErrors.md
- healthExecuteCheck: steps/healthExecuteCheck.md
- influxWriteData: steps/influxWriteData.md
@ -22,12 +22,15 @@ nav:
- mailSendNotification: steps/mailSendNotification.md
- mavenExecute: steps/mavenExecute.md
- mtaBuild: steps/mtaBuild.md
- multicloudDeploy: steps/multicloudDeploy.md
- neoDeploy: steps/neoDeploy.md
- newmanExecute: steps/newmanExecute.md
- npmExecute: steps/npmExecute.md
- pipelineExecute: steps/pipelineExecute.md
- pipelineRestartSteps: steps/pipelineRestartSteps.md
- pipelineStashFiles: steps/pipelineStashFiles.md
- pipelineStashFilesAfterBuild: steps/pipelineStashFilesAfterBuild.md
- pipelineStashFilesBeforeBuild: steps/pipelineStashFilesBeforeBuild.md
- prepareDefaultValues: steps/prepareDefaultValues.md
- seleniumExecuteTests: steps/seleniumExecuteTests.md
- setupCommonPipelineEnvironment: steps/setupCommonPipelineEnvironment.md

View File

@ -221,6 +221,13 @@ steps:
languageRunner: 'js'
runCommand: 'gauge run'
testOptions: 'specs'
bundler:
dockerImage: 'ruby:2.5.3-stretch'
dockerName: 'bundler'
dockerWorkspace: ''
languageRunner: 'ruby'
runCommand: 'bundle install && bundle exec gauge run'
testOptions: 'specs'
handlePipelineStepErrors:
echoDetails: true
failOnError: true
@ -232,6 +239,15 @@ steps:
healthEndpoint: ''
influxWriteData:
influxServer: ''
kanikoExecute:
containerBuildOptions: '--skip-tls-verify-pull'
containerCommand: '/busybox/tail -f /dev/null'
containerPreparationCommand: 'rm /kaniko/.docker/config.json'
containerShell: '/busybox/sh'
customTlsCertificateLinks: []
dockerfile: Dockerfile
dockerImage: 'gcr.io/kaniko-project/executor:debug'
dockerOptions: "-u 0 --entrypoint=''"
karmaExecuteTests:
containerPortMappings:
'node:8-stretch':
@ -407,6 +423,10 @@ steps:
dockerImage: 'node:8-stretch'
dockerName: 'npm'
dockerWorkspace: '/home/node'
bundler:
dockerImage: 'ruby:2.5.3-stretch'
dockerName: 'bundler'
dockerWorkspace: ''
slackSendNotification:
color: "${buildStatus == 'SUCCESS'?'#008000':'#E60000'}"
defaultMessage: "${buildStatus}: Job ${env.JOB_NAME} <${env.BUILD_URL}|#${env.BUILD_NUMBER}>"

View File

@ -0,0 +1,5 @@
package com.sap.piper

// Target cloud platforms supported by the deployment steps.
// Consumed by DeploymentType.selectFor(...) to pick a deployment strategy.
enum CloudPlatform {
NEO, CLOUD_FOUNDRY
}

View File

@ -0,0 +1,34 @@
package com.sap.piper

/**
 * Deployment strategies supported by the library, each mapped to the
 * command-line value of the corresponding deployment tool
 * (Neo: 'deploy'/'rolling-update', Cloud Foundry: 'standard'/'blue-green').
 */
enum DeploymentType {
    NEO_ROLLING_UPDATE('rolling-update'), CF_BLUE_GREEN('blue-green'), CF_STANDARD('standard'), NEO_DEPLOY('deploy')

    private String value

    // Enum constructors must not be public; they are only invoked by the
    // constant declarations above (was 'public', which is invalid for enums in Java
    // and misleading in Groovy).
    private DeploymentType(String value) {
        this.value = value
    }

    @Override
    public String toString() {
        return value
    }

    /**
     * Selects the deployment type for the given platform.
     *
     * @param cloudPlatform the target platform (NEO or CLOUD_FOUNDRY)
     * @param enableZeroDowntimeDeployment when true, prefer the zero-downtime
     *        variant (rolling-update / blue-green) over the standard one
     * @return the matching deployment type
     * @throws RuntimeException for unknown platforms
     */
    static DeploymentType selectFor(CloudPlatform cloudPlatform, boolean enableZeroDowntimeDeployment) {
        switch (cloudPlatform) {
            case CloudPlatform.NEO:
                if (enableZeroDowntimeDeployment) return NEO_ROLLING_UPDATE
                return NEO_DEPLOY
            case CloudPlatform.CLOUD_FOUNDRY:
                if (enableZeroDowntimeDeployment) return CF_BLUE_GREEN
                return CF_STANDARD
            default:
                throw new RuntimeException("Unknown cloud platform: ${cloudPlatform}")
        }
    }
}

View File

@ -12,7 +12,7 @@ def stash(name, include = '**/*.*', exclude = '', useDefaultExcludes = true) {
echo "Stash content: ${name} (include: ${include}, exclude: ${exclude}, useDefaultExcludes: ${useDefaultExcludes})"
Map stashParams = [
name: name,
name : name,
includes: include,
excludes: exclude
]
@ -23,6 +23,16 @@ def stash(name, include = '**/*.*', exclude = '', useDefaultExcludes = true) {
steps.stash stashParams
}
// Runs all given closures exactly once each, in random order.
// @NonCPS: executes outside the Jenkins CPS transform, hence the plain index
// loop instead of CPS-unfriendly iteration constructs.
@NonCPS
def runClosures(Map closures) {
def closuresToRun = closures.values().asList()
Collections.shuffle(closuresToRun) // Shuffle the list so no one tries to rely on the order of execution
for (int i = 0; i < closuresToRun.size(); i++) {
(closuresToRun[i] as Closure).run()
}
}
def stashList(script, List stashes) {
for (def stash : stashes) {
def name = stash.name
@ -68,7 +78,7 @@ def unstashAll(stashContent) {
def unstashedContent = []
if (stashContent) {
for (i = 0; i < stashContent.size(); i++) {
if(stashContent[i]) {
if (stashContent[i]) {
unstashedContent += unstash(stashContent[i])
}
}
@ -88,7 +98,7 @@ void pushToSWA(Map parameters, Map config) {
try {
parameters.actionName = parameters.get('actionName') ?: 'Piper Library OS'
parameters.eventType = parameters.get('eventType') ?: 'library-os'
parameters.jobUrlSha1 = generateSha1(env.JOB_URL)
parameters.jobUrlSha1 = generateSha1(env.JOB_URL)
parameters.buildUrlSha1 = generateSha1(env.BUILD_URL)
Telemetry.notify(this, config, parameters)
@ -98,8 +108,18 @@ void pushToSWA(Map parameters, Map config) {
}
@NonCPS
static String fillTemplate(String templateText, Map binding){
// Renders the given Groovy template text with the provided binding and
// materializes the result as a String.
static String fillTemplate(String templateText, Map binding) {
    def template = new SimpleTemplateEngine().createTemplate(templateText)
    return template.make(binding).toString()
}
/**
 * Downloads a settings file from the given URL into the workspace.
 * Refuses to overwrite an existing file of the same name.
 *
 * @param script the pipeline script (provides fileExists/httpRequest/writeFile)
 * @param url the URL to download from
 * @param targetFile the workspace file name to write (default: 'settings.xml')
 * @return the name of the written file
 * @throws RuntimeException when targetFile already exists
 */
static String downloadSettingsFromUrl(script, String url, String targetFile = 'settings.xml') {
    boolean alreadyPresent = script.fileExists(targetFile)
    if (alreadyPresent) {
        throw new RuntimeException("Trying to download settings file to ${targetFile}, but a file with this name already exists. Please specify a unique file name.")
    }
    def response = script.httpRequest(url)
    script.writeFile(file: targetFile, text: response.getContent())
    return targetFile
}

View File

@ -1,122 +0,0 @@
package com.sap.piper.jenkins
import com.cloudbees.groovy.cps.NonCPS
/**
 * Remote-controls a Jenkins instance (e.g. one started for integration tests)
 * through its JSON HTTP API: waits for startup, triggers jobs and polls build
 * results. Serializable so it can be held by CPS pipeline code; raw HTTP
 * access is confined to the @NonCPS helper at the bottom.
 */
class JenkinsController implements Serializable {
    def script          // pipeline script object providing steps: echo/sh/sleep/error/readJSON
    String jenkinsUrl   // base URL of the Jenkins instance under control
    def timeout         // overall polling timeout (seconds) used by waitForJobStatus

    JenkinsController(script, String jenkinsUrl = "http://localhost:8080", timeout = 3600) {
        this.script = script
        this.jenkinsUrl = jenkinsUrl
        this.timeout = timeout
    }

    // Polls <jenkinsUrl>/api/json until the instance reports mode 'NORMAL';
    // returns true once it does.
    // NOTE(review): the local 'timeout' (120s) shadows the field of the same
    // name. Also, 'return false' is reached as soon as the API responds with
    // any mode other than 'NORMAL' -- only connection errors (the catch with
    // 'continue') are retried. Confirm this early-exit is intended.
    def waitForJenkinsStarted() {
        def timeout = 120
        def timePerLoop = 5

        for (int i = 0; i < timeout; i += timePerLoop) {
            script.sleep timePerLoop
            try {
                if (retrieveJenkinsStatus() == 'NORMAL') {
                    return true
                }
            } catch (Exception e) {
                script.echo "Could not retrieve status for Jenkins at ${jenkinsUrl}/api/json. Message: ${e.getMessage()}. Retrying..."
                e.printStackTrace()
                continue
            }
            return false
        }
        script.error("Timeout: Jenkins did not start within the expected time frame.")
    }

    // Reads the instance 'mode' field (e.g. 'NORMAL') from the JSON API.
    private retrieveJenkinsStatus() {
        def apiUrl = "${jenkinsUrl}/api/json"
        script.echo "Checking Jenkins Status"
        def response = getTextFromUrl(apiUrl)
        def result = script.readJSON text: response
        return result.mode
    }

    //Trigger scanning of the multi branch builds
    def buildJob(String jobName) {
        script.sh "curl -s -X POST ${jenkinsUrl}/job/${URLEncoder.encode(jobName, 'UTF-8')}/build"
    }

    // Waits for the given job/branch to finish with SUCCESS; always dumps the
    // build's console log, then either echoes success or fails the pipeline.
    // NOTE(review): 'successfull' typo kept -- it is a runtime error message.
    def waitForSuccess(String jobName, String branch) {
        if (this.waitForJobStatus(jobName, branch, 'SUCCESS')) {
            this.printConsoleText(jobName, branch)
            script.echo "Build was successful"
        } else {
            this.printConsoleText(jobName, branch)
            script.error("Build of ${jobName} ${branch} was not successfull")
        }
    }

    // URL of the last build of the given multibranch job/branch.
    def getBuildUrl(String jobName, String branch) {
        return "${jenkinsUrl}/job/${URLEncoder.encode(jobName, 'UTF-8')}/job/${URLEncoder.encode(branch, 'UTF-8')}/lastBuild/"
    }

    // Polls the last build of job/branch until it has finished, then compares
    // its result with the expected status. Returns true on a match and false
    // when the build finished with a different result; still-running builds
    // and connection errors are retried until the instance-level 'timeout'
    // (seconds) is exceeded, which fails the pipeline.
    def waitForJobStatus(String jobName, String branch, String status) {
        def buildUrl = getBuildUrl(jobName, branch)
        def timePerLoop = 10

        for (int i = 0; i < timeout; i += timePerLoop) {
            script.sleep timePerLoop
            try {
                script.echo "Checking Build Status of ${jobName} ${branch}"
                def buildInformation = retrieveBuildInformation(jobName, branch)

                if (buildInformation.building) {
                    script.echo "Build is still in progress"
                    continue
                }
                if (buildInformation.result == status) {
                    return true
                }
            } catch (Exception e) {
                script.echo "Could not retrieve status for ${buildUrl}. Message: ${e.getMessage()}. Retrying..."
                continue
            }
            return false
        }
        script.error("Timeout: Build of job ${jobName}, branch ${branch} did not finish in the expected time frame.")
    }

    // Fetches the plain-text console log of the last build.
    def getConsoleText(String jobName, String branch) {
        def consoleUrl = this.getBuildUrl(jobName, branch) + "/consoleText"
        return getTextFromUrl(consoleUrl)
    }

    // Echoes the console log of the last build, framed by banner lines.
    def printConsoleText(String jobName, String branch) {
        String consoleOutput = getConsoleText(jobName, branch)

        script.echo '***********************************************'
        script.echo '** Begin Output of Example Application Build **'
        script.echo '***********************************************'

        script.echo consoleOutput

        script.echo '*********************************************'
        script.echo '** End Output of Example Application Build **'
        script.echo '*********************************************'
    }

    // Returns the parsed JSON description of the last build; callers use the
    // 'building' and 'result' fields.
    def retrieveBuildInformation(String jobName, String branch) {
        def buildUrl = getBuildUrl(jobName, branch)
        def url = "${buildUrl}/api/json"
        script.echo "Checking Build Status of ${jobName} ${branch}"
        script.echo "${jenkinsUrl}/job/${URLEncoder.encode(jobName, 'UTF-8')}/job/${URLEncoder.encode(branch, 'UTF-8')}/"
        def response = getTextFromUrl(url)
        def result = script.readJSON text: response
        return result
    }

    // Plain HTTP GET, kept @NonCPS so the non-serializable URL object never
    // enters the CPS-transformed program state.
    @NonCPS
    private static String getTextFromUrl(url) {
        return new URL(url).getText()
    }
}

View File

@ -96,7 +96,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
stageName: 'acceptance',
])
// asserts
assertThat(loggingRule.log, containsString('[cloudFoundryDeploy] General parameters: deployTool=, deployType=standard, cfApiEndpoint=https://api.cf.eu10.hana.ondemand.com, cfOrg=testOrg, cfSpace=testSpace, cfCredentialsId=myCreds, deployUser=testUser'))
assertThat(loggingRule.log, containsString('[cloudFoundryDeploy] General parameters: deployTool=, deployType=standard, cfApiEndpoint=https://api.cf.eu10.hana.ondemand.com, cfOrg=testOrg, cfSpace=testSpace, cfCredentialsId=myCreds'))
}
@Test
@ -125,7 +125,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
stageName: 'acceptance'
])
// asserts
assertThat(loggingRule.log, containsString('[cloudFoundryDeploy] General parameters: deployTool=notAvailable, deployType=standard, cfApiEndpoint=https://api.cf.eu10.hana.ondemand.com, cfOrg=testOrg, cfSpace=testSpace, cfCredentialsId=myCreds, deployUser=testUser'))
assertThat(loggingRule.log, containsString('[cloudFoundryDeploy] General parameters: deployTool=notAvailable, deployType=standard, cfApiEndpoint=https://api.cf.eu10.hana.ondemand.com, cfOrg=testOrg, cfSpace=testSpace, cfCredentialsId=myCreds'))
}
@Test

View File

@ -46,13 +46,14 @@ public class CommonStepsTest extends BasePiperTest{
// all steps not adopting the usual pattern of working with the script.
def whitelistScriptReference = [
'commonPipelineEnvironment',
'handlePipelineStepErrors',
'pipelineExecute',
'piperPipeline',
'prepareDefaultValues',
'setupCommonPipelineEnvironment'
]
'commonPipelineEnvironment',
'handlePipelineStepErrors',
'pipelineExecute',
'piperPipeline',
'prepareDefaultValues',
'setupCommonPipelineEnvironment',
'buildSetResult'
]
List steps = getSteps().stream()
.filter {! whitelistScriptReference.contains(it)}
@ -102,17 +103,18 @@ public class CommonStepsTest extends BasePiperTest{
}
private static fieldRelatedWhitelist = [
'durationMeasure', // only expects parameters via signature
'prepareDefaultValues', // special step (infrastructure)
'piperPipeline', // special step (infrastructure)
'pipelineStashFilesAfterBuild', // intended to be called from pipelineStashFiles
'pipelineStashFilesBeforeBuild', // intended to be called from pipelineStashFiles
'pipelineStashFiles', // only forwards to before/after step
'pipelineExecute', // special step (infrastructure)
'commonPipelineEnvironment', // special step (infrastructure)
'handlePipelineStepErrors', // special step (infrastructure)
'piperStageWrapper' //intended to be called from within stages
]
'durationMeasure', // only expects parameters via signature
'prepareDefaultValues', // special step (infrastructure)
'piperPipeline', // special step (infrastructure)
'pipelineStashFilesAfterBuild', // intended to be called from pipelineStashFiles
'pipelineStashFilesBeforeBuild', // intended to be called from pipelineStashFiles
'pipelineStashFiles', // only forwards to before/after step
'pipelineExecute', // special step (infrastructure)
'commonPipelineEnvironment', // special step (infrastructure)
'handlePipelineStepErrors', // special step (infrastructure)
'piperStageWrapper', //intended to be called from within stages
'buildSetResult'
]
@Test
public void generalConfigKeysSetPresentTest() {
@ -170,7 +172,8 @@ public class CommonStepsTest extends BasePiperTest{
def whitelist = [
'commonPipelineEnvironment',
'piperPipeline'
'piperPipeline',
'buildSetResult'
]
def stepsWithWrongStepName = []

View File

@ -0,0 +1,143 @@
#!groovy
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.ExpectedException
import org.junit.rules.RuleChain
import util.*
import static org.hamcrest.Matchers.*
import static org.junit.Assert.assertThat
/**
 * Tests for the kanikoExecute step: docker-less container image builds via
 * the kaniko executor image. Covers default behavior, docker credential
 * handling, custom image names, pass-through build options and custom TLS
 * certificates.
 */
class KanikoExecuteTest extends BasePiperTest {
    private JenkinsStepRule stepRule = new JenkinsStepRule(this)
    private JenkinsShellCallRule shellRule = new JenkinsShellCallRule(this)
    private JenkinsReadFileRule readFileRule = new JenkinsReadFileRule(this, 'test/resources/kaniko/')
    private JenkinsWriteFileRule writeFileRule = new JenkinsWriteFileRule(this)
    private JenkinsDockerExecuteRule dockerExecuteRule = new JenkinsDockerExecuteRule(this)

    @Rule
    public RuleChain rules = Rules
        .getCommonRules(this)
        .around(new JenkinsReadYamlRule(this))
        .around(shellRule)
        .around(readFileRule)
        .around(writeFileRule)
        .around(dockerExecuteRule)
        .around(stepRule)

    // Captures the Map passed to the 'file' credentials binding so tests can
    // inspect the requested credentials id.
    def fileMap = [:]

    @Before
    void init() {
        binding.variables.env.WORKSPACE = '/path/to/current/workspace'

        helper.registerAllowedMethod('file', [Map], { m ->
            fileMap = m
            return m
        })
        helper.registerAllowedMethod('withCredentials', [List, Closure], { l, c ->
            // Make the bound credentials variable available while the body runs.
            binding.setProperty(fileMap.variable, 'config.json')
            try {
                c()
            } finally {
                binding.setProperty(fileMap.variable, null)
            }
        })
        // Make generated temp file names deterministic ('1-config.json').
        UUID.metaClass.static.randomUUID = { -> 1}
    }

    @Test
    void testDefaults() {
        stepRule.step.kanikoExecute(
            script: nullScript
        )
        assertThat(shellRule.shell, hasItem('#!/busybox/sh rm /kaniko/.docker/config.json'))
        assertThat(shellRule.shell, hasItem(allOf(
            startsWith('#!/busybox/sh'),
            containsString('mv 1-config.json /kaniko/.docker/config.json'),
            containsString('/kaniko/executor'),
            containsString('--dockerfile /path/to/current/workspace/Dockerfile'),
            containsString('--context /path/to/current/workspace'),
            containsString('--skip-tls-verify-pull'),
            containsString('--no-push')
        )))
        // Without credentials an empty docker config is written.
        assertThat(writeFileRule.files.values()[0], is('{"auths":{}}'))
        assertThat(dockerExecuteRule.dockerParams, allOf(
            hasEntry('containerCommand', '/busybox/tail -f /dev/null'),
            hasEntry('containerShell', '/busybox/sh'),
            hasEntry('dockerImage', 'gcr.io/kaniko-project/executor:debug'),
            hasEntry('dockerOptions', "-u 0 --entrypoint=''")
        ))
    }

    @Test
    void testCustomDockerCredentials() {
        stepRule.step.kanikoExecute(
            script: nullScript,
            dockerConfigJsonCredentialsId: 'myDockerConfigJson'
        )

        assertThat(fileMap.credentialsId, is('myDockerConfigJson'))
        // The config.json fixture from test/resources/kaniko/ is passed through.
        assertThat(writeFileRule.files.values()[0], allOf(
            containsString('docker.my.domain.com:4444'),
            containsString('"auth": "myAuth"'),
            containsString('"email": "my.user@domain.com"')
        ))
    }

    @Test
    void testCustomImage() {
        stepRule.step.kanikoExecute(
            script: nullScript,
            containerImageNameAndTag: 'my.docker.registry/path/myImageName:myTag'
        )

        assertThat(shellRule.shell, hasItem(allOf(
            startsWith('#!/busybox/sh'),
            containsString('mv 1-config.json /kaniko/.docker/config.json'),
            containsString('/kaniko/executor'),
            containsString('--dockerfile /path/to/current/workspace/Dockerfile'),
            containsString('--context /path/to/current/workspace'),
            containsString('--skip-tls-verify-pull'),
            containsString('--destination my.docker.registry/path/myImageName:myTag')
        )))
    }

    @Test
    void testPreserveDestination() {
        // A --destination passed via containerBuildOptions must not be
        // overridden by the step.
        stepRule.step.kanikoExecute(
            script: nullScript,
            containerBuildOptions: '--destination my.docker.registry/path/myImageName:myTag'
        )

        assertThat(shellRule.shell, hasItem(allOf(
            startsWith('#!/busybox/sh'),
            containsString('mv 1-config.json /kaniko/.docker/config.json'),
            containsString('/kaniko/executor'),
            containsString('--dockerfile /path/to/current/workspace/Dockerfile'),
            containsString('--context /path/to/current/workspace'),
            containsString('--destination my.docker.registry/path/myImageName:myTag')
        )))
    }

    @Test
    void testCustomCertificates() {
        stepRule.step.kanikoExecute(
            script: nullScript,
            customTlsCertificateLinks: ['http://link.one', 'http://link.two']
        )

        // Fix: removed the trailing comma after the last containsString(...)
        // argument -- the pre-Parrot Groovy parser rejects trailing commas in
        // argument lists.
        assertThat(shellRule.shell, hasItem(allOf(
            startsWith('#!/busybox/sh'),
            containsString('rm /kaniko/.docker/config.json'),
            containsString('wget http://link.one -O - >> /kaniko/ssl/certs/ca-certificates.crt'),
            containsString('wget http://link.two -O - >> /kaniko/ssl/certs/ca-certificates.crt')
        )))
    }
}

View File

@ -13,6 +13,7 @@ import util.JenkinsLoggingRule
import util.JenkinsReadYamlRule
import util.JenkinsShellCallRule
import util.JenkinsStepRule
import util.JenkinsWriteFileRule
import util.Rules
public class MtaBuildTest extends BasePiperTest {
@ -23,6 +24,7 @@ public class MtaBuildTest extends BasePiperTest {
private JenkinsDockerExecuteRule dockerExecuteRule = new JenkinsDockerExecuteRule(this)
private JenkinsStepRule stepRule = new JenkinsStepRule(this)
private JenkinsReadYamlRule readYamlRule = new JenkinsReadYamlRule(this).registerYaml('mta.yaml', defaultMtaYaml() )
private JenkinsWriteFileRule writeFileRule = new JenkinsWriteFileRule(this)
@Rule
public RuleChain ruleChain = Rules
@ -33,12 +35,15 @@ public class MtaBuildTest extends BasePiperTest {
.around(shellRule)
.around(dockerExecuteRule)
.around(stepRule)
.around(writeFileRule)
@Before
void init() {
helper.registerAllowedMethod('fileExists', [String], { s -> s == 'mta.yaml' })
helper.registerAllowedMethod('httpRequest', [String.class], { s -> new SettingsStub()})
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*\\$MTA_JAR_LOCATION.*', '')
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*\\$JAVA_HOME.*', '')
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*which java.*', 0)
@ -181,6 +186,38 @@ public class MtaBuildTest extends BasePiperTest {
assert 'something' == dockerExecuteRule.dockerParams.dockerOptions
}
@Test
void canConfigureMavenUserSettings() {
    // A project settings file available in the workspace is copied into the
    // Maven user settings location inside the build container.
    stepRule.step.mtaBuild(script: nullScript, projectSettingsFile: 'settings.xml')

    assert shellRule.shell.any { it.contains('cp settings.xml $HOME/.m2/settings.xml') }
}
@Test
void canConfigureMavenUserSettingsFromRemoteSource() {
    // A project settings file given as URL is downloaded first (as
    // project-settings.xml) and then copied into the Maven user settings location.
    stepRule.step.mtaBuild(script: nullScript, projectSettingsFile: 'https://some.host/my-settings.xml')

    assert shellRule.shell.any { it.contains('cp project-settings.xml $HOME/.m2/settings.xml') }
}
@Test
void canConfigureMavenGlobalSettings() {
    // A global settings file available in the workspace is copied into the
    // Maven installation's conf directory inside the build container.
    stepRule.step.mtaBuild(script: nullScript, globalSettingsFile: 'settings.xml')

    assert shellRule.shell.any { it.contains('cp settings.xml $M2_HOME/conf/settings.xml') }
}
@Test
void canConfigureMavenGlobalSettingsFromRemoteSource() {
    // A global settings file given as URL is downloaded first (as
    // global-settings.xml) and then copied into the Maven conf directory.
    stepRule.step.mtaBuild(script: nullScript, globalSettingsFile: 'https://some.host/my-settings.xml')

    assert shellRule.shell.any { it.contains('cp global-settings.xml $M2_HOME/conf/settings.xml') }
}
@Test
void buildTargetFromDefaultStepConfigurationTest() {
@ -274,4 +311,9 @@ public class MtaBuildTest extends BasePiperTest {
'''
}
// Stand-in for the response object returned by the 'httpRequest' step;
// only getContent() is consumed by the code under test.
class SettingsStub {
    String getContent() {
        "<xml>sometext</xml>"
    }
}
}

View File

@ -0,0 +1,256 @@
import com.sap.piper.JenkinsUtils
import com.sap.piper.Utils
import hudson.AbortException
import org.junit.Assert
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.ExpectedException
import org.junit.rules.RuleChain
import util.*
/**
 * Tests for the multicloudDeploy step, which fans a single deployment out to
 * any number of SAP CP Neo targets (via neoDeploy) and Cloud Foundry targets
 * (via cloudFoundryDeploy). The wrapped steps are replaced by recording mocks
 * (JenkinsMockStepRule) so only the parameter hand-over is asserted.
 */
class MulticloudDeployTest extends BasePiperTest {

    private ExpectedException thrown = new ExpectedException().none()
    private JenkinsStepRule stepRule = new JenkinsStepRule(this)
    // Recording mocks for the two downstream deployment steps.
    private JenkinsMockStepRule neoDeployRule = new JenkinsMockStepRule(this, 'neoDeploy')
    private JenkinsMockStepRule cloudFoundryDeployRule = new JenkinsMockStepRule(this, 'cloudFoundryDeploy')
    private JenkinsReadMavenPomRule readMavenPomRule = new JenkinsReadMavenPomRule(this, 'test/resources/deploy')

    // Deployment targets; re-initialized for every test in init().
    private Map neo1 = [:]
    private Map neo2 = [:]
    private Map cloudFoundry1 = [:]
    private Map cloudFoundry2 = [:]

    @Rule
    public RuleChain ruleChain = Rules
        .getCommonRules(this)
        .around(new JenkinsReadYamlRule(this))
        .around(thrown)
        .around(stepRule)
        .around(neoDeployRule)
        .around(cloudFoundryDeployRule)
        .around(readMavenPomRule)

    // NOTE(review): these two maps are never written or read by the tests
    // below -- candidates for removal.
    private Map neoDeployParameters = [:]
    private Map cloudFoundryDeployParameters = [:]

    @Before
    void init() {

        neo1 = [
                  host: 'test.deploy.host1.com',
                  account: 'trialuser1',
                  credentialsId: 'credentialsId1'
              ]

        neo2 = [
                  host: 'test.deploy.host2.com',
                  account: 'trialuser2',
                  credentialsId: 'credentialsId2'
              ]

        cloudFoundry1 = [
                           appName:'testAppName1',
                           manifest: 'test.yml',
                           org: 'testOrg1',
                           space: 'testSpace1',
                           credentialsId: 'cfCredentialsId1'
                       ]

        cloudFoundry2 = [
                            appName:'testAppName2',
                            manifest: 'test.yml',
                            org: 'testOrg2',
                            space: 'testSpace2',
                            credentialsId: 'cfCredentialsId2'
                        ]

        // Default pipeline configuration: two Neo targets, two CF targets and
        // a cloudFoundryDeploy step configuration (blue-green, cf_native).
        nullScript.commonPipelineEnvironment.configuration = [
            general: [
                neoTargets: [
                    neo1, neo2
                ],
                cfTargets: [
                    cloudFoundry1, cloudFoundry2
                ]
            ],
            stages: [
                acceptance: [
                    org: 'testOrg',
                    space: 'testSpace',
                    deployUser: 'testUser'
                ]
            ],
            steps: [
                cloudFoundryDeploy: [
                    deployTool: 'cf_native',
                    deployType: 'blue-green',
                    keepOldInstance: true,
                    cf_native: [
                        dockerImage: 's4sdk/docker-cf-cli',
                        dockerWorkspace: '/home/piper'
                    ]
                ]
            ]
        ]
    }

    // Without any Neo or CF target the step must abort with a clear message.
    @Test
    void errorNoTargetsDefined() {

        nullScript.commonPipelineEnvironment.configuration.general.neoTargets = []
        nullScript.commonPipelineEnvironment.configuration.general.cfTargets = []

        thrown.expect(Exception)
        thrown.expectMessage('Deployment skipped because no targets defined!')

        stepRule.step.multicloudDeploy(
            script: nullScript,
            stage: 'test'
        )
    }

    // Neo deployment requires a 'source' artifact; missing it must fail.
    @Test
    void errorNoSourceForNeoDeploymentTest() {

        nullScript.commonPipelineEnvironment.configuration.general.neoTargets = [neo1]
        nullScript.commonPipelineEnvironment.configuration.general.cfTargets = []

        thrown.expect(Exception)
        thrown.expectMessage('ERROR - NO VALUE AVAILABLE FOR source')

        stepRule.step.multicloudDeploy(
            script: nullScript,
            stage: 'test'
        )
    }

    // Single Neo target: plain 'deploy' (no zero-downtime requested).
    @Test
    void neoDeploymentTest() {

        nullScript.commonPipelineEnvironment.configuration.general.neoTargets = [neo1]
        nullScript.commonPipelineEnvironment.configuration.general.cfTargets = []

        stepRule.step.multicloudDeploy(
            script: nullScript,
            stage: 'test',
            source: 'file.mtar'
        )

        assert neoDeployRule.hasParameter('script', nullScript)
        assert neoDeployRule.hasParameter('warAction', 'deploy')
        assert neoDeployRule.hasParameter('source', 'file.mtar')
        assert neoDeployRule.hasParameter('neo', neo1)
    }

    // Neo target passed via step parameter + zero-downtime => rolling-update.
    @Test
    void neoRollingUpdateTest() {

        nullScript.commonPipelineEnvironment.configuration.general.neoTargets = []
        nullScript.commonPipelineEnvironment.configuration.general.cfTargets = []

        def neoParam = [
                    host: 'test.param.deploy.host.com',
                    account: 'trialparamNeoUser',
                    credentialsId: 'paramNeoCredentialsId'
                ]

        stepRule.step.multicloudDeploy(
            script: nullScript,
            stage: 'test',
            neoTargets: [neoParam],
            source: 'file.mtar',
            enableZeroDowntimeDeployment: true
        )

        assert neoDeployRule.hasParameter('script', nullScript)
        assert neoDeployRule.hasParameter('warAction', 'rolling-update')
        assert neoDeployRule.hasParameter('source', 'file.mtar')
        assert neoDeployRule.hasParameter('neo', neoParam)
    }

    // CF target passed via step parameter: standard (downtime) deployment.
    @Test
    void cfDeploymentTest() {

        nullScript.commonPipelineEnvironment.configuration.general.neoTargets = []
        nullScript.commonPipelineEnvironment.configuration.general.cfTargets = []

        def cloudFoundry = [
                    appName:'paramTestAppName',
                    manifest: 'test.yml',
                    org: 'paramTestOrg',
                    space: 'paramTestSpace',
                    credentialsId: 'paramCfCredentialsId'
                ]

        stepRule.step.multicloudDeploy([
            script: nullScript,
            stage: 'acceptance',
            cfTargets: [cloudFoundry]
        ])

        assert cloudFoundryDeployRule.hasParameter('script', nullScript)
        assert cloudFoundryDeployRule.hasParameter('deployType', 'standard')
        assert cloudFoundryDeployRule.hasParameter('cloudFoundry', cloudFoundry)
        assert cloudFoundryDeployRule.hasParameter('mtaPath', nullScript.commonPipelineEnvironment.mtarFilePath)
        assert cloudFoundryDeployRule.hasParameter('deployTool', 'cf_native')
    }

    // Zero-downtime flag switches the CF deployment to blue-green.
    @Test
    void cfBlueGreenDeploymentTest() {

        nullScript.commonPipelineEnvironment.configuration.general.neoTargets = []
        nullScript.commonPipelineEnvironment.configuration.general.cfTargets = [cloudFoundry1]

        stepRule.step.multicloudDeploy([
            script: nullScript,
            stage: 'acceptance',
            enableZeroDowntimeDeployment: true
        ])

        assert cloudFoundryDeployRule.hasParameter('script', nullScript)
        assert cloudFoundryDeployRule.hasParameter('deployType', 'blue-green')
        assert cloudFoundryDeployRule.hasParameter('cloudFoundry', cloudFoundry1)
        assert cloudFoundryDeployRule.hasParameter('mtaPath', nullScript.commonPipelineEnvironment.mtarFilePath)
        assert cloudFoundryDeployRule.hasParameter('deployTool', 'cf_native')
    }

    // All four configured targets (2x Neo, 2x CF) are deployed zero-downtime.
    @Test
    void multicloudDeploymentTest() {

        stepRule.step.multicloudDeploy([
            script: nullScript,
            stage: 'acceptance',
            enableZeroDowntimeDeployment: true,
            source: 'file.mtar'
        ])

        assert neoDeployRule.hasParameter('script', nullScript)
        assert neoDeployRule.hasParameter('warAction', 'rolling-update')
        assert neoDeployRule.hasParameter('source', 'file.mtar')
        assert neoDeployRule.hasParameter('neo', neo1)
        assert neoDeployRule.hasParameter('script', nullScript)
        assert neoDeployRule.hasParameter('warAction', 'rolling-update')
        assert neoDeployRule.hasParameter('source', 'file.mtar')
        assert neoDeployRule.hasParameter('neo', neo2)

        assert cloudFoundryDeployRule.hasParameter('script', nullScript)
        assert cloudFoundryDeployRule.hasParameter('deployType', 'blue-green')
        assert cloudFoundryDeployRule.hasParameter('cloudFoundry', cloudFoundry1)
        assert cloudFoundryDeployRule.hasParameter('mtaPath', nullScript.commonPipelineEnvironment.mtarFilePath)
        assert cloudFoundryDeployRule.hasParameter('deployTool', 'cf_native')
        assert cloudFoundryDeployRule.hasParameter('script', nullScript)
        assert cloudFoundryDeployRule.hasParameter('deployType', 'blue-green')
        assert cloudFoundryDeployRule.hasParameter('cloudFoundry', cloudFoundry2)
        assert cloudFoundryDeployRule.hasParameter('mtaPath', nullScript.commonPipelineEnvironment.mtarFilePath)
        assert cloudFoundryDeployRule.hasParameter('deployTool', 'cf_native')
    }
}

View File

@ -6,6 +6,8 @@ import static org.hamcrest.Matchers.containsString
import static org.hamcrest.Matchers.not
import org.hamcrest.Matchers
import org.hamcrest.BaseMatcher
import org.hamcrest.Description
import org.jenkinsci.plugins.credentialsbinding.impl.CredentialNotFoundException
import org.junit.Assert
import org.junit.Before
@ -460,4 +462,57 @@ class NeoDeployTest extends BasePiperTest {
size: 'lite'
])
}
    @Test
    void dontSwallowExceptionWhenUnableToProvideLogsTest() {

        // The original deployment failure must surface to the caller ...
        thrown.expect(AbortException)
        thrown.expectMessage('Something went wrong during neo deployment')
        // ... and the secondary failure (log retrieval) must be attached as a
        // suppressed exception rather than replacing the original one.
        thrown.expect(new BaseMatcher() {

            def expectedException = AbortException
            def expectedText = 'Cannot provide logs.'

            boolean matches(def ex) {
                def suppressed = ex.getSuppressed()
                return (suppressed.size() == 1 &&
                            suppressed[0] in expectedException &&
                            suppressed[0].message == expectedText)
            }

            void describeTo(Description d) {
                d.appendText(" a suppressed ${expectedException} with message ${expectedText}.")
            }
        })

        loggingRule.expect('Unable to provide the logs.')

        helper.registerAllowedMethod('fileExists', [String],
            { f ->
                f == 'archive.mtar'
            }
        )
        // Simulate a failing neo deployment command ...
        helper.registerAllowedMethod('sh', [Map],
            { m ->
                if(m.script.toString().contains('neo.sh deploy-mta'))
                    throw new AbortException('Something went wrong during neo deployment.')
            }
        )
        // ... and a failing attempt to read the neo log files afterwards.
        helper.registerAllowedMethod("sh", [String],
            { cmd ->
                if (cmd == 'cat logs/neo/*')
                    throw new AbortException('Cannot provide logs.')
            }
        )

        // NOTE(review): trailing comma after 'utils: utils,' kept byte-identical;
        // confirm the Groovy version in use accepts it.
        stepRule.step.neoDeploy(script: nullScript,
            source: archiveName,
            neo:[credentialsId: 'myCredentialsId'],
            deployMode: 'mta',
            utils: utils,
        )
    }
}

View File

@ -209,6 +209,66 @@ steps: {}
}
    @Test
    void testConditionConfigKeys() {
        // A step is activated when at least one of the configKeys declared in
        // its stepConditions is present in the merged configuration (general,
        // stage or step level).
        // NOTE(review): the YAML indentation inside the heredoc below was lost
        // during extraction and has been reconstructed -- verify against the
        // original resource file.
        helper.registerAllowedMethod('libraryResource', [String.class], {s ->
            if(s == 'testDefault.yml') {
                return '''
stages:
  testStage1:
    stepConditions:
      firstStep:
        configKeys:
          - myKey1_1
          - myKey1_2
  testStage2:
    stepConditions:
      secondStep:
        configKeys:
          - myKey2_1
  testStage3:
    stepConditions:
      thirdStep:
        configKeys:
          - myKey3_1
'''
            } else {
                return '''
general: {}
steps: {}
'''
            }
        })

        // myKey1_1 set in general scope activates testStage1/firstStep;
        // myKey3_1 set in step scope activates testStage3/thirdStep;
        // testStage2 has no matching key and must stay inactive.
        nullScript.commonPipelineEnvironment.configuration = [
            general: [myKey1_1: 'myVal1_1'],
            stages: [:],
            steps: [thirdStep: [myKey3_1: 'myVal3_1']]
        ]

        jsr.step.piperInitRunStageConfiguration(
            script: nullScript,
            juStabUtils: utils,
            stageConfigResource: 'testDefault.yml'
        )

        assertThat(nullScript.commonPipelineEnvironment.configuration.runStage.keySet(),
            allOf(
                containsInAnyOrder(
                    'testStage1',
                    'testStage3'
                ),
                hasSize(2)
            )
        )

        assertThat(nullScript.commonPipelineEnvironment.configuration.runStep.testStage1.firstStep, is(true))
        assertThat(nullScript.commonPipelineEnvironment.configuration.runStep.testStage2?.secondStep, is(false))
        assertThat(nullScript.commonPipelineEnvironment.configuration.runStep.testStage3.thirdStep, is(true))
    }
@Test
void testConditionFilePattern() {
helper.registerAllowedMethod('libraryResource', [String.class], {s ->

View File

@ -108,8 +108,12 @@ class PiperPipelineTest extends BasePiperTest {
})
helper.registerAllowedMethod('steps', [Closure], null)
helper.registerAllowedMethod('post', [Closure], null)
helper.registerAllowedMethod('always', [Closure], null)
helper.registerAllowedMethod('post', [Closure], {c -> c()})
helper.registerAllowedMethod('success', [Closure], {c -> c()})
helper.registerAllowedMethod('failure', [Closure], {c -> c()})
helper.registerAllowedMethod('aborted', [Closure], {c -> c()})
helper.registerAllowedMethod('unstable', [Closure], {c -> c()})
helper.registerAllowedMethod('cleanup', [Closure], {c -> c()})
helper.registerAllowedMethod('input', [Map], {m -> return null})
@ -156,6 +160,9 @@ class PiperPipelineTest extends BasePiperTest {
helper.registerAllowedMethod('piperPipelineStageRelease', [Map.class], {m ->
stepsCalled.add('piperPipelineStageRelease')
})
helper.registerAllowedMethod('piperPipelineStagePost', [Map.class], {m ->
stepsCalled.add('piperPipelineStagePost')
})
nullScript.prepareDefaultValues(script: nullScript)
@ -227,7 +234,8 @@ class PiperPipelineTest extends BasePiperTest {
'piperPipelineStageCompliance',
'input',
'piperPipelineStagePromote',
'piperPipelineStageRelease'
'piperPipelineStageRelease',
'piperPipelineStagePost'
))
}
}

View File

@ -0,0 +1,55 @@
package util
import com.lesfurets.jenkins.unit.BasePipelineTest
import java.beans.Introspector
import org.junit.rules.TestRule
import org.junit.runner.Description
import org.junit.runners.model.Statement
/**
 * JUnit rule that replaces a single pipeline step (e.g. 'neoDeploy') with a
 * recording mock: every invocation's parameter Map is stored so tests can
 * assert how (and how often) the step was called.
 */
class JenkinsMockStepRule implements TestRule {

    final BasePipelineTest testInstance
    final String stepName
    def callsIndex = 0          // number of recorded calls so far
    def callsParameters = [:]   // call number (1-based) -> parameter Map of that call

    JenkinsMockStepRule(BasePipelineTest testInstance, String stepName) {
        this.testInstance = testInstance
        this.stepName = stepName
    }

    // True if ANY recorded call contained the given key/value pair.
    boolean hasParameter(def key, def value){
        for ( def parameters : callsParameters) {
            for ( def parameter : parameters.value.entrySet()) {
                if (parameter.key.equals(key) && parameter.value.equals(value)) return true
            }
        }
        return false
    }

    @Override
    Statement apply(Statement base, Description description) {
        return new Statement() {
            @Override
            void evaluate() throws Throwable {
                // Register the recorder before the test body runs.
                // NOTE(review): 'this.stepName' inside the anonymous class is
                // resolved against the enclosing rule instance via Groovy
                // property resolution -- confirm when touching this code.
                testInstance.helper.registerAllowedMethod(this.stepName, [Map], { Map m ->
                    this.callsIndex += 1
                    this.callsParameters.put(callsIndex, m)
                })
                base.evaluate()
            }
        }
    }

    @Override
    String toString() {
        return callsParameters.toString()
    }
}

View File

@ -8,7 +8,9 @@ import org.junit.rules.TestRule
import org.junit.runner.Description
import org.junit.runners.model.Statement
class JenkinsStepRule implements TestRule {
final BasePipelineTest testInstance
def step
@ -22,9 +24,11 @@ class JenkinsStepRule implements TestRule {
return new Statement() {
@Override
void evaluate() throws Throwable {
def testClassName = testInstance.getClass().getSimpleName()
def stepName = Introspector.decapitalize(testClassName.replaceAll('Test$', ''))
this.step = testInstance.loadScript("${stepName}.groovy")
base.evaluate()
}
}

View File

@ -0,0 +1,8 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.sap.piper</groupId>
<artifactId>library-test</artifactId>
<packaging>war</packaging>
<version>1.2.3</version>
<name>library-test</name>
</project>

View File

@ -0,0 +1,8 @@
{
"auths": {
"docker.my.domain.com:4444": {
"auth": "myAuth",
"email": "my.user@domain.com"
}
}
}

View File

@ -22,7 +22,7 @@ import groovy.text.SimpleTemplateEngine
'artifactType',
/**
* Defines the tool which is used for building the artifact.
* @possibleValues docker, dlang, golang, maven, mta, npm, pip, sbt
* @possibleValues `dlang`, `docker`, `golang`, `maven`, `mta`, `npm`, `pip`, `sbt`
*/
'buildTool',
/**

View File

@ -0,0 +1,4 @@
// Sets the overall build result (defaults to 'SUCCESS'), logging the
// transition from the previous result first.
void call(currentBuild, result = 'SUCCESS') {
    def message = "Current build result is ${currentBuild.result}, setting it to ${result}."
    echo message
    currentBuild.result = result
}

View File

@ -2,6 +2,7 @@ import com.sap.piper.JenkinsUtils
import static com.sap.piper.Prerequisites.checkScript
import com.sap.piper.GenerateDocumentation
import com.sap.piper.Utils
import com.sap.piper.ConfigurationHelper
import com.sap.piper.CfManifestUtils
@ -14,22 +15,109 @@ import groovy.transform.Field
@Field Set STEP_CONFIG_KEYS = [
'cloudFoundry',
'deployUser',
/**
* Cloud Foundry API endpoint.
* @parentConfigKey cloudFoundry
*/
'apiEndpoint',
/**
* Defines the name of the application to be deployed to the Cloud Foundry space.
* @parentConfigKey cloudFoundry
*/
'appName',
/**
* Credentials to be used for deployment.
* @parentConfigKey cloudFoundry
*/
'credentialsId',
/**
* Defines the manifest to be used for deployment to Cloud Foundry.
* @parentConfigKey cloudFoundry
*/
'manifest',
/**
* Cloud Foundry target organization.
* @parentConfigKey cloudFoundry
*/
'org',
/**
* Cloud Foundry target space.
* @parentConfigKey cloudFoundry
*/
'space',
/**
* Defines the tool which should be used for deployment.
* @possibleValues 'cf_native', 'mtaDeployPlugin'
*/
'deployTool',
/**
* Defines the type of deployment, either `standard` deployment which results in a system downtime or a zero-downtime `blue-green` deployment.
* @possibleValues 'standard', 'blue-green'
*/
'deployType',
/**
* In case of a `blue-green` deployment the old instance will be deleted by default. If this option is set to true the old instance will remain stopped in the Cloud Foundry space.
* @possibleValues true, false
*/
'keepOldInstance',
/** @see dockerExecute */
'dockerImage',
/** @see dockerExecute */
'dockerWorkspace',
/** @see dockerExecute */
'stashContent',
/**
* Defines additional parameters passed to mta for deployment with the mtaDeployPlugin.
*/
'mtaDeployParameters',
/**
* Defines additional extension descriptor file for deployment with the mtaDeployPlugin.
*/
'mtaExtensionDescriptor',
/**
* Defines the path to *.mtar for deployment with the mtaDeployPlugin.
*/
'mtaPath',
/**
* Allows to specify a script which performs a check during blue-green deployment. The script gets the FQDN as parameter and returns `exit code 0` in case check returned `smokeTestStatusCode`.
* More details can be found [here](https://github.com/bluemixgaragelondon/cf-blue-green-deploy#how-to-use) <br /> Currently this option is only considered for deployTool `cf_native`.
*/
'smokeTestScript',
'smokeTestStatusCode',
'stashContent']
/**
* Expected status code returned by the check.
*/
'smokeTestStatusCode'
]
@Field Map CONFIG_KEY_COMPATIBILITY = [cloudFoundry: [apiEndpoint: 'cfApiEndpoint', appName:'cfAppName', credentialsId: 'cfCredentialsId', manifest: 'cfManifest', org: 'cfOrg', space: 'cfSpace']]
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS
/**
* Deploys an application to a test or production space within Cloud Foundry.
* Deployment can be done
*
* * in a standard way
* * in a zero downtime manner (using a [blue-green deployment approach](https://martinfowler.com/bliki/BlueGreenDeployment.html))
*
* !!! note "Deployment supports multiple deployment tools"
* Currently the following are supported:
*
* * Standard `cf push` and [Bluemix blue-green plugin](https://github.com/bluemixgaragelondon/cf-blue-green-deploy#how-to-use)
* * [MTA CF CLI Plugin](https://github.com/cloudfoundry-incubator/multiapps-cli-plugin)
*
* !!! note
* Due to [an incompatible change](https://github.com/cloudfoundry/cli/issues/1445) in the Cloud Foundry CLI, multiple buildpacks are not supported by this step.
* If your `application` contains a list of `buildpacks` instead a single `buildpack`, this will be automatically re-written by the step when blue-green deployment is used.
*
* !!! note
* Cloud Foundry supports the deployment of multiple applications using a single manifest file.
* This option is supported with Piper.
*
* In this case define `appName: ''` since the app name for the individual applications have to be defined via the manifest.
* You can find details in the [Cloud Foundry Documentation](https://docs.cloudfoundry.org/devguide/deploy-apps/manifest.html#multi-apps)
*/
@GenerateDocumentation
void call(Map parameters = [:]) {
handlePipelineStepErrors (stepName: STEP_NAME, stepParameters: parameters) {
@ -62,7 +150,7 @@ void call(Map parameters = [:]) {
stepParam3: parameters?.script == null
], config)
echo "[${STEP_NAME}] General parameters: deployTool=${config.deployTool}, deployType=${config.deployType}, cfApiEndpoint=${config.cloudFoundry.apiEndpoint}, cfOrg=${config.cloudFoundry.org}, cfSpace=${config.cloudFoundry.space}, cfCredentialsId=${config.cloudFoundry.credentialsId}, deployUser=${config.deployUser}"
echo "[${STEP_NAME}] General parameters: deployTool=${config.deployTool}, deployType=${config.deployType}, cfApiEndpoint=${config.cloudFoundry.apiEndpoint}, cfOrg=${config.cloudFoundry.org}, cfSpace=${config.cloudFoundry.space}, cfCredentialsId=${config.cloudFoundry.credentialsId}"
//make sure that all relevant descriptors, are available in workspace
utils.unstashAll(config.stashContent)

View File

@ -162,8 +162,10 @@ void executeOnPod(Map config, utils, Closure body) {
* In case third case, we need to create the 'container' stash to bring the modified content back to the host.
*/
try {
if (config.containerName && config.stashContent.isEmpty()){
config.stashContent.add(stashWorkspace(config, 'workspace'))
def stashContent = config.stashContent
if (config.containerName && stashContent.isEmpty()){
stashContent = [stashWorkspace(config, 'workspace')]
}
podTemplate(getOptions(config)) {
node(config.uniqueId) {
@ -175,7 +177,7 @@ void executeOnPod(Map config, utils, Closure body) {
echo "ContainerConfig: ${containerParams}"
container(containerParams){
try {
utils.unstashAll(config.stashContent)
utils.unstashAll(stashContent)
body()
} finally {
stashWorkspace(config, 'container', true)

View File

@ -13,7 +13,10 @@ import groovy.transform.Field
@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = [
/** Defines the build tool to be used for the test execution.*/
/**
* Defines the build tool to be used for the test execution.
* @possibleValues `maven`, `npm`, `bundler`
*/
'buildTool',
/** @see dockerExecute*/
'dockerEnvVars',
@ -25,7 +28,7 @@ import groovy.transform.Field
'dockerWorkspace',
/**
* Defines the behavior in case tests fail. When this is set to `true` test results cannot be recorded using the `publishTestResults` step afterwards.
* @possibleValues true, false
* @possibleValues `true`, `false`
*/
'failOnError',
/** Defines the command for installing Gauge. In case the `dockerImage` already contains Gauge it can be set to empty: ``.*/

View File

@ -1,5 +1,6 @@
import static com.sap.piper.Prerequisites.checkScript
import com.sap.piper.GenerateDocumentation
import com.sap.piper.ConfigurationHelper
import com.sap.piper.JsonUtils
import com.sap.piper.Utils
@ -11,17 +12,61 @@ import groovy.transform.Field
@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus([
/**
* Defines the version of the current artifact. Defaults to `commonPipelineEnvironment.getArtifactVersion()`
*/
'artifactVersion',
/**
* Defines custom data (map of key-value pairs) to be written to Influx into measurement `jenkins_custom_data`. Defaults to `commonPipelineEnvironment.getInfluxCustomData()`
*/
'customData',
/**
* Defines tags (map of key-value pairs) to be written to Influx into measurement `jenkins_custom_data`. Defaults to `commonPipelineEnvironment.getInfluxCustomDataTags()`
*/
'customDataTags',
/**
* Defines a map of measurement names containing custom data (map of key-value pairs) to be written to Influx. Defaults to `commonPipelineEnvironment.getInfluxCustomDataMap()`
*/
'customDataMap',
/**
* Defines a map of measurement names containing tags (map of key-value pairs) to be written to Influx. Defaults to `commonPipelineEnvironment.getInfluxCustomDataTags()`
*/
'customDataMapTags',
/**
* Defines the name of the Influx server as configured in Jenkins global configuration.
*/
'influxServer',
/**
* Defines a custom prefix.
* For example in multi branch pipelines, where every build is named after the branch built and thus you have different builds called 'master' that report different metrics.
*/
'influxPrefix',
/**
* Defines if a dedicated node/executor should be created in the pipeline run.
* This is especially relevant when running the step in a declarative `POST` stage where by default no executor is available.
*/
'wrapInNode'
])
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS
/**
* Since your Continuous Delivery Pipeline in Jenkins provides your productive development and delivery infrastructure you should monitor the pipeline to ensure it runs as expected. How to setup this monitoring is described in the following.
*
* You basically need three components:
*
* - The [InfluxDB Jenkins plugin](https://wiki.jenkins-ci.org/display/JENKINS/InfluxDB+Plugin) which allows you to send build metrics to InfluxDB servers
* - The [InfluxDB](https://www.influxdata.com/time-series-platform/influxdb/) to store this data (Docker available)
* - A [Grafana](http://grafana.org/) dashboard to visualize the data stored in InfluxDB (Docker available)
*
* !!! note "no InfluxDB available?"
* If you don't have an InfluxDB available yet this step will still provide you some benefit.
*
* It will create following files for you and archive them into your build:
*
* * `jenkins_data.json`: This file gives you build-specific information, like e.g. build result, stage where the build failed
* * `influx_data.json`: This file gives you detailed information about your pipeline, e.g. stage durations, steps executed, ...
*/
@GenerateDocumentation
void call(Map parameters = [:]) {
handlePipelineStepErrors (stepName: STEP_NAME, stepParameters: parameters, allowBuildFailure: true) {

127
vars/kanikoExecute.groovy Normal file
View File

@ -0,0 +1,127 @@
import groovy.text.GStringTemplateEngine
import static com.sap.piper.Prerequisites.checkScript
import com.sap.piper.GenerateDocumentation
import com.sap.piper.ConfigurationHelper
import com.sap.piper.Utils
import groovy.transform.Field
@Field def STEP_NAME = getClass().getName()

// this step has no step-independent (general) configuration keys
@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = [
    /**
     * Defines the build options for the [kaniko](https://github.com/GoogleContainerTools/kaniko) build.
     */
    'containerBuildOptions',
    /** @see dockerExecute */
    'containerCommand',
    /** Defines the full name of the Docker image to be created including registry, image name and tag like `my.docker.registry/path/myImageName:myTag`.*/
    'containerImageNameAndTag',
    /** @see dockerExecute */
    'containerShell',
    /**
     * Defines the command to prepare the Kaniko container.
     * By default the contained credentials are removed in order to allow anonymous access to container registries.
     */
    'containerPreparationCommand',
    /**
     * List containing download links of custom TLS certificates. This is required to ensure trusted connections to registries with custom certificates.
     */
    'customTlsCertificateLinks',
    /**
     * Defines the location of the Dockerfile relative to the Jenkins workspace.
     */
    'dockerfile',
    /**
     * Defines the id of the file credentials in your Jenkins credentials store which contain the file `.docker/config.json`.
     * You can find more details about the Docker credentials in the [Docker documentation](https://docs.docker.com/engine/reference/commandline/login/).
     */
    'dockerConfigJsonCredentialsId',
    /** @see dockerExecute */
    'dockerEnvVars',
    /** @see dockerExecute */
    'dockerOptions',
    /** @see dockerExecute */
    'dockerImage'
]
// every step configuration key may also be passed directly as a step parameter
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS
/**
 * Executes a [Kaniko](https://github.com/GoogleContainerTools/kaniko) build for creating a Docker container.
 */
@GenerateDocumentation
void call(Map parameters = [:]) {
    handlePipelineStepErrors(stepName: STEP_NAME, stepParameters: parameters) {

        final script = checkScript(this, parameters) ?: this

        // load default & individual configuration (defaults < general < step < stage < parameters)
        Map config = ConfigurationHelper.newInstance(this)
            .loadStepDefaults()
            .mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS)
            .mixinStepConfig(script.commonPipelineEnvironment, STEP_CONFIG_KEYS)
            .mixinStageConfig(script.commonPipelineEnvironment, parameters.stageName?:env.STAGE_NAME, STEP_CONFIG_KEYS)
            .mixin(parameters, PARAMETER_KEYS)
            .use()

        // telemetry reporting
        new Utils().pushToSWA([
            step: STEP_NAME
        ], config)

        // resolve GString templates inside the configured build options
        // (allows referencing ${config...} and ${env...} from configuration)
        def buildOptions = new GStringTemplateEngine().createTemplate(config.containerBuildOptions).make([config: config, env: env]).toString()

        // if the user did not configure a push target explicitly, either push to the
        // configured image name/tag or, if none is set, skip pushing entirely
        if (!buildOptions.contains('--destination')) {
            if (config.containerImageNameAndTag) {
                buildOptions += " --destination ${config.containerImageNameAndTag}"
            } else {
                buildOptions += " --no-push"
            }
        }

        dockerExecute(
            script: script,
            containerCommand: config.containerCommand,
            containerShell: config.containerShell,
            dockerEnvVars: config.dockerEnvVars,
            dockerImage: config.dockerImage,
            dockerOptions: config.dockerOptions
        ) {
            // prepare kaniko container for running with proper Docker config.json and custom certificates
            // custom certificates will be downloaded and appended to ca-certificates.crt file used in container
            sh """#!${config.containerShell}
${config.containerPreparationCommand}
${getCertificateUpdate(config.customTlsCertificateLinks)}
"""

            // unique file name avoids clashes with other builds sharing the workspace
            def uuid = UUID.randomUUID().toString()
            if (config.dockerConfigJsonCredentialsId) {
                // write proper config.json with credentials
                withCredentials([file(credentialsId: config.dockerConfigJsonCredentialsId, variable: 'dockerConfigJson')]) {
                    writeFile file: "${uuid}-config.json", text: readFile(dockerConfigJson)
                }
            } else {
                // empty config.json to allow anonymous authentication
                writeFile file: "${uuid}-config.json", text: '{"auths":{}}'
            }

            // execute Kaniko: move the prepared config.json into the location Kaniko reads,
            // then run the build with the Dockerfile and workspace as context
            sh """#!${config.containerShell}
mv ${uuid}-config.json /kaniko/.docker/config.json
/kaniko/executor --dockerfile ${env.WORKSPACE}/${config.dockerfile} --context ${env.WORKSPACE} ${buildOptions}"""
        }
    }
}
/**
 * Builds the shell snippet which downloads each given TLS certificate and
 * appends it to the Kaniko container's ca-certificates.crt file.
 * Returns an empty string when no links are provided.
 */
private String getCertificateUpdate(List certLinks) {
    if (!certLinks) {
        return ''
    }
    def commands = certLinks.collect { link ->
        "wget ${link} -O - >> /kaniko/ssl/certs/ca-certificates.crt\n"
    }
    return commands.join('')
}

View File

@ -6,6 +6,8 @@ import com.sap.piper.Utils
import groovy.transform.Field
import static com.sap.piper.Utils.downloadSettingsFromUrl
@Field def STEP_NAME = getClass().getName()
@Field Set GENERAL_CONFIG_KEYS = []
@ -63,35 +65,33 @@ void call(Map parameters = [:]) {
String command = "mvn"
def globalSettingsFile = configuration.globalSettingsFile
if (globalSettingsFile?.trim()) {
if(globalSettingsFile.trim().startsWith("http")){
downloadSettingsFromUrl(globalSettingsFile)
globalSettingsFile = "settings.xml"
String globalSettingsFile = configuration.globalSettingsFile?.trim()
if (globalSettingsFile) {
if (globalSettingsFile.startsWith("http")) {
globalSettingsFile = downloadSettingsFromUrl(this, globalSettingsFile, 'global-settings.xml')
}
command += " --global-settings '${globalSettingsFile}'"
}
def m2Path = configuration.m2Path
String m2Path = configuration.m2Path
if(m2Path?.trim()) {
command += " -Dmaven.repo.local='${m2Path}'"
}
def projectSettingsFile = configuration.projectSettingsFile
if (projectSettingsFile?.trim()) {
if(projectSettingsFile.trim().startsWith("http")){
downloadSettingsFromUrl(projectSettingsFile)
projectSettingsFile = "settings.xml"
String projectSettingsFile = configuration.projectSettingsFile?.trim()
if (projectSettingsFile) {
if (projectSettingsFile.startsWith("http")) {
projectSettingsFile = downloadSettingsFromUrl(this, projectSettingsFile, 'project-settings.xml')
}
command += " --settings '${projectSettingsFile}'"
}
def pomPath = configuration.pomPath
String pomPath = configuration.pomPath
if(pomPath?.trim()){
command += " --file '${pomPath}'"
}
def mavenFlags = configuration.flags
String mavenFlags = configuration.flags
if (mavenFlags?.trim()) {
command += " ${mavenFlags}"
}
@ -122,9 +122,3 @@ void call(Map parameters = [:]) {
}
}
}
private downloadSettingsFromUrl(String url){
def settings = httpRequest url
writeFile file: 'settings.xml', text: settings.getContent()
}

View File

@ -4,9 +4,10 @@ import com.sap.piper.GenerateDocumentation
import com.sap.piper.ConfigurationHelper
import com.sap.piper.MtaUtils
import com.sap.piper.Utils
import groovy.transform.Field
import static com.sap.piper.Utils.downloadSettingsFromUrl
@Field def STEP_NAME = getClass().getName()
@Field Set GENERAL_CONFIG_KEYS = []
@ -26,7 +27,11 @@ import groovy.transform.Field
* The location of the SAP Multitarget Application Archive Builder jar file, including file name and extension.
* If it is not provided, the SAP Multitarget Application Archive Builder is expected on PATH.
*/
'mtaJarLocation'
'mtaJarLocation',
/** Path or url to the mvn settings file that should be used as global settings file.*/
'globalSettingsFile',
/** Path or url to the mvn settings file that should be used as project settings file.*/
'projectSettingsFile'
]
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS.plus([
/** @see dockerExecute */
@ -59,6 +64,23 @@ void call(Map parameters = [:]) {
dockerExecute(script: script, dockerImage: configuration.dockerImage, dockerOptions: configuration.dockerOptions) {
String projectSettingsFile = configuration.projectSettingsFile?.trim()
if (projectSettingsFile) {
if (projectSettingsFile.startsWith("http")) {
projectSettingsFile = downloadSettingsFromUrl(this, projectSettingsFile, 'project-settings.xml')
}
sh 'mkdir -p $HOME/.m2'
sh "cp ${projectSettingsFile} \$HOME/.m2/settings.xml"
}
String globalSettingsFile = configuration.globalSettingsFile?.trim()
if (globalSettingsFile) {
if (globalSettingsFile.startsWith("http")) {
globalSettingsFile = downloadSettingsFromUrl(this, globalSettingsFile, 'global-settings.xml')
}
sh "cp ${globalSettingsFile} \$M2_HOME/conf/settings.xml"
}
def mtaYamlName = "mta.yaml"
def applicationName = configuration.applicationName

View File

@ -0,0 +1,141 @@
import com.sap.piper.GenerateDocumentation
import com.sap.piper.CloudPlatform
import com.sap.piper.DeploymentType
import com.sap.piper.k8s.ContainerMap
import com.sap.piper.ConfigurationHelper
import com.sap.piper.Utils
import com.sap.piper.JenkinsUtils
import groovy.transform.Field
import static com.sap.piper.Prerequisites.checkScript
@Field String STEP_NAME = getClass().getName()

@Field Set GENERAL_CONFIG_KEYS = [
    /** Defines the targets to deploy on cloudFoundry.*/
    'cfTargets',
    /** Defines the targets to deploy on neo.*/
    'neoTargets'
]
// no dedicated step configuration keys beyond the general ones
@Field Set STEP_CONFIG_KEYS = []
@Field Set PARAMETER_KEYS = GENERAL_CONFIG_KEYS.plus([
    /** The stage name. If the stage name is not provided, it will be taken from the environment variable 'STAGE_NAME'.*/
    'stage',
    /**
     * Enables zero-downtime deployment: influences the deployment type selected
     * for each target platform (see `DeploymentType.selectFor`).
     * @possibleValues `true`, `false`
     */
    'enableZeroDowntimeDeployment',
    /** The source file to deploy to the SAP Cloud Platform.*/
    'source'
])
/**
 * Deploys an application to multiple platforms (cloudFoundry, SAP Cloud Platform) or to multiple instances of multiple platforms or the same platform.
 *
 * All configured deployments are collected first and then executed in a single
 * `runClosures` pass. (Previously the closures were run once per platform block,
 * which executed the Cloud Foundry deployments twice when both `cfTargets` and
 * `neoTargets` were configured.)
 */
@GenerateDocumentation
void call(parameters = [:]) {
    handlePipelineStepErrors(stepName: STEP_NAME, stepParameters: parameters) {

        def stageName = parameters.stage ?: env.STAGE_NAME
        def enableZeroDowntimeDeployment = parameters.enableZeroDowntimeDeployment ?: false

        def script = checkScript(this, parameters) ?: this
        def utils = parameters.utils ?: new Utils()
        def jenkinsUtils = parameters.jenkinsUtils ?: new JenkinsUtils()

        ConfigurationHelper configHelper = ConfigurationHelper.newInstance(this)
            .loadStepDefaults()
            .mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS)
            .mixin(parameters, PARAMETER_KEYS)

        Map config = configHelper.use()

        // 'source' is only mandatory when deploying to Neo targets
        configHelper
            .withMandatoryProperty('source', null, { config.neoTargets })

        // telemetry reporting
        utils.pushToSWA([
            step: STEP_NAME,
            stepParamKey1: 'stage',
            stepParam1: stageName,
            stepParamKey2: 'enableZeroDowntimeDeployment',
            stepParam2: enableZeroDowntimeDeployment
        ], config)

        def index = 1
        def deployments = [:]

        if (config.cfTargets) {
            String deploymentType = DeploymentType.selectFor(CloudPlatform.CLOUD_FOUNDRY, enableZeroDowntimeDeployment).toString()
            // MTA projects are deployed via the CF MTA plugin, everything else natively
            String deployTool = script.commonPipelineEnvironment.configuration.isMta ? 'mtaDeployPlugin' : 'cf_native'

            for (int i = 0; i < config.cfTargets.size(); i++) {
                def target = config.cfTargets[i]
                Closure deployment = {
                    cloudFoundryDeploy(
                        script: script,
                        juStabUtils: utils,
                        jenkinsUtilsStub: jenkinsUtils,
                        deployType: deploymentType,
                        cloudFoundry: target,
                        mtaPath: script.commonPipelineEnvironment.mtarFilePath,
                        deployTool: deployTool
                    )
                }
                setDeployment(deployments, deployment, index, script, stageName)
                index++
            }
        }

        if (config.neoTargets) {
            def deploymentType = DeploymentType.selectFor(CloudPlatform.NEO, enableZeroDowntimeDeployment)

            for (int i = 0; i < config.neoTargets.size(); i++) {
                def target = config.neoTargets[i]
                Closure deployment = {
                    neoDeploy (
                        script: script,
                        warAction: deploymentType.toString(),
                        source: config.source,
                        neo: target
                    )
                }
                setDeployment(deployments, deployment, index, script, stageName)
                index++
            }
        }

        if (deployments) {
            // execute each collected deployment exactly once
            utils.runClosures(deployments)
        } else {
            error "Deployment skipped because no targets defined!"
        }
    }
}
/**
 * Registers a deployment closure in the given deployments map under a
 * human-readable name ("Deployment", "Deployment 2", ...).
 * At execution time the closure runs either inside the Kubernetes container
 * configured for the stage (when running on a pod) or on the current node.
 */
void setDeployment(deployments, deployment, index, script, stageName) {
    deployments["Deployment ${index > 1 ? index : ''}"] = {
        if (!env.POD_NAME) {
            // classic setup: execute on the node the pipeline currently uses
            node(env.NODE_NAME) {
                deployment.run()
            }
        } else {
            // Kubernetes: run inside the container map configured for this stage
            def stageContainers = ContainerMap.instance.getMap().get(stageName) ?: [:]
            dockerExecuteOnKubernetes(script: script, containerMap: stageContainers) {
                deployment.run()
            }
        }
    }
}

View File

@ -1,3 +1,4 @@
import com.sap.piper.GenerateDocumentation
import com.sap.piper.ConfigurationHelper
import com.sap.piper.Utils
import com.sap.piper.StepAssertions
@ -9,22 +10,106 @@ import groovy.transform.Field
import static com.sap.piper.Prerequisites.checkScript
@Field String STEP_NAME = getClass().getName()
@Field Set GENERAL_CONFIG_KEYS = [
'neo'
'neo',
/**
* The SAP Cloud Platform account to deploy to.
* @parentConfigKey neo
* @mandatory for deployMode=warParams
*/
'account',
/**
* Name of the application you want to manage, configure, or deploy.
* @parentConfigKey neo
* @mandatory for deployMode=warParams
*/
'application',
/**
* The Jenkins credentials containing user and password used for SAP CP deployment.
* @parentConfigKey neo
*/
'credentialsId',
/**
* Map of environment variables in the form of KEY: VALUE.
* @parentConfigKey neo
*/
'environment',
/**
* The SAP Cloud Platform host to deploy to.
* @parentConfigKey neo
* @mandatory for deployMode=warParams
*/
'host',
/**
* The path to the .properties file in which all necessary deployment properties for the application are defined.
* @parentConfigKey neo
* @mandatory for deployMode=warPropertiesFile
*/
'propertiesFile',
/**
* Name of SAP Cloud Platform application runtime.
* @parentConfigKey neo
* @mandatory for deployMode=warParams
*/
'runtime',
/**
* Version of SAP Cloud Platform application runtime.
* @parentConfigKey neo
* @mandatory for deployMode=warParams
*/
'runtimeVersion',
/**
* Compute unit (VM) size. Acceptable values: lite, pro, prem, prem-plus.
* @parentConfigKey neo
*/
'size',
/**
* String of VM arguments passed to the JVM.
* @parentConfigKey neo
*/
'vmArguments'
]
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus([
/**
* @see dockerExecute
*/
'dockerEnvVars',
/**
* @see dockerExecute
*/
'dockerImage',
/**
* @see dockerExecute
*/
'dockerOptions',
'neoHome',
/**
* The path to the archive for deployment to SAP CP. If not provided `mtarFilePath` from commom pipeline environment is used instead.
*/
'source'
])
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS.plus([
/**
* The deployment mode which should be used. Available options are:
* *`'mta'` - default,
* *`'warParams'` - deploying WAR file and passing all the deployment parameters via the function call,
* *`'warPropertiesFile'` - deploying WAR file and putting all the deployment parameters in a .properties file.
* @possibleValues 'mta', 'warParams', 'warPropertiesFile'
*/
'deployMode',
/**
* Action mode when using WAR file mode. Available options are `deploy` (default) and `rolling-update` which performs update of an application without downtime in one go.
* @possibleValues 'deploy', 'rolling-update'
*/
'warAction'
])
/**
* Deploys an Application to SAP Cloud Platform (SAP CP) using the SAP Cloud Platform Console Client (Neo Java Web SDK).
*/
@GenerateDocumentation
void call(parameters = [:]) {
handlePipelineStepErrors(stepName: STEP_NAME, stepParameters: parameters) {
@ -150,9 +235,15 @@ private deploy(script, utils, Map configuration, NeoCommandHelper neoCommandHelp
}
}
catch (Exception ex) {
if (dockerImage) {
echo "Error while deploying to SAP Cloud Platform. Here are the neo.sh logs:"
sh "cat logs/neo/*"
try {
sh "cat logs/neo/*"
} catch(Exception e) {
echo "Unable to provide the logs."
ex.addSuppressed(e)
}
}
throw ex
}

View File

@ -1,7 +1,31 @@
import com.sap.piper.GenerateDocumentation
import groovy.transform.Field
@Field STEP_NAME = getClass().getName()
@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = []
@Field Set PARAMETER_KEYS = [
/**
* Can be used to overwrite the default behavior of existing stashes as well as to define additional stashes.
* This parameter handles the _includes_ and can be defined as a map of stash name and include patterns.
* Include pattern has to be a string with comma separated patterns as per [Pipeline basic step `stash`](https://jenkins.io/doc/pipeline/steps/workflow-basic-steps/#stash-stash-some-files-to-be-used-later-in-the-build)
*/
'stashIncludes',
/**
* Can be used to overwrite the default behavior of existing stashes as well as to define additional stashes.
* This parameter handles the _excludes_ and can be defined as a map of stash name and exclude patterns.
* Exclude pattern has to be a string with comma separated patterns as per [Pipeline basic step `stash`](https://jenkins.io/doc/pipeline/steps/workflow-basic-steps/#stash-stash-some-files-to-be-used-later-in-the-build)
*/
'stashExcludes'
]
/**
* This step stashes files that are needed in other build steps (on other nodes).
*/
@GenerateDocumentation
void call(Map parameters = [:], body) {
handlePipelineStepErrors (stepName: 'pipelineStashFiles', stepParameters: parameters) {

View File

@ -1,13 +1,34 @@
import static com.sap.piper.Prerequisites.checkScript
import com.sap.piper.GenerateDocumentation
import com.sap.piper.Utils
import com.sap.piper.ConfigurationHelper
import groovy.transform.Field
@Field String STEP_NAME = getClass().getName()
@Field Set STEP_CONFIG_KEYS = ['noDefaultExludes', 'stashIncludes', 'stashExcludes']
@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = [
/**
* By default certain files are excluded from stashing (e.g. `.git` folder).
* Details can be found as per [Pipeline basic step `stash](https://jenkins.io/doc/pipeline/steps/workflow-basic-steps/#stash-stash-some-files-to-be-used-later-in-the-build).
* This parameter allows to provide a list of stash names for which the standard exclude behavior should be switched off.
* This will allow you to also stash directories like `.git`.
*/
'noDefaultExludes',
/** @see pipelineStashFiles */
'stashIncludes',
/** @see pipelineStashFiles */
'stashExcludes'
]
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS
/**
* This step stashes files that are needed in other build steps (on other nodes).
*/
@GenerateDocumentation
void call(Map parameters = [:]) {
handlePipelineStepErrors (stepName: STEP_NAME, stepParameters: parameters, stepNameDoc: 'stashFiles') {

View File

@ -1,13 +1,34 @@
import static com.sap.piper.Prerequisites.checkScript
import com.sap.piper.GenerateDocumentation
import com.sap.piper.Utils
import com.sap.piper.ConfigurationHelper
import groovy.transform.Field
@Field String STEP_NAME = getClass().getName()
@Field Set STEP_CONFIG_KEYS = ['noDefaultExludes', 'stashIncludes', 'stashExcludes']
@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = [
/**
* By default certain files are excluded from stashing (e.g. `.git` folder).
* Details can be found as per [Pipeline basic step `stash](https://jenkins.io/doc/pipeline/steps/workflow-basic-steps/#stash-stash-some-files-to-be-used-later-in-the-build).
* This parameter allows to provide a list of stash names for which the standard exclude behavior should be switched off.
* This will allow you to also stash directories like `.git`.
*/
'noDefaultExludes',
/** @see pipelineStashFiles */
'stashIncludes',
/** @see pipelineStashFiles */
'stashExcludes'
]
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS
/**
* This step stashes files that are needed in other build steps (on other nodes).
*/
@GenerateDocumentation
void call(Map parameters = [:]) {
handlePipelineStepErrors (stepName: STEP_NAME, stepParameters: parameters, stepNameDoc: 'stashFiles') {

View File

@ -72,6 +72,17 @@ void call(Map parameters = [:]) {
stepActive = true
}
break
case 'configKeys':
if (condition.getValue() instanceof List) {
condition.getValue().each {configKey ->
if (script.commonPipelineEnvironment.getStepConfiguration(step.getKey(), currentStage)?.get(configKey)) {
stepActive = true
}
}
} else if (script.commonPipelineEnvironment.getStepConfiguration(step.getKey(), currentStage)?.get(condition.getValue())) {
stepActive = true
}
break
case 'filePatternFromConfig':
def conditionValue=script.commonPipelineEnvironment.getStepConfiguration(step.getKey(), currentStage)?.get(condition.getValue())
if (conditionValue && findFiles(glob: conditionValue)) {

View File

@ -81,9 +81,13 @@ void call(parameters) {
}
}
post {
always {
influxWriteData script: parameters.script, wrapInNode: true
mailSendNotification script: parameters.script, wrapInNode: true
/* https://jenkins.io/doc/book/pipeline/syntax/#post */
success {buildSetResult(currentBuild)}
aborted {buildSetResult(currentBuild, 'ABORTED')}
failure {buildSetResult(currentBuild, 'FAILURE')}
unstable {buildSetResult(currentBuild, 'UNSTABLE')}
cleanup {
piperPipelineStagePost script: parameters.script
}
}
}

View File

@ -0,0 +1,45 @@
import com.sap.piper.ConfigurationHelper
import com.sap.piper.GenerateDocumentation
import com.sap.piper.Utils
import groovy.transform.Field
import static com.sap.piper.Prerequisites.checkScript
@Field String STEP_NAME = getClass().getName()
// this stage step exposes no configuration keys of its own
@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS
/**
 * In this stage reporting actions like mail notification or telemetry reporting are executed.
 *
 * This stage contains following steps:
 * - [influxWriteData](./influxWriteData.md)
 * - [mailSendNotification](./mailSendNotification.md)
 *
 * !!! note
 *     This stage is meant to be used in a [post](https://jenkins.io/doc/book/pipeline/syntax/#post) section of a pipeline.
 */
@GenerateDocumentation
void call(Map parameters = [:]) {
    def script = checkScript(this, parameters) ?: this
    def utils = parameters.juStabUtils ?: new Utils()
    // strip the 'Declarative: ' prefix to ease handling of stage extensions
    String stageName = (parameters.stageName ?: env.STAGE_NAME).replace('Declarative: ', '')

    Map configuration = ConfigurationHelper.newInstance(this)
        .loadStepDefaults()
        .mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS)
        .mixinStageConfig(script.commonPipelineEnvironment, stageName, STEP_CONFIG_KEYS)
        .mixin(parameters, PARAMETER_KEYS)
        .use()

    piperStageWrapper (script: script, stageName: stageName, stageLocking: false) {
        // telemetry reporting, then the actual reporting steps
        utils.pushToSWA([step: STEP_NAME], configuration)

        influxWriteData script: script
        mailSendNotification script: script
    }
}

View File

@ -14,7 +14,7 @@ import groovy.text.SimpleTemplateEngine
@Field Set GENERAL_CONFIG_KEYS = [
/**
* Defines the tool which is used for executing the tests
* @possibleValues `'maven'`, `'npm'`
* @possibleValues `maven`, `npm`, `bundler`
*/
'buildTool',
/** @see dockerExecute */

View File

@ -63,7 +63,7 @@ import static com.sap.piper.Prerequisites.checkScript
'userTokenCredentialsId',
/**
* Type of development stack used to implement the solution.
* @possibleValues `maven`, `mta`, `npm`, `pip`, `sbt`
* @possibleValues `golang`, `maven`, `mta`, `npm`, `pip`, `sbt`
*/
'scanType',
/**