
Merge branch 'master' into pr/avoidHavingNeoLogFolderStringLiteralThreeTimes

Oliver Nocon 2019-05-22 10:59:18 +02:00 committed by GitHub
commit 14765d5b56
49 changed files with 1079 additions and 260 deletions

View File

@ -71,11 +71,9 @@ To setup the shared library, you need to perform the following steps:
1. Login to your Jenkins instance with administration privileges.
1. Open the system configuration page (*Manage Jenkins > Configure System*).
1. Scroll down to section *Global Pipeline Libraries* and add a new Library by
clicking the *Add* button.
1. Scroll down to section *Global Pipeline Libraries* and add a new Library by clicking the *Add* button.
1. set *Library Name* to `piper-lib-os`
1. set *Default Version* to the branch or tag you want to consume (e.g.
`master` or `v0.1`)
1. set *Default Version* to the branch or tag you want to consume (e.g. `master` or `v0.1`)
1. set *Retrieval Method* to `Modern SCM`
1. set *Source Code Management* to `Git`
1. set *Project Repository* to `https://github.com/SAP/jenkins-library`
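Once the library is registered, a Jenkinsfile can load it by the *Library Name* configured above. A minimal sketch (the `mtaBuild` step is only an illustration taken from this library's step list; adjust to your pipeline):

```groovy
@Library('piper-lib-os') _  // name as configured under *Library Name*

node {
    stage('Build') {
        checkout scm
        setupCommonPipelineEnvironment script: this
        mtaBuild script: this  // illustrative Piper step
    }
}
```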
@ -103,6 +101,7 @@ Feel free to open new issues for feature requests, bugs or general feedback on
the [GitHub issues page of this project][piper-library-issues].
Register for our [google group][google-group] to get updates or to ask questions.
# Contributing
Read and understand our [contribution guidelines][piper-library-contribution]

View File

@ -6,6 +6,8 @@ import com.sap.piper.GenerateDocumentation
import java.util.regex.Matcher
import groovy.text.StreamingTemplateEngine
import com.sap.piper.MapUtils
//
// Collects helper functions for rendering the documentation
//
@ -21,7 +23,8 @@ class TemplateHelper {
def props = parameters.get(it)
def defaultValue = isComplexDefault(props.defaultValue) ? renderComplexDefaultValue(props.defaultValue) : "`${props.defaultValue}`"
def defaultValue = isComplexDefault(props.defaultValue) ? renderComplexDefaultValue(props.defaultValue) :
props.defaultValue != null ? "`${props.defaultValue}`" : ''
t += "| `${it}` | ${props.mandatory ?: props.required ? 'yes' : 'no'} | ${defaultValue} | ${props.value ?: ''} |\n"
}
@ -375,13 +378,6 @@ class Helper {
return mappings
}
static getValue(Map config, def pPath) {
def p =config[pPath.head()]
if(pPath.size() == 1) return p // there is no tail
if(p in Map) getValue(p, pPath.tail())
else return p
}
static resolveDocuRelevantSteps(GroovyScriptEngine gse, File stepsDir) {
def docuRelevantSteps = []
@ -416,26 +412,42 @@ steps = []
//
// assign parameters
if(args.length >= 1)
stepsDir = new File(args[0])
def cli = new CliBuilder(
usage: 'groovy createDocu [<options>]',
header: 'Options:',
footer: 'Copyright: SAP SE')
cli.with {
s longOpt: 'stepsDir', args: 1, argName: 'dir', 'The directory containing the steps. Defaults to \'vars\'.'
d longOpt: 'docuDir', args: 1, argName: 'dir', 'The directory containing the docu stubs. Defaults to \'documentation/docs/steps\'.'
c longOpt: 'customDefaults', args: 1, argName: 'file', 'Additional custom default configuration'
h longOpt: 'help', 'Prints this help.'
}
def options = cli.parse(args)
if(options.h) {
System.err << "Printing help.\n"
cli.usage()
return
}
if(options.s)
stepsDir = new File(Helper.projectRoot, options.s)
stepsDir = stepsDir ?: new File(Helper.projectRoot, "vars")
if(args.length >= 2)
stepsDocuDir = new File(args[1])
if(options.d)
stepsDocuDir = new File(Helper.projectRoot, options.d)
stepsDocuDir = stepsDocuDir ?: new File(Helper.projectRoot, "documentation/docs/steps")
def argsDrop = 2
if(args.length >= 3 && args[2].contains('.yml')) {
customDefaults = args[2]
argsDrop ++
if(options.c) {
customDefaults = options.c
}
if(args.length >= 3)
steps = (args as List).drop(argsDrop) // the first two entries are stepsDir and docuDir
// the other parts are considered as step names
steps.addAll(options.arguments())
// assign parameters
//
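Given the `CliBuilder` options above, the script can presumably be invoked as follows (the trailing step name is an illustrative positional argument, consumed via `options.arguments()`):

```
groovy createDocu -s vars -d documentation/docs/steps mtaBuild
```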
@ -625,9 +637,9 @@ def handleStep(stepName, prepareDefaultValuesStep, gse, customDefaults) {
step.parameters['script'] = [
docu: 'The common script environment of the Jenkinsfile running. ' +
'Typically the reference to the script calling the pipeline ' +
'step is provided with the this parameter, as in `script: this`. ' +
'step is provided with the `this` parameter, as in `script: this`. ' +
'This allows the function to access the ' +
'commonPipelineEnvironment for retrieving, for example, configuration parameters.',
'`commonPipelineEnvironment` for retrieving, e.g. configuration parameters.',
required: true,
GENERAL_CONFIG: false,
@ -640,7 +652,7 @@ def handleStep(stepName, prepareDefaultValuesStep, gse, customDefaults) {
it ->
def defaultValue = Helper.getValue(defaultConfig, it.split('/'))
def defaultValue = MapUtils.getByPath(defaultConfig, it)
def parameterProperties = [
defaultValue: defaultValue,
@ -675,7 +687,7 @@ def handleStep(stepName, prepareDefaultValuesStep, gse, customDefaults) {
[
dependentParameterKey: dependentParameterKey,
key: possibleValue,
value: Helper.getValue(defaultConfig.get(possibleValue), k.split('/'))
value: MapUtils.getByPath(defaultConfig.get(possibleValue), k)
]
}
}

View File

@ -11,10 +11,10 @@ Your configuration inherits from the default configuration located at [https://g
Configuration of the Piper steps as well as of the Piper templates can be done in a hierarchical manner.
1. Directly passed step parameters will always take precedence over other configuration values and defaults
2. Stage configuration parameters define a Jenkins pipeline stage dependent set of parameters (e.g. deployment options for the `Acceptance` stage)
3. Step configuration defines how steps behave in general (e.g. step `cloudFoundryDeploy`)
4. General configuration parameters define parameters which are available across step boundaries
5. Default configuration comes with the Piper library and is always available
1. Stage configuration parameters define a Jenkins pipeline stage dependent set of parameters (e.g. deployment options for the `Acceptance` stage)
1. Step configuration defines how steps behave in general (e.g. step `cloudFoundryDeploy`)
1. General configuration parameters define parameters which are available across step boundaries
1. Default configuration comes with the Piper library and is always available
![Piper Configuration](images/piper_config.png)
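A sketch of a `.pipeline/config.yml` illustrating the stage, step, and general layers (stage, step, and parameter names are illustrative; the default layer ships with the library):

```yaml
general:
  productiveBranch: 'master'          # available across step boundaries
stages:
  Acceptance:
    cfSpace: 'test'                   # only applies to the 'Acceptance' stage
steps:
  cloudFoundryDeploy:
    deployTool: 'cf_native'           # applies wherever the step runs
```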

View File

@ -4,4 +4,4 @@
.md-typeset a:not(.headerlink):hover {
text-decoration: underline;
}
}

View File

@ -33,10 +33,10 @@ The basic workflow is as follows:
**Note:** The blank line between message header and message description is mandatory.
2. To communicate with SAP Solution Manager, the pipeline uses credentials that must be stored on Jenkins using the credential ID `CM`. For more information, see [checkChangeInDevelopment](https://sap.github.io/jenkins-library/steps/checkChangeInDevelopment/).
3. The required transport request is created on the fly. **Note:** The change document can contain various components (for example, UI and backend components).
4. The changes of your development team trigger the Jenkins pipeline. It builds and validates the changes and attaches them to the respective transport request.
5. As soon as the development process is completed, the change document in SAP Solution Manager can be set to status `to be tested` and all components can be transported to the test system.
1. To communicate with SAP Solution Manager, the pipeline uses credentials that must be stored on Jenkins using the credential ID `CM`. For more information, see [checkChangeInDevelopment](https://sap.github.io/jenkins-library/steps/checkChangeInDevelopment/).
1. The required transport request is created on the fly. **Note:** The change document can contain various components (for example, UI and backend components).
1. The changes of your development team trigger the Jenkins pipeline. It builds and validates the changes and attaches them to the respective transport request.
1. As soon as the development process is completed, the change document in SAP Solution Manager can be set to status `to be tested` and all components can be transported to the test system.
![Hybrid Application Development Workflow](../images/Scenario_SolMan.png "Hybrid Application Development Workflow")
###### Hybrid Application Development Workflow

View File

@ -11,12 +11,10 @@ Build an application based on SAPUI5 or SAP Fiori with Jenkins and deploy the bu
* You have installed Node.js including node and npm. See [Node.js](https://nodejs.org/en/download/).
* You have installed the SAP Cloud Platform Neo Environment SDK. See [SAP Development Tools](https://tools.hana.ondemand.com/#cloud).
### Project Prerequisites
This scenario requires additional files in your project and in the execution environment on your Jenkins instance.
On the project level, provide and adjust the following template:
| File Name | Description | Position |
@ -26,12 +24,10 @@ On the project level, provide and adjust the following template:
| [`package.json`](https://github.com/SAP/jenkins-library/blob/master/documentation/docs/scenarios/ui5-sap-cp/files/package.json) | This file lists the required development dependencies for the build. | Add the content of the `package.json` file to your existing `package.json` file. |
| [`Gruntfile.js`](https://github.com/SAP/jenkins-library/blob/master/documentation/docs/scenarios/ui5-sap-cp/files/Gruntfile.js) | This file controls the grunt build. By default the tasks `clean`, `build`, and `lint` are executed. | Place the `Gruntfile.js` in the root directory of your project. |
## Context
This scenario combines various steps to create a complete pipeline.
In this scenario, we want to show how to build an application based on SAPUI5 or SAP Fiori by using the multi-target application (MTA) concept and how to deploy the build result into an SAP Cloud Platform account in the Neo environment. This document comprises the [mtaBuild](https://sap.github.io/jenkins-library/steps/mtaBuild/) and the [neoDeploy](https://sap.github.io/jenkins-library/steps/neoDeploy/) steps.
![This pipeline in Jenkins Blue Ocean](images/pipeline.jpg)
@ -73,7 +69,6 @@ steps:
| `buildTarget` | The target platform to which the mtar can be deployed. Possible values are: `CF`, `NEO`, `XSA` |
| `mtaJarLocation` | The location of the multi-target application archive builder jar file, including file name and extension. |
#### Configuration for the Deployment to SAP Cloud Platform
| Parameter | Description |
@ -83,7 +78,6 @@ steps:
| `host` | The SAP Cloud Platform host to deploy to. |
| `neoHome` | The path to the `neo-java-web-sdk` tool that is used for the deployment. |
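Combining the parameters from both tables, the `steps` section of the project configuration might look roughly like this (host and paths are placeholders):

```yaml
steps:
  mtaBuild:
    buildTarget: 'NEO'
    mtaJarLocation: '/opt/sap/mta/lib/mta.jar'   # placeholder path
  neoDeploy:
    host: 'hana.ondemand.com'                    # placeholder host
    neoHome: '/opt/sap/neo-sdk'                  # placeholder path
```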
### Parameters
For the detailed description of the relevant parameters, see:

View File

@ -32,8 +32,7 @@ resource in an custom shared library.
// inside the shared lib denoted by 'foo' the additional configuration file
// needs to be located under 'resources' ('resources/myConfig.yml')
prepareDefaultValues script: this,
customDefaults: 'myConfig.yml'
prepareDefaultValues script: this, customDefaults: 'myConfig.yml'
```
Example content of `'resources/myConfig.yml'` in branch `'master'` of the repository denoted by
@ -79,11 +78,13 @@ The parameters can also be provided when the step is invoked:
// explict endpoint provided, we search for changeDocumentId
// starting at the previous commit (HEAD~1) rather than on
// 'origin/master' (the default).
checkChangeInDevelopment script:this
changeManagement: [
endpoint: 'https:example.org/cm'
git: [
from: 'HEAD~1'
]
]
checkChangeInDevelopment(
    script: this,
    changeManagement: [
        endpoint: 'https://example.org/cm',
        git: [
            from: 'HEAD~1'
        ]
    ]
)
```

View File

@ -15,8 +15,8 @@ Very basic setup can be done like that (with user "admin" and password "adminPwd
For a more advanced setup, please refer to the respective documentation:
- https://hub.docker.com/_/influxdb/ (and https://github.com/docker-library/docs/tree/master/influxdb)
- https://hub.docker.com/r/grafana/grafana/ (and https://github.com/grafana/grafana-docker)
- InfluxDB ([Docker Hub](https://hub.docker.com/_/influxdb/) [GitHub](https://github.com/docker-library/docs/tree/master/influxdb))
- Grafana ([Docker Hub](https://hub.docker.com/r/grafana/grafana/) [GitHub](https://github.com/grafana/grafana-docker))
After you have started your InfluxDB docker you need to create a database:
@ -43,7 +43,7 @@ Once you have started both docker containers and Influx and Grafana are running
To setup your Jenkins you need to do two configuration steps:
1. Configure Jenkins (via Manage Jenkins)
2. Adapt pipeline configuration
1. Adapt pipeline configuration
### Configure Jenkins

View File

@ -10,7 +10,7 @@ Kaniko expects a Docker `config.json` file containing the credential information
You can create it as explained in the Docker Success Center article about [How to generate a new auth in the config.json file](https://success.docker.com/article/generate-new-auth-in-config-json-file).
Please copy this file and upload it to your Jenkins for example<br />
via _Jenkins_ -> _Credentials_ -> _System_ -> _Global credentials (unrestricted)_ -> _ Add Credentials_ ->
via _Jenkins_ -> _Credentials_ -> _System_ -> _Global credentials (unrestricted)_ -> _Add Credentials_ ->
* Kind: _Secret file_
* File: upload your `config.json` file

View File

@ -25,15 +25,15 @@ none
## Exceptions
* `Exception`:
* If `source` is not provided.
* If `propertiesFile` is not provided (when using `'WAR_PROPERTIESFILE'` deployment mode).
* If `application` is not provided (when using `'WAR_PARAMS'` deployment mode).
* If `runtime` is not provided (when using `'WAR_PARAMS'` deployment mode).
* If `runtimeVersion` is not provided (when using `'WAR_PARAMS'` deployment mode).
* If `source` is not provided.
* If `propertiesFile` is not provided (when using `'WAR_PROPERTIESFILE'` deployment mode).
* If `application` is not provided (when using `'WAR_PARAMS'` deployment mode).
* If `runtime` is not provided (when using `'WAR_PARAMS'` deployment mode).
* If `runtimeVersion` is not provided (when using `'WAR_PARAMS'` deployment mode).
* `AbortException`:
* If neo-java-web-sdk is not installed, or `neoHome` is wrong.
* If neo-java-web-sdk is not installed, or `neoHome` is wrong.
* `CredentialNotFoundException`:
* If the credentials cannot be resolved.
* If the credentials cannot be resolved.
## Example
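A minimal sketch, assuming this is the `neoDeploy` step documentation and using only the `source` parameter described above (the path is a placeholder):

```groovy
neoDeploy script: this, source: 'target/myApp.mtar'
```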

View File

@ -1,7 +1,6 @@
# ${docGenStepName}
## ${docGenDescription}
## ${docGenParameters}

View File

@ -18,7 +18,7 @@ seleniumExecuteTests (script: this) {
### Example test using WebdriverIO
Example based on http://webdriver.io/guide/getstarted/modes.html and http://webdriver.io/guide.html
Example based on <http://webdriver.io/guide/getstarted/modes.html> and <http://webdriver.io/guide.html>
#### Configuration for Local Docker Environment

View File

@ -0,0 +1,18 @@
# ${docGenStepName}
## ${docGenDescription}
## Prerequisites
- The project needs a `sonar-project.properties` file that describes the project and defines certain settings, see [here](https://docs.sonarqube.org/display/SCAN/Advanced+SonarQube+Scanner+Usages#AdvancedSonarQubeScannerUsages-Multi-moduleProjectStructure).
- A SonarQube instance needs to be defined in Jenkins.
## ${docGenParameters}
## ${docGenConfiguration}
## Exceptions
none
## Examples
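A minimal sketch, using only the mandatory `script` parameter and relying on the defaults shipped with this commit:

```groovy
sonarExecuteScan script: this
```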

View File

@ -23,8 +23,7 @@ resource in an custom shared library.
// inside the shared lib denoted by 'foo' the additional configuration file
// needs to be located under 'resources' ('resources/myConfig.yml')
prepareDefaultValues script: this,
customDefaults: 'myConfig.yml'
prepareDefaultValues script: this, customDefaults: 'myConfig.yml'
```
Example content of `'resources/myConfig.yml'` in branch `'master'` of the repository denoted by

View File

@ -8,11 +8,8 @@
## ${docGenParameters}
## ${docGenConfiguration}
The step is configured using a customer configuration file provided as a
resource in a custom shared library.

View File

@ -74,21 +74,25 @@ The parameters can also be provided when the step is invoked. For examples see b
```groovy
// SOLMAN
transportRequestUploadFile script:this,
changeDocumentId: '001', // typically provided via git commit history
transportRequestId: '001', // typically provided via git commit history
applicationId: '001',
filePath: '/path',
changeManagement:[
type: 'SOLMAN'
endpoint: 'https://example.org/cm'
]
transportRequestUploadFile(
script: this,
changeDocumentId: '001', // typically provided via git commit history
transportRequestId: '001', // typically provided via git commit history
applicationId: '001',
filePath: '/path',
changeManagement: [
type: 'SOLMAN',
endpoint: 'https://example.org/cm'
]
)
// CTS
transportRequestUploadFile script:this,
transportRequestId: '001', // typically provided via git commit history
filePath: '/path',
changeManagement:[
type: 'CTS'
endpoint: 'https://example.org/cm'
]
transportRequestUploadFile(
script: this,
transportRequestId: '001', // typically provided via git commit history
filePath: '/path',
changeManagement: [
type: 'CTS',
endpoint: 'https://example.org/cm'
]
)
```

View File

@ -37,6 +37,7 @@ nav:
- setupCommonPipelineEnvironment: steps/setupCommonPipelineEnvironment.md
- slackSendNotification: steps/slackSendNotification.md
- snykExecute: steps/snykExecute.md
- sonarExecuteScan: steps/sonarExecuteScan.md
- testsPublishResults: steps/testsPublishResults.md
- transportRequestCreate: steps/transportRequestCreate.md
- transportRequestRelease: steps/transportRequestRelease.md

pom.xml
View File

@ -1,7 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<project
xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<groupId>org.jenkins-ci.plugins</groupId>
<artifactId>plugin</artifactId>
@ -17,10 +19,10 @@
<url>https://sap.github.io/jenkins-library/</url>
<licenses>
<license>
<name>Apache License 2.0</name>
<comments>https://github.com/SAP/jenkins-library/blob/master/LICENSE</comments>
</license>
<license>
<name>Apache License 2.0</name>
<comments>https://github.com/SAP/jenkins-library/blob/master/LICENSE</comments>
</license>
</licenses>
<repositories>

View File

@ -44,6 +44,8 @@ general:
# runAsUser: 1000
# fsGroup: 1000
manualConfirmation: true
manualConfirmationMessage: 'Shall we proceed to Promote & Release?'
manualConfirmationTimeout: 720 # 1 month
productiveBranch: 'master'
whitesource:
serviceUrl: 'https://saas.whitesourcesoftware.com/api'
@ -463,6 +465,12 @@ steps:
- 'opensourceConfiguration'
toJson: false
toHtml: false
sonarExecuteScan:
dockerImage: 'maven:3.5-jdk-8'
instance: 'SonarCloud'
options: []
pullRequestProvider: 'github'
sonarScannerDownloadUrl: 'https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-3.3.0.1492-linux.zip'
testsPublishResults:
failOnError: false
junit:

View File

@ -127,9 +127,11 @@ class ConfigurationHelper implements Serializable {
handleValidationFailures()
MapUtils.traverse(config, { v -> (v instanceof GString) ? v.toString() : v })
if(config.verbose) step.echo "[${name}] Configuration: ${config}"
return config
return MapUtils.deepCopy(config)
}
/* private */ def getConfigPropertyNested(key) {
return getConfigPropertyNested(config, key)
}
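Because `use()` now returns `MapUtils.deepCopy(config)` instead of the internal map, callers can modify the returned configuration without side effects on cached state. A hypothetical sketch (step code assumed; builder methods as used elsewhere in this commit):

```groovy
// hypothetical step code
Map config = ConfigurationHelper.newInstance(this)
    .loadStepDefaults()
    .mixin(parameters, PARAMETER_KEYS)
    .use()

// mutating the returned copy no longer leaks into shared configuration state
config.options = (config.options ?: []) + '-Dsonar.verbose=true'
```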

View File

@ -0,0 +1,12 @@
package com.sap.piper
import java.lang.annotation.ElementType
import java.lang.annotation.Retention
import java.lang.annotation.RetentionPolicy
import java.lang.annotation.Target
@Retention(RetentionPolicy.RUNTIME)
@Target([ElementType.METHOD, ElementType.TYPE])
public @interface GenerateStageDocumentation {
public String defaultStageName()
}

View File

@ -33,25 +33,27 @@ String getGitCommitId() {
return sh(returnStdout: true, script: 'git rev-parse HEAD').trim()
}
String[] extractLogLines(String filter = '',
String from = 'origin/master',
String to = 'HEAD',
String format = '%b') {
String[] extractLogLines(
String filter = '',
String from = 'origin/master',
String to = 'HEAD',
String format = '%b'
) {
// Checks below: there was a value provided from outside, but the value was null.
// Throwing an exception is more transparent than making a fallback to the defaults
// used in case the parameter is omitted in the signature.
if(filter == null) throw new IllegalArgumentException('Parameter \'filter\' not provided.')
if(! from?.trim()) throw new IllegalArgumentException('Parameter \'from\' not provided.')
if(! to?.trim()) throw new IllegalArgumentException('Parameter \'to\' not provided.')
if(! format?.trim()) throw new IllegalArgumentException('Parameter \'format\' not provided.')
// Checks below: there was a value provided from outside, but the value was null.
// Throwing an exception is more transparent than making a fallback to the defaults
// used in case the parameter is omitted in the signature.
if(filter == null) throw new IllegalArgumentException('Parameter \'filter\' not provided.')
if(! from?.trim()) throw new IllegalArgumentException('Parameter \'from\' not provided.')
if(! to?.trim()) throw new IllegalArgumentException('Parameter \'to\' not provided.')
if(! format?.trim()) throw new IllegalArgumentException('Parameter \'format\' not provided.')
sh ( returnStdout: true,
script: """#!/bin/bash
git log --pretty=format:${format} ${from}..${to}
"""
)?.split('\n')
?.findAll { line -> line ==~ /${filter}/ }
script: """#!/bin/bash
git log --pretty=format:${format} ${from}..${to}
"""
)?.split('\n')
?.findAll { line -> line ==~ /${filter}/ }
}

View File

@ -62,4 +62,56 @@ class MapUtils implements Serializable {
}
m.putAll(updates)
}
static private def getByPath(Map m, def key) {
List path = key in CharSequence ? key.tokenize('/') : key
def value = m.get(path.head())
if (path.size() == 1) return value
if (value in Map) return getByPath(value, path.tail())
return null
}
/*
* Provides a new map with the same content as the original map.
* Nested Collections and Maps are copied. Values which are not
* Collections/Maps are not copied/cloned.
* <paranoia>The keys are also not copied/cloned, even if they are
* Maps or Collections.</paranoia>
*/
static deepCopy(Map original) {
Map copy = [:]
for (def e : original.entrySet()) {
if(e.value == null) {
copy.put(e.key, e.value)
} else {
copy.put(e.key, deepCopy(e.value))
}
}
copy
}
/* private */ static deepCopy(Set original) {
Set copy = []
for(def e : original)
copy << deepCopy(e)
copy
}
/* private */ static deepCopy(List original) {
List copy = []
for(def e : original)
copy << deepCopy(e)
copy
}
/*
* In fact not a copy, but a catch all for everything not matching
* with the other signatures
*/
/* private */ static deepCopy(def original) {
original
}
}
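A short usage sketch of the two new helpers (map content is illustrative; the behavior mirrors the tests added in this commit):

```groovy
def config = [general: [productiveBranch: 'master']]

assert MapUtils.getByPath(config, 'general/productiveBranch') == 'master'
assert MapUtils.getByPath(config, 'general/missing') == null

def copy = MapUtils.deepCopy(config)
assert copy == config                      // same content ...
assert !copy.is(config)                    // ... but a different map instance
assert !copy.general.is(config.general)    // nested maps are copied as well
```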

View File

@ -37,4 +37,3 @@ class MtaUtils {
if (!script.fileExists(targetMtaDescriptor)) throw new AbortException("'${targetMtaDescriptor}' has not been generated.")
}
}

View File

@ -17,32 +17,32 @@ public class ChangeManagement implements Serializable {
}
String getChangeDocumentId(
String from = 'origin/master',
String to = 'HEAD',
String label = 'ChangeDocument\\s?:',
String format = '%b'
) {
String from = 'origin/master',
String to = 'HEAD',
String label = 'ChangeDocument\\s?:',
String format = '%b'
) {
return getLabeledItem('ChangeDocumentId', from, to, label, format)
}
String getTransportRequestId(
String from = 'origin/master',
String to = 'HEAD',
String label = 'TransportRequest\\s?:',
String format = '%b'
) {
String from = 'origin/master',
String to = 'HEAD',
String label = 'TransportRequest\\s?:',
String format = '%b'
) {
return getLabeledItem('TransportRequestId', from, to, label, format)
}
private String getLabeledItem(
String name,
String from,
String to,
String label,
String format
) {
String name,
String from,
String to,
String label,
String format
) {
if( ! gitUtils.insideWorkTree() ) {
throw new ChangeManagementException("Cannot retrieve ${name}. Not in a git work tree. ${name} is extracted from git commit messages.")
@ -421,16 +421,16 @@ public class ChangeManagement implements Serializable {
String clientOpts = '') {
String cmCommandLine = '#!/bin/bash'
if(clientOpts) {
cmCommandLine += """
export CMCLIENT_OPTS="${clientOpts}" """
cmCommandLine += """
export CMCLIENT_OPTS="${clientOpts}" """
}
cmCommandLine += """
cmclient -e '$endpoint' \
-u '$username' \
-p '$password' \
-t ${type} \
${command} ${(args as Iterable).join(' ')}
"""
cmclient -e '$endpoint' \
-u '$username' \
-p '$password' \
-t ${type} \
${command} ${(args as Iterable).join(' ')}
"""
return cmCommandLine
}
}
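Both IDs are extracted from git commit message bodies using the labels above; a commit message carrying both might look like this (the IDs are purely illustrative):

```
Fix pricing calculation in backend service

ChangeDocument: 8000001234
TransportRequest: ABCK900042
```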

View File

@ -23,7 +23,7 @@ public class StepHelpers {
}
script.echo "[INFO] Retrieving transport request id from commit history [from: ${configuration.changeManagement.git.from}, to: ${configuration.changeManagement.git.to}]." +
" Searching for pattern '${configuration.changeManagement.transportRequestLabel}'. Searching with format '${configuration.changeManagement.git.format}'."
" Searching for pattern '${configuration.changeManagement.transportRequestLabel}'. Searching with format '${configuration.changeManagement.git.format}'."
try {
transportRequestId = cm.getTransportRequestId(
@ -62,7 +62,7 @@ public class StepHelpers {
}
script.echo "[INFO] Retrieving ChangeDocumentId from commit history [from: ${configuration.changeManagement.git.from}, to: ${configuration.changeManagement.git.to}]." +
"Searching for pattern '${configuration.changeManagement.changeDocumentLabel}'. Searching with format '${configuration.changeManagement.git.format}'."
"Searching for pattern '${configuration.changeManagement.changeDocumentLabel}'. Searching with format '${configuration.changeManagement.git.format}'."
try {
changeDocumentId = cm.getChangeDocumentId(
@ -91,15 +91,15 @@ public class StepHelpers {
backendType = configuration.changeManagement.type as BackendType
} catch(IllegalArgumentException e) {
script.error "Invalid backend type: '${configuration.changeManagement.type}'. " +
"Valid values: [${BackendType.values().join(', ')}]. " +
"Configuration: 'changeManagement/type'."
"Valid values: [${BackendType.values().join(', ')}]. " +
"Configuration: 'changeManagement/type'."
}
if (backendType == BackendType.NONE) {
script.echo "[INFO] Change management integration intentionally switched off. " +
"In order to enable it provide 'changeManagement/type with one of " +
"[${BackendType.values().minus(BackendType.NONE).join(', ')}] and maintain " +
"other required properties like 'endpoint', 'credentialsId'."
"In order to enable it provide 'changeManagement/type with one of " +
"[${BackendType.values().minus(BackendType.NONE).join(', ')}] and maintain " +
"other required properties like 'endpoint', 'credentialsId'."
}
return backendType

View File

@ -4,6 +4,7 @@ import static org.hamcrest.Matchers.equalTo
import static org.hamcrest.Matchers.is
import static org.junit.Assert.assertThat
import static org.junit.Assert.fail
import static util.StepHelper.getSteps
import java.io.File;
import java.util.stream.Collectors
@ -242,11 +243,4 @@ public class CommonStepsTest extends BasePiperTest{
assertThat("Steps with call methods with return types other than void: ${stepsWithCallMethodsOtherThanVoid}",
stepsWithCallMethodsOtherThanVoid, is(empty()))
}
private static getSteps() {
List steps = []
new File('vars').traverse(type: FileType.FILES, maxDepth: 0)
{ if(it.getName().endsWith('.groovy')) steps << (it =~ /vars[\\\/](.*)\.groovy/)[0][1] }
return steps
}
}

View File

@ -35,9 +35,6 @@ class NeoDeployTest extends BasePiperTest {
private JenkinsLoggingRule loggingRule = new JenkinsLoggingRule(this)
private JenkinsShellCallRule shellRule = new JenkinsShellCallRule(this)
private JenkinsStepRule stepRule = new JenkinsStepRule(this)
private JenkinsLockRule lockRule = new JenkinsLockRule(this)
private JenkinsFileExistsRule fileExistsRule = new JenkinsFileExistsRule(this, ['warArchive.war', 'archive.mtar', 'war.properties'])
@Rule
public RuleChain ruleChain = Rules
@ -51,9 +48,9 @@ class NeoDeployTest extends BasePiperTest {
.withCredentials('myCredentialsId', 'anonymous', '********')
.withCredentials('CI_CREDENTIALS_ID', 'defaultUser', '********'))
.around(stepRule)
.around(lockRule)
.around(new JenkinsLockRule(this))
.around(new JenkinsWithEnvRule(this))
.around(fileExistsRule)
.around(new JenkinsFileExistsRule(this, ['warArchive.war', 'archive.mtar', 'war.properties']))
private static warArchiveName = 'warArchive.war'

View File

@ -0,0 +1,237 @@
import static org.hamcrest.Matchers.containsString
import static org.hamcrest.Matchers.hasItem
import static org.hamcrest.Matchers.is
import static org.hamcrest.Matchers.allOf
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.RuleChain
import org.junit.rules.ExpectedException
import static org.junit.Assert.assertThat
import util.BasePiperTest
import util.JenkinsDockerExecuteRule
import util.JenkinsShellCallRule
import util.JenkinsReadYamlRule
import util.JenkinsStepRule
import util.JenkinsLoggingRule
import util.Rules
class SonarExecuteScanTest extends BasePiperTest {
private ExpectedException thrown = ExpectedException.none()
private JenkinsReadYamlRule readYamlRule = new JenkinsReadYamlRule(this)
private JenkinsStepRule jsr = new JenkinsStepRule(this)
private JenkinsLoggingRule jlr = new JenkinsLoggingRule(this)
private JenkinsShellCallRule jscr = new JenkinsShellCallRule(this)
private JenkinsDockerExecuteRule jedr = new JenkinsDockerExecuteRule(this)
@Rule
public RuleChain rules = Rules
.getCommonRules(this)
.around(readYamlRule)
.around(thrown)
.around(jedr)
.around(jscr)
.around(jlr)
.around(jsr)
def sonarInstance
@Before
void init() throws Exception {
sonarInstance = null
helper.registerAllowedMethod("withSonarQubeEnv", [String.class, Closure.class], { string, closure ->
sonarInstance = string
return closure()
})
helper.registerAllowedMethod("unstash", [String.class], { stashInput -> return []})
helper.registerAllowedMethod("fileExists", [String.class], { file -> return file })
helper.registerAllowedMethod('string', [Map], { m -> m })
helper.registerAllowedMethod('withCredentials', [List, Closure], { l, c ->
try {
binding.setProperty(l[0].variable, 'TOKEN_'+l[0].credentialsId)
c()
} finally {
binding.setProperty(l[0].variable, null)
}
})
nullScript.commonPipelineEnvironment.setArtifactVersion('1.2.3-20180101')
}
@Test
void testWithDefaults() throws Exception {
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils
)
// asserts
assertThat('Sonar instance is not set to the default value', sonarInstance, is('SonarCloud'))
assertThat('Sonar project version is not set to the default value', jscr.shell, hasItem(containsString('sonar-scanner -Dsonar.projectVersion=1')))
assertThat('Docker image is not set to the default value', jedr.dockerParams.dockerImage, is('maven:3.5-jdk-8'))
assertJobStatusSuccess()
}
@Test
void testWithCustomVersion() throws Exception {
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
projectVersion: '2'
)
// asserts
assertThat('Sonar project version is not set to the custom value', jscr.shell, hasItem(containsString('sonar-scanner -Dsonar.projectVersion=2')))
assertJobStatusSuccess()
}
@Test
void testWithCustomOptions() throws Exception {
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
options: '-Dsonar.host.url=localhost'
)
// asserts
assertThat('Sonar options are not set to the custom value', jscr.shell, hasItem(containsString('sonar-scanner -Dsonar.host.url=localhost')))
assertJobStatusSuccess()
}
@Test
void testWithCustomOptionsList() throws Exception {
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
options: ['sonar.host.url=localhost']
)
// asserts
assertThat('Sonar options are not set to the custom value', jscr.shell, hasItem(containsString('sonar-scanner -Dsonar.host.url=localhost')))
assertJobStatusSuccess()
}
@Test
void testWithCustomInstance() throws Exception {
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
instance: 'MySonarInstance'
)
// asserts
assertThat('Sonar instance is not set to the custom value', sonarInstance.toString(), is('MySonarInstance'))
assertJobStatusSuccess()
}
@Test
void testWithPRHandling() throws Exception {
binding.setVariable('env', [
'CHANGE_ID': '42',
'CHANGE_TARGET': 'master',
'BRANCH_NAME': 'feature/anything'
])
nullScript.commonPipelineEnvironment.setGithubOrg('testOrg')
//nullScript.commonPipelineEnvironment.setGithubRepo('testRepo')
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
//githubOrg: 'testOrg',
githubRepo: 'testRepo'
)
// asserts
assertThat(jscr.shell, hasItem(allOf(
containsString('-Dsonar.pullrequest.key=42'),
containsString('-Dsonar.pullrequest.base=master'),
containsString('-Dsonar.pullrequest.branch=feature/anything'),
containsString('-Dsonar.pullrequest.provider=github'),
containsString('-Dsonar.pullrequest.github.repository=testOrg/testRepo')
)))
assertJobStatusSuccess()
}
@Test
void testWithPRHandlingWithoutMandatory() throws Exception {
thrown.expect(Exception)
thrown.expectMessage('ERROR - NO VALUE AVAILABLE FOR githubRepo')
binding.setVariable('env', ['CHANGE_ID': '42'])
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
githubOrg: 'testOrg'
)
// asserts
assertJobStatusFailure()
}
@Test
void testWithLegacyPRHandling() throws Exception {
binding.setVariable('env', ['CHANGE_ID': '42'])
nullScript.commonPipelineEnvironment.setGithubOrg('testOrg')
//nullScript.commonPipelineEnvironment.setGithubRepo('testRepo')
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
legacyPRHandling: true,
githubTokenCredentialsId: 'githubId',
//githubOrg: 'testOrg',
githubRepo: 'testRepo'
)
// asserts
assertThat(jscr.shell, hasItem(allOf(
containsString('-Dsonar.analysis.mode=preview'),
containsString('-Dsonar.github.pullRequest=42'),
containsString('-Dsonar.github.oauth=TOKEN_githubId'),
containsString('-Dsonar.github.repository=testOrg/testRepo')
)))
assertJobStatusSuccess()
}
@Test
void testWithLegacyPRHandlingWithoutMandatory() throws Exception {
thrown.expect(Exception)
thrown.expectMessage('ERROR - NO VALUE AVAILABLE FOR githubTokenCredentialsId')
binding.setVariable('env', ['CHANGE_ID': '42'])
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
legacyPRHandling: true,
githubOrg: 'testOrg',
githubRepo: 'testRepo'
)
// asserts
assertJobStatusFailure()
}
@Test
void testWithSonarAuth() throws Exception {
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
sonarTokenCredentialsId: 'githubId'
)
// asserts
assertThat(jscr.shell, hasItem(containsString('-Dsonar.login=TOKEN_githubId')))
assertJobStatusSuccess()
}
@Test
void testWithSonarCloudOrganization() throws Exception {
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
organization: 'TestOrg-github'
)
// asserts
assertThat(jscr.shell, hasItem(containsString('-Dsonar.organization=TestOrg-github')))
assertJobStatusSuccess()
}
}

View File

@ -3,6 +3,9 @@ package com.sap.piper
import org.junit.Assert
import org.junit.Test
import static org.hamcrest.Matchers.is
import static org.junit.Assert.assertThat
class MapUtilsTest {
@Test
@ -50,4 +53,44 @@ class MapUtilsTest {
MapUtils.traverse(m, { s -> (s.startsWith('x')) ? "replaced" : s})
assert m == [a: 'replaced', m: [b: 'replaced', c: 'otherString']]
}
@Test
void testGetByPath() {
Map m = [trees: [oak: 5, beech :1], flowers:[rose: 23]]
assertThat(MapUtils.getByPath(m, 'flowers'), is([rose: 23]))
assertThat(MapUtils.getByPath(m, 'trees/oak'), is(5))
assertThat(MapUtils.getByPath(m, 'trees/palm'), is(null))
}
@Test
void testDeepCopy() {
List l = ['a', 'b', 'c']
def original = [
list: l,
set: (Set)['1', '2'],
nextLevel: [
list: ['x', 'y'],
duplicate: l,
set: (Set)[9, 8, 7]
]
]
def copy = MapUtils.deepCopy(original)
assert ! copy.is(original)
assert ! copy.list.is(original.list)
assert ! copy.set.is(original.set)
assert ! copy.nextLevel.list.is(original.nextLevel.list)
assert ! copy.nextLevel.set.is(original.nextLevel.set)
assert ! copy.nextLevel.duplicate.is(original.nextLevel.duplicate)
// Within the original the identical list is used twice, but the
// assumption is that there are different lists in the copy.
assert ! copy.nextLevel.duplicate.is(copy.list)
assert copy == original
}
}

View File

@ -99,7 +99,7 @@ stages:
stepConditions:
firstStep:
config: testGeneral
testStage2:
testStage2:
stepConditions:
secondStep:
config: testStage
@ -107,7 +107,7 @@ stages:
stepConditions:
thirdStep:
config: testStep
'''
} else {
return '''
@ -155,23 +155,23 @@ stages:
testStage1:
stepConditions:
firstStep:
config:
config:
testGeneral:
- myValx
- myVal1
testStage2:
- myVal1
testStage2:
stepConditions:
secondStep:
config:
testStage:
config:
testStage:
- maValXyz
testStage3:
stepConditions:
thirdStep:
config:
config:
testStep:
- myVal3
'''
} else {
return '''
@ -218,18 +218,18 @@ stages:
testStage1:
stepConditions:
firstStep:
configKeys:
configKeys:
- myKey1_1
- myKey1_2
testStage2:
- myKey1_2
testStage2:
stepConditions:
secondStep:
configKeys:
configKeys:
- myKey2_1
testStage3:
stepConditions:
thirdStep:
configKeys:
configKeys:
- myKey3_1
'''
} else {
@ -451,27 +451,4 @@ steps: {}
assertThat(nullScript.commonPipelineEnvironment.configuration.runStage.Acceptance, is(true))
}
@Test
void testGetConfigValue() {
def config = [
invalidKey: 'invalidValue',
stringKey: 'stringValue',
listKey: [
'listValue1',
'listValue2'
],
nested: [
key: 'nestedValue'
]
]
assertThat(jsr.step.piperInitRunStageConfiguration.getConfigValue(config, 'stringKey'), is('stringValue'))
assertThat(jsr.step.piperInitRunStageConfiguration.getConfigValue(config, 'listKey'), is(['listValue1','listValue2']))
assertThat(jsr.step.piperInitRunStageConfiguration.getConfigValue(config, 'nested/key'), is('nestedValue'))
assertThat(jsr.step.piperInitRunStageConfiguration.getConfigValue(config, 'invalidKey/key'), is(nullValue()))
//assertThat(jsr.step.piperInitRunStageConfiguration.getConfigValue(config, 'nested/key'), is('nestedValue'))
}
}

View File

@ -0,0 +1,80 @@
#!groovy
package templates
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.RuleChain
import util.*
import static org.hamcrest.Matchers.containsString
import static org.hamcrest.Matchers.is
import static org.junit.Assert.assertThat
class PiperPipelineStageConfirmTest extends BasePiperTest {
private JenkinsStepRule jsr = new JenkinsStepRule(this)
private JenkinsLoggingRule jlr = new JenkinsLoggingRule(this)
private Map timeoutSettings
private Map inputSettings
@Rule
public RuleChain rules = Rules
.getCommonRules(this)
.around(new JenkinsReadYamlRule(this))
.around(jlr)
.around(jsr)
@Before
void init() {
binding.variables.env.STAGE_NAME = 'Confirm'
helper.registerAllowedMethod('timeout', [Map.class, Closure.class], {m, body ->
timeoutSettings = m
return body()
})
helper.registerAllowedMethod('input', [Map.class], {m ->
inputSettings = m
return [reason: 'this is my test reason for failing step 1 and step 3', acknowledgement: true]
})
}
@Test
void testStageDefault() {
jsr.step.piperPipelineStageConfirm(
script: nullScript
)
assertThat(timeoutSettings.unit, is('HOURS'))
assertThat(timeoutSettings.time, is(720))
assertThat(inputSettings.message, is('Shall we proceed to Promote & Release?'))
}
@Test
void testStageBuildUnstable() {
binding.setVariable('currentBuild', [result: 'UNSTABLE'])
nullScript.commonPipelineEnvironment.setValue('unstableSteps', ['step1', 'step3'])
helper.registerAllowedMethod('text', [Map.class], {m ->
assertThat(m.defaultValue, containsString('step1:'))
assertThat(m.defaultValue, containsString('step3:'))
assertThat(m.description, is('Please provide a reason for overruling following failed steps:'))
assertThat(m.name, is('reason'))
})
helper.registerAllowedMethod('booleanParam', [Map.class], {m ->
assertThat(m.description, is('I acknowledge that for traceability purposes the approval reason is stored together with my user name / user id:'))
assertThat(m.name, is('acknowledgement'))
})
jsr.step.piperPipelineStageConfirm(
script: nullScript
)
assertThat(inputSettings.message, is('Approve continuation of pipeline, although some steps failed.'))
assertThat(jlr.log, containsString('this is my test reason'))
assertThat(jlr.log, containsString('Acknowledged:\n-------------\ntrue'))
}
}

View File

@ -54,17 +54,41 @@ class PiperPipelineTest extends BasePiperTest {
helper.registerAllowedMethod('when', [Closure.class], {cWhen ->
helper.registerAllowedMethod('allOf', [Closure.class], null)
helper.registerAllowedMethod('allOf', [Closure.class], {cAllOf ->
def branchResult = false
helper.registerAllowedMethod('branch', [String.class], {branchName ->
if (!branchResult)
branchResult = (branchName == env.BRANCH_NAME)
if( !branchResult) {
throw new PipelineWhenException("Stage '${stageName}' skipped - expression: '${branchResult}'")
}
})
helper.registerAllowedMethod('expression', [Closure.class], { Closure cExp ->
def result = cExp()
if(!result) {
throw new PipelineWhenException("Stage '${stageName}' skipped - expression: '${result}'")
}
return result
})
return cAllOf()
})
helper.registerAllowedMethod('anyOf', [Closure.class], {cAnyOf ->
def result = false
helper.registerAllowedMethod('branch', [String.class], {branchName ->
if (!result)
result = (branchName == env.BRANCH_NAME)
if( !result) {
throw new PipelineWhenException("Stage '${stageName}' skipped - expression: '${result}'")
}
return result
})
helper.registerAllowedMethod('expression', [Closure.class], { Closure cExp ->
if (!result)
result = cExp()
return result
})
cAnyOf()
if(!result) {
throw new PipelineWhenException("Stage '${stageName}' skipped - anyOf: '${result}'")
}
return cAnyOf()
})
@ -151,8 +175,8 @@ class PiperPipelineTest extends BasePiperTest {
helper.registerAllowedMethod('piperPipelineStageCompliance', [Map.class], {m ->
stepsCalled.add('piperPipelineStageCompliance')
})
helper.registerAllowedMethod('input', [Map.class], {m ->
stepsCalled.add('input')
helper.registerAllowedMethod('piperPipelineStageConfirm', [Map.class], {m ->
stepsCalled.add('piperPipelineStageConfirm')
})
helper.registerAllowedMethod('piperPipelineStagePromote', [Map.class], {m ->
stepsCalled.add('piperPipelineStagePromote')
@ -188,10 +212,17 @@ class PiperPipelineTest extends BasePiperTest {
}
@Test
void testConfirm() {
void testConfirmUnstable() {
nullScript.commonPipelineEnvironment.configuration = [
general: [
manualConfirmation: false
]
]
binding.setVariable('currentBuild', [result: 'UNSTABLE'])
jsr.step.piperPipeline(script: nullScript)
assertThat(stepsCalled, hasItem('input'))
assertThat(stepsCalled, hasItem('piperPipelineStageConfirm'))
}
@ -204,7 +235,7 @@ class PiperPipelineTest extends BasePiperTest {
]
jsr.step.piperPipeline(script: nullScript)
assertThat(stepsCalled, not(hasItem('input')))
assertThat(stepsCalled, not(hasItem('piperPipelineStageConfirm')))
}
@Test
@ -232,7 +263,7 @@ class PiperPipelineTest extends BasePiperTest {
'piperPipelineStageSecurity',
'piperPipelineStagePerformance',
'piperPipelineStageCompliance',
'input',
'piperPipelineStageConfirm',
'piperPipelineStagePromote',
'piperPipelineStageRelease',
'piperPipelineStagePost'

View File

@ -41,7 +41,8 @@ class JenkinsSetupRule implements TestRule {
JOB_NAME : 'p',
BUILD_NUMBER: '1',
BUILD_URL : 'http://build.url',
BRANCH_NAME: 'master'
BRANCH_NAME: 'master',
WORKSPACE: 'any/path'
])
base.evaluate()

View File

@ -79,6 +79,7 @@ class LibraryLoadingTestExecutionListener extends AbstractTestExecutionListener
@Override
void beforeTestClass(TestContext testContext) throws Exception {
super.beforeTestClass(testContext)
StepTracker.before(testContext.testClass.getSimpleName())
def helper = LibraryLoadingTestExecutionListener.getSingletonInstance()
registerDefaultAllowedMethods(helper)
LibraryLoadingTestExecutionListener.START_CLASS_TRACKING = true
@ -87,6 +88,7 @@ class LibraryLoadingTestExecutionListener extends AbstractTestExecutionListener
@Override
void afterTestClass(TestContext testContext) throws Exception {
super.afterTestClass(testContext)
StepTracker.after()
PipelineTestHelper helper = LibraryLoadingTestExecutionListener.getSingletonInstance()
helper.clearAllowedMethodCallbacks(LibraryLoadingTestExecutionListener.TRACKED_ON_CLASS)
LibraryLoadingTestExecutionListener.TRACKED_ON_CLASS.clear()
@ -112,6 +114,7 @@ class LibraryLoadingTestExecutionListener extends AbstractTestExecutionListener
void beforeTestMethod(TestContext testContext) throws Exception {
super.beforeTestMethod(testContext)
def testInstance = testContext.getTestInstance()
StepTracker.before(testInstance.getClass().getSimpleName())
testInstance.binding.setVariable('currentBuild', [result: 'SUCCESS', currentResult: 'SUCCESS'])
PipelineTestHelper helper = LibraryLoadingTestExecutionListener.getSingletonInstance()
LibraryLoadingTestExecutionListener.START_METHOD_TRACKING = true
@ -121,6 +124,7 @@ class LibraryLoadingTestExecutionListener extends AbstractTestExecutionListener
void afterTestMethod(TestContext testContext) throws Exception {
super.afterTestMethod(testContext)
def testInstance = testContext.getTestInstance()
StepTracker.after()
PipelineTestHelper helper = LibraryLoadingTestExecutionListener.getSingletonInstance()
helper.clearCallStack()
@ -181,6 +185,7 @@ class LibraryLoadingTestExecutionListener extends AbstractTestExecutionListener
static class PipelineTestHelperHook {
def helper = new PipelineTestHelper() {
def clearAllowedMethodCallbacks(Collection c = []) {
List itemsToRemove = []
c.each {

View File

@ -0,0 +1,15 @@
package util;
import java.util.List;
import groovy.io.FileType
public class StepHelper {
private static getSteps() {
List steps = []
new File('vars').traverse(type: FileType.FILES, maxDepth: 0)
{ if(it.getName().endsWith('.groovy')) steps << (it =~ /vars[\\\/](.*)\.groovy/)[0][1] }
return steps
}
}

View File

@ -0,0 +1,69 @@
package util
import static com.lesfurets.jenkins.unit.MethodSignature.method
import static util.StepHelper.getSteps
import org.codehaus.groovy.runtime.MetaClassHelper
import com.lesfurets.jenkins.unit.MethodSignature
import com.lesfurets.jenkins.unit.PipelineTestHelper
import groovy.json.JsonBuilder
class StepTracker {
/*
* Contains the piper steps as keys (derived from the test name, so this is blurry since it might
* also contain cases other than piper step names) and the observed calls in a collection.
*/
static Map piperStepCallMapping = [:]
static Set piperSteps = StepHelper.getSteps()
static Set calls
static {
initialize()
}
final static void initialize() {
PipelineTestHelper.metaClass.getAllowedMethodEntry = {
// We need to be careful here, in case we switch to another
// version of the Les Furets framework we have to check if
// this here still works.
String name, Object[] args ->
Class[] paramTypes = MetaClassHelper.castArgumentsToClassArray(args)
MethodSignature signature = method(name, paramTypes)
def intercepted = allowedMethodCallbacks.find { k, v -> k == signature }
if(intercepted != null)
StepTracker.add(name)
return intercepted
}
}
static void before(String stepName) {
if(piperStepCallMapping[stepName] == null)
piperStepCallMapping[stepName] = (Set)[]
calls = piperStepCallMapping[stepName]
}
static void after() {
calls = null
write()
}
static void add (String call) {
calls.add(call)
}
static private void write() {
Map root = [
piperSteps: piperSteps,
calls: piperStepCallMapping.sort()
]
new File('target/trackedCalls.json').write(new JsonBuilder(root).toPrettyString())
}
}

View File

@ -169,9 +169,9 @@ void call(Map parameters = [:], Closure body = null) {
try {
sh """#!/bin/bash
git add .
git ${gitConfig} commit -m 'update version ${newVersion}'
git tag ${config.tagPrefix}${newVersion}"""
git add .
git ${gitConfig} commit -m 'update version ${newVersion}'
git tag ${config.tagPrefix}${newVersion}"""
config.gitCommitId = gitUtils.getGitCommitIdOrNull()
} catch (e) {
error "[${STEP_NAME}]git commit and tag failed: ${e}"

View File

@ -12,6 +12,7 @@ class commonPipelineEnvironment implements Serializable {
//stores the gitCommitId as well as additional git information for the build during pipeline run
String gitCommitId
String gitCommitMessage
String gitSshUrl
String gitHttpsUrl
String gitBranch
@ -46,6 +47,7 @@ class commonPipelineEnvironment implements Serializable {
configuration = [:]
gitCommitId = null
gitCommitMessage = null
gitSshUrl = null
gitHttpsUrl = null
gitBranch = null

View File

@ -42,4 +42,3 @@ def call(Map parameters = [:], body) {
return duration
}

View File

@ -54,15 +54,19 @@ void call(Map parameters = [:]) {
deleteDir()
checkout([$class: 'GitSCM', branches: [[name: config.branch]],
doGenerateSubmoduleConfigurations: false,
extensions: [[$class: 'SparseCheckoutPaths',
sparseCheckoutPaths: [[path: config.path]]
]],
submoduleCfg: [],
userRemoteConfigs: [[credentialsId: config.credentialsId,
url: config.repoUrl
]]
checkout([
$class: 'GitSCM',
branches: [[name: config.branch]],
doGenerateSubmoduleConfigurations: false,
extensions: [[
$class: 'SparseCheckoutPaths',
sparseCheckoutPaths: [[path: config.path]]
]],
submoduleCfg: [],
userRemoteConfigs: [[
credentialsId: config.credentialsId,
url: config.repoUrl
]]
])
}

View File

@ -11,11 +11,11 @@ import groovy.transform.Field
@Field Set STEP_CONFIG_KEYS = [
/**
* If it is set to true` the step `mailSendNotification` will be triggered in case of an error.
* If it is set to `true` the step `mailSendNotification` will be triggered in case of an error.
*/
'sendMail',
/**
* Defines the time period where the job waits for input. Default is 15 minutes. Once this time is passed the job enters state FAILED.
* Defines the time period where the job waits for input. Default is 15 minutes. Once this time is passed the job enters state `FAILED`.
*/
'timeoutInSeconds'
]

View File

@ -3,6 +3,7 @@ import com.sap.piper.ConfigurationLoader
import static com.sap.piper.Prerequisites.checkScript
import com.sap.piper.ConfigurationHelper
import com.sap.piper.MapUtils
import groovy.transform.Field
@Field String STEP_NAME = getClass().getName()
@ -65,27 +66,27 @@ void call(Map parameters = [:]) {
case 'config':
if (condition.getValue() instanceof Map) {
condition.getValue().each {configCondition ->
if (getConfigValue(stepConfig, configCondition.getKey()) in configCondition.getValue()) {
if (MapUtils.getByPath(stepConfig, configCondition.getKey()) in configCondition.getValue()) {
stepActive = true
}
}
} else if (getConfigValue(stepConfig, condition.getValue())) {
} else if (MapUtils.getByPath(stepConfig, condition.getValue())) {
stepActive = true
}
break
case 'configKeys':
if (condition.getValue() instanceof List) {
condition.getValue().each {configKey ->
if (getConfigValue(stepConfig, configKey)) {
if (MapUtils.getByPath(stepConfig, configKey)) {
stepActive = true
}
}
} else if (getConfigValue(stepConfig, condition.getValue())) {
} else if (MapUtils.getByPath(stepConfig, condition.getValue())) {
stepActive = true
}
break
case 'filePatternFromConfig':
def conditionValue = getConfigValue(stepConfig, condition.getValue())
def conditionValue = MapUtils.getByPath(stepConfig, condition.getValue())
if (conditionValue && findFiles(glob: conditionValue)) {
stepActive = true
}
@ -110,16 +111,3 @@ void call(Map parameters = [:]) {
echo "[${STEP_NAME}] Debug - Run Step Configuration: ${script.commonPipelineEnvironment.configuration.runStep}"
}
}
private def getConfigValue(Map stepConfig, def configKey) {
if (stepConfig == null) return null
List configPath = configKey instanceof String ? configKey.tokenize('/') : configKey
def configValue = stepConfig[configPath.head()]
if (configPath.size() == 1) return configValue
if (configValue in Map) return getConfigValue(configValue, configPath.tail())
return null
}
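For reference, the '/'-separated keys that `MapUtils.getByPath` now resolves appear in stage conditions such as the following sketch (stage, step, and key names are illustrative):

```yaml
stages:
  Acceptance:
    stepConditions:
      cloudFoundryDeploy:
        configKeys:
          - 'cloudFoundry/space'   # nested key, resolved via MapUtils.getByPath
```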

View File

@ -62,9 +62,9 @@ void call(parameters) {
}
stage('Confirm') {
agent none
when {allOf {branch parameters.script.commonPipelineEnvironment.getStepConfiguration('', '').productiveBranch; expression {return parameters.script.commonPipelineEnvironment.getStepConfiguration('piperInitRunStageConfiguration', env.STAGE_NAME).manualConfirmation}}}
when {allOf {expression { env.BRANCH_NAME ==~ parameters.script.commonPipelineEnvironment.getStepConfiguration('', '').productiveBranch }; anyOf {expression {return (currentBuild.result == 'UNSTABLE')}; expression {return parameters.script.commonPipelineEnvironment.getStepConfiguration('piperInitRunStageConfiguration', env.STAGE_NAME).manualConfirmation}}}}
steps {
input message: 'Shall we proceed to promotion & release?'
piperPipelineStageConfirm script: parameters.script
}
}
stage('Promote') {

View File

@ -0,0 +1,80 @@
import com.sap.piper.ConfigurationHelper
import com.sap.piper.GenerateStageDocumentation
import groovy.transform.Field
import static com.sap.piper.Prerequisites.checkScript
@Field String STEP_NAME = getClass().getName()
@Field Set GENERAL_CONFIG_KEYS = [
/**
* Specifies if a manual confirmation is active before running the __Promote__ and __Release__ stages of the pipeline.
* @possibleValues `true`, `false`
*/
'manualConfirmation',
/** Defines message displayed as default manual confirmation. Please note: only used in case pipeline is in state __SUCCESSFUL__ */
'manualConfirmationMessage',
/** Defines how many hours a manual confirmation is possible for a dedicated pipeline. */
'manualConfirmationTimeout'
]
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS
/**
* In this stage a manual confirmation is requested before processing subsequent stages like __Promote__ and __Release__.
*
* This stage will be active in two scenarios:
* - manual activation of this stage
* - in case of an 'UNSTABLE' build (even when manual confirmation is inactive)
*/
@GenerateStageDocumentation(defaultStageName = 'Confirm')
void call(Map parameters = [:]) {
def script = checkScript(this, parameters) ?: this
def stageName = parameters.stageName?:env.STAGE_NAME
Map config = ConfigurationHelper.newInstance(this)
.loadStepDefaults()
.mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS)
.mixinStageConfig(script.commonPipelineEnvironment, stageName, STEP_CONFIG_KEYS)
.mixin(parameters, PARAMETER_KEYS)
.use()
String unstableStepNames = script.commonPipelineEnvironment.getValue('unstableSteps') ? "${script.commonPipelineEnvironment.getValue('unstableSteps').join(':\n------\n')}:" : ''
boolean approval = false
def userInput
timeout(
unit: 'HOURS',
time: config.manualConfirmationTimeout
){
if (currentBuild.result == 'UNSTABLE') {
while(!approval) {
userInput = input(
message: 'Approve continuation of pipeline, although some steps failed.',
ok: 'Approve',
parameters: [
text(
defaultValue: unstableStepNames,
description: 'Please provide a reason for overruling following failed steps:',
name: 'reason'
),
booleanParam(
defaultValue: false,
description: 'I acknowledge that for traceability purposes the approval reason is stored together with my user name / user id:',
name: 'acknowledgement'
)
]
)
approval = userInput.acknowledgement && userInput.reason?.length() > (unstableStepNames.length() + 10)
}
echo "Reason:\n-------------\n${userInput.reason}"
echo "Acknowledged:\n-------------\n${userInput.acknowledgement}"
} else {
input message: config.manualConfirmationMessage
}
}
}
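The stage is steered via the general configuration keys listed above; a sketch of the corresponding `.pipeline/config.yml` section, with values mirroring the library defaults added in this commit:

```yaml
general:
  manualConfirmation: true
  manualConfirmationMessage: 'Shall we proceed to Promote & Release?'
  manualConfirmationTimeout: 720   # hours (~1 month)
```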

View File

@ -0,0 +1,193 @@
import com.sap.piper.ConfigurationHelper
import com.sap.piper.GenerateDocumentation
import com.sap.piper.Utils
import static com.sap.piper.Prerequisites.checkScript
import groovy.transform.Field
import groovy.text.SimpleTemplateEngine
@Field String STEP_NAME = getClass().getName()
@Field Set GENERAL_CONFIG_KEYS = [
/**
* Pull-Request voting only:
* The URL to the Github API. see https://docs.sonarqube.org/display/PLUG/GitHub+Plugin#GitHubPlugin-Usage
* deprecated: only supported in LTS / < 7.2
*/
'githubApiUrl',
/**
* Pull-Request voting only:
* The Github organization.
* @default: `commonPipelineEnvironment.getGithubOrg()`
*/
'githubOrg',
/**
* Pull-Request voting only:
* The Github repository.
* @default: `commonPipelineEnvironment.getGithubRepo()`
*/
'githubRepo',
/**
* Pull-Request voting only:
* The Jenkins credentialId for a Github token. It is needed to report findings back to the pull-request.
* deprecated: only supported in LTS / < 7.2
* @possibleValues Jenkins credential id
*/
'githubTokenCredentialsId',
/**
* The Jenkins credentialsId for a SonarQube token. It is needed for non-anonymous analysis runs. see https://sonarcloud.io/account/security
* @possibleValues Jenkins credential id
*/
'sonarTokenCredentialsId',
]
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus([
/**
* Pull-Request voting only:
* Disables the pull-request decoration with inline comments.
* Deprecated: only supported in SonarQube LTS / versions < 7.2
* @possibleValues `true`, `false`
*/
'disableInlineComments',
/**
* Name of the Docker image that should be used. If empty, Docker is not used and the command is executed directly on the Jenkins system.
* See dockerExecute.
*/
'dockerImage',
/**
* The name of the SonarQube instance defined in the Jenkins settings.
*/
'instance',
/**
* Pull-Request voting only:
* Activates the pull-request handling using the [GitHub Plugin](https://docs.sonarqube.org/display/PLUG/GitHub+Plugin).
* Deprecated: only supported in SonarQube LTS / versions < 7.2
* @possibleValues `true`, `false`
*/
'legacyPRHandling',
/**
* A list of options which are passed to the `sonar-scanner`.
*/
'options',
/**
* Organization that the project will be assigned to in SonarCloud.io.
*/
'organization',
/**
* The project version that is reported to SonarQube.
* @default: major version number of `commonPipelineEnvironment.getArtifactVersion()`
*/
'projectVersion'
])
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS
/**
* The step executes the [sonar-scanner](https://docs.sonarqube.org/display/SCAN/Analyzing+with+SonarQube+Scanner) CLI command to scan the defined sources and publish the results to a SonarQube instance.
*/
@GenerateDocumentation
void call(Map parameters = [:]) {
handlePipelineStepErrors(stepName: STEP_NAME, stepParameters: parameters) {
def utils = parameters.juStabUtils ?: new Utils()
def script = checkScript(this, parameters) ?: this
// load default & individual configuration
Map configuration = ConfigurationHelper.newInstance(this)
.loadStepDefaults()
.mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS)
.mixinStepConfig(script.commonPipelineEnvironment, STEP_CONFIG_KEYS)
.mixinStageConfig(script.commonPipelineEnvironment, parameters.stageName?:env.STAGE_NAME, GENERAL_CONFIG_KEYS)
.mixin(parameters, PARAMETER_KEYS)
.addIfEmpty('projectVersion', script.commonPipelineEnvironment.getArtifactVersion()?.tokenize('.')?.get(0))
.addIfEmpty('githubOrg', script.commonPipelineEnvironment.getGithubOrg())
.addIfEmpty('githubRepo', script.commonPipelineEnvironment.getGithubRepo())
// check mandatory parameters
.withMandatoryProperty('githubTokenCredentialsId', null, { config -> config.legacyPRHandling && isPullRequest() })
.withMandatoryProperty('githubOrg', null, { isPullRequest() })
.withMandatoryProperty('githubRepo', null, { isPullRequest() })
.use()
if(configuration.options instanceof String)
configuration.options = [].plus(configuration.options)
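// The scanner invocation is wrapped in a closure ('worker'); the blocks below decorate it step by step with token handling and pull-request options before it is finally executed inside dockerExecute.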
def worker = { config ->
withSonarQubeEnv(config.instance) {
loadSonarScanner(config)
if(config.organization) config.options.add("sonar.organization=${config.organization}")
if(config.projectVersion) config.options.add("sonar.projectVersion=${config.projectVersion}")
// prefix all options with -D unless they are already prefixed
config.options = config.options.collect { it.startsWith('-D') ? it : "-D${it}" }
sh "PATH=\$PATH:${env.WORKSPACE}/.sonar-scanner/bin sonar-scanner ${config.options.join(' ')}"
}
}
if(configuration.sonarTokenCredentialsId){
def workerForSonarAuth = worker
worker = { config ->
withCredentials([string(
credentialsId: config.sonarTokenCredentialsId,
variable: 'SONAR_TOKEN'
)]){
config.options.add("sonar.login=$SONAR_TOKEN")
workerForSonarAuth(config)
}
}
}
if(isPullRequest()){
def workerForGithubAuth = worker
worker = { config ->
if(config.legacyPRHandling) {
withCredentials([string(
credentialsId: config.githubTokenCredentialsId,
variable: 'GITHUB_TOKEN'
)]){
// support for https://docs.sonarqube.org/display/PLUG/GitHub+Plugin
config.options.add('sonar.analysis.mode=preview')
config.options.add("sonar.github.oauth=$GITHUB_TOKEN")
config.options.add("sonar.github.pullRequest=${env.CHANGE_ID}")
config.options.add("sonar.github.repository=${config.githubOrg}/${config.githubRepo}")
if(config.githubApiUrl) config.options.add("sonar.github.endpoint=${config.githubApiUrl}")
if(config.disableInlineComments) config.options.add("sonar.github.disableInlineComments=${config.disableInlineComments}")
workerForGithubAuth(config)
}
} else {
// see https://sonarcloud.io/documentation/analysis/pull-request/
config.options.add("sonar.pullrequest.key=${env.CHANGE_ID}")
config.options.add("sonar.pullrequest.base=${env.CHANGE_TARGET}")
config.options.add("sonar.pullrequest.branch=${env.BRANCH_NAME}")
config.options.add("sonar.pullrequest.provider=${config.pullRequestProvider}")
switch(config.pullRequestProvider){
case 'github':
config.options.add("sonar.pullrequest.github.repository=${config.githubOrg}/${config.githubRepo}")
break;
default: error "Pull-Request provider '${config.pullRequestProvider}' is not supported!"
}
workerForGithubAuth(config)
}
}
}
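// Execute the (possibly decorated) worker inside the configured Docker image, or directly on the Jenkins system if dockerImage is empty.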
dockerExecute(
script: script,
dockerImage: configuration.dockerImage
){
worker(configuration)
}
}
}
private Boolean isPullRequest(){
return env.CHANGE_ID
}
private void loadSonarScanner(config){
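// Downloads the sonar-scanner CLI from the configured download URL and unpacks it into '.sonar-scanner' in the workspace; the worker above adds its 'bin' folder to the PATH.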
def filename = new File(config.sonarScannerDownloadUrl).getName()
def foldername = filename.replace('.zip', '').replace('cli-', '')
sh """
curl --remote-name --remote-header-name --location --silent --show-error ${config.sonarScannerDownloadUrl}
unzip -q ${filename}
mv ${foldername} .sonar-scanner
"""
}
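A minimal usage sketch for this step, assuming it is exposed under the name `sonarExecuteScan` (the file name is not visible in this diff) and that a SonarQube server named 'SonarCloud' as well as a Jenkins secret-text credential 'sonar-token' have been configured; all concrete values are illustrative only:

sonarExecuteScan(
    script: this,
    instance: 'SonarCloud',                   // name of the SonarQube instance in the Jenkins settings
    organization: 'my-org',                   // hypothetical SonarCloud.io organization
    sonarTokenCredentialsId: 'sonar-token',   // hypothetical Jenkins credential id
    options: ['sonar.projectKey=my-project']  // entries are prefixed with -D automatically
)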

View File

@ -184,30 +184,33 @@ void call(parameters = [:]) {
try {
if(backendType == BackendType.SOLMAN) {
transportRequestId = cm.createTransportRequestSOLMAN(
configuration.changeManagement.solman.docker,
configuration.changeDocumentId,
configuration.developmentSystemId,
configuration.changeManagement.endpoint,
configuration.changeManagement.credentialsId,
configuration.changeManagement.clientOpts)
configuration.changeManagement.solman.docker,
configuration.changeDocumentId,
configuration.developmentSystemId,
configuration.changeManagement.endpoint,
configuration.changeManagement.credentialsId,
configuration.changeManagement.clientOpts
)
} else if(backendType == BackendType.CTS) {
transportRequestId = cm.createTransportRequestCTS(
configuration.changeManagement.cts.docker,
configuration.transportType,
configuration.targetSystem,
configuration.description,
configuration.changeManagement.endpoint,
configuration.changeManagement.credentialsId,
configuration.changeManagement.clientOpts)
configuration.changeManagement.cts.docker,
configuration.transportType,
configuration.targetSystem,
configuration.description,
configuration.changeManagement.endpoint,
configuration.changeManagement.credentialsId,
configuration.changeManagement.clientOpts
)
} else if (backendType == BackendType.RFC) {
transportRequestId = cm.createTransportRequestRFC(
configuration.changeManagement.rfc.docker,
configuration.changeManagement.endpoint,
configuration.changeManagement.rfc.developmentInstance,
configuration.changeManagement.rfc.developmentClient,
configuration.changeManagement.credentialsId,
configuration.description,
configuration.verbose)
configuration.changeManagement.rfc.docker,
configuration.changeManagement.endpoint,
configuration.changeManagement.rfc.developmentInstance,
configuration.changeManagement.rfc.developmentClient,
configuration.changeManagement.credentialsId,
configuration.description,
configuration.verbose
)
} else {
throw new IllegalArgumentException("Invalid backend type: '${backendType}'.")
}

View File

@ -70,7 +70,7 @@ import static com.sap.piper.cm.StepHelpers.getBackendTypeAndLogInfoIfCMIntegrati
'transportRequestId',
/** @see transportRequestCreate */
'verbose',
])
])
/** Releases a Transport Request. */
@GenerateDocumentation

View File

@ -177,9 +177,9 @@ void call(parameters = [:]) {
"Change document id not provided (parameter: \'changeDocumentId\' or via commit history).")
}
configuration = configHelper
.withMandatoryProperty('transportRequestId',
"Transport request id not provided (parameter: \'transportRequestId\' or via commit history).")
.use()
.withMandatoryProperty('transportRequestId',
"Transport request id not provided (parameter: \'transportRequestId\' or via commit history).")
.use()
def uploadingMessage = ['[INFO] Uploading file ' +
"'${backendType == BackendType.RFC ? configuration.applicationUrl : configuration.filePath}' " +