1
0
mirror of https://github.com/SAP/jenkins-library.git synced 2025-01-18 05:18:24 +02:00

Merge remote-tracking branch 'github/master' into HEAD

This commit is contained in:
Marcus Holl 2019-05-17 12:22:26 +02:00
commit d7d36c9445
182 changed files with 9017 additions and 4443 deletions

View File

@ -46,7 +46,7 @@ Code shall contain comments to explain the intention of the code when it is uncl
#### EditorConfig
To ensure a common file format, there is a `.editorConfig` file [in place](.editorconfig). To respect this file, [check](http://editorconfig.org/#download) if your editor does support it natively or you need to download a plugin.
To ensure a common file format, there is a `.editorConfig` file [in place](../.editorconfig). To respect this file, [check](http://editorconfig.org/#download) if your editor does support it natively or you need to download a plugin.
### Commit Message Style

2
.gitignore vendored
View File

@ -1,5 +1,5 @@
.idea/
bin/
/bin
.settings
logs
reports

View File

@ -16,6 +16,7 @@ cache:
# Travis Lifecycle: https://docs.travis-ci.com/user/job-lifecycle#the-job-lifecycle
# Travis Stages: https://docs.travis-ci.com/user/build-stages/
# Travis Conditions: https://docs.travis-ci.com/user/conditional-builds-stages-jobs
# Travis GHPages deploy provider: https://docs.travis-ci.com/user/deployment/pages/
jobs:
include:
- stage: Tests
@ -28,33 +29,22 @@ jobs:
after_script:
- JACOCO_SOURCE_PATH="src vars test" ./cc-test-reporter format-coverage target/site/jacoco/jacoco.xml --input-type jacoco
- ./cc-test-reporter upload-coverage
- name: Docs Build
if: type = pull_request
install: docker pull squidfunk/mkdocs-material:3.0.4
script:
- |
cp -r documentation/docs documentation/docs-tmp
documentation/bin/createDocu.sh vars documentation/docs-tmp/steps
docker run --rm -it -v ${TRAVIS_BUILD_DIR}:/docs -w /docs/documentation squidfunk/mkdocs-material:3.0.4 build --clean --verbose --strict
- name: Consumer Tests
if: repo = "SAP/jenkins-library" && ( (type != pull_request && branch =~ /^master$|^it\/.*$/) || (type == pull_request && head_repo = "SAP/jenkins-library" && head_branch =~ /^it\/.*$/) )
script: cd consumer-test && chmod +x integrationTestController.sh && ./integrationTestController.sh
script: cd consumer-test && groovy consumerTestController.groovy
- stage: Docs
name: Deploy
if: repo = "SAP/jenkins-library" AND branch = master AND NOT type = pull_request
install:
- docker pull squidfunk/mkdocs-material:3.0.4
- |
echo "Found change on master: Deployment of documentation"
PRIVATE_KEY="cfg/id_rsa"
openssl aes-256-cbc -K $encrypted_12c8071d2874_key -iv $encrypted_12c8071d2874_iv -in cfg/id_rsa.enc -out "${PRIVATE_KEY}" -d
chmod a+x gh-pages-deploy.sh
cp -r documentation/docs documentation/docs-tmp
documentation/bin/createDocu.sh vars documentation/docs-tmp/steps
script: docker run --rm -it --entrypoint "./gh-pages-deploy.sh" -e "TRAVIS_REPO_SLUG=${TRAVIS_REPO_SLUG}" -v ${TRAVIS_BUILD_DIR}:/docs -w /docs squidfunk/mkdocs-material:3.0.4
# TODO: make use of GHPages deploy provider: https://docs.travis-ci.com/user/deployment/pages/
#notifications:
# slack:
# secure: UYzfd4QYLtAX39r8LzV1dYp7cKMhYRRjI/xswMEkR+RgdMWxVPPH3kcsNLwkdNGSPn1b8Aidz8YLss9JolrepWjwI283dK8EUthZAOw03+PmL5X/3nOJ7aGv0sxwYqF5ypltBrerTf6jtPUTcQdtao+0O8bgnzShc6nWWE4MLXonjOm1pZLRUo81un+0bzm8C2ABIeHC6xuZCRycXP5u1mW1nDLK3900uY1rxIDTSZKEzA0IzLQhE9uROvI1r48fW8cKJQQjMMO5PPorq+0eDl2YTE8rQr9ldvuRE7A/ubsOQR0N5F8iAv1JTZXuXGt62fw6eKDQ1h94suEk7X+baV0EwlfhsHXcI1MxRFwxNSr9k1WaVFfA4TrM8XYBAcW3JGRA51ZK3q4EcjpuxpupaA7kZDtH53W7ePzH2TIp6yknln1q+yfcsP7cGv38sSKpKwOyMgAPRElkZzcoo31kw/PLzKPXYJEovRqx/0lWzczbFSscsroNaGCavC02++bUnyUXW2W+PG4gDSBFVZjtrvTPKnZ6DpHXV97x6xC/CzyhFj/Nf+ao/J9IIfocnc4vXJojwS550KIvM7xCDJwa/+29dajj2l6dQqrcOe3UT3O5UGU9I0KkGEDMfkLOD71eRy58qiYz3y953e52DvvzWQJbvfuk8ubMO+Fmn4GyRz8=
name: Create Documentation
install: docker pull squidfunk/mkdocs-material:3.0.4
before_script: documentation/bin/createDocu.sh
script: docker run --rm -it -v ${TRAVIS_BUILD_DIR}/documentation:/docs squidfunk/mkdocs-material:3.0.4 build --clean --strict
deploy:
on:
branch: master
provider: pages
skip_cleanup: true
github_token: ${GITHUB_TOKEN}
name: 'Travis CI Publisher'
local_dir: 'documentation/docs-gen'
keep_history: true
#verbose: true

View File

@ -74,8 +74,7 @@ To setup the shared library, you need to perform the following steps:
1. Scroll down to section *Global Pipeline Libraries* and add a new Library by
clicking the *Add* button.
1. set *Library Name* to `piper-lib-os`
1. set *Default Version* to the branch or tag you want to consume (e.g.
`master` or `v0.1`)
1. set *Default Version* to the branch or tag you want to consume (e.g. `master` or `v0.1`)
1. set *Retrieval Method* to `Modern SCM`
1. set *Source Code Management* to `Git`
1. set *Project Repository* to `https://github.com/SAP/jenkins-library`

View File

@ -0,0 +1,135 @@
@Grab('org.yaml:snakeyaml:1.17')
import org.yaml.snakeyaml.Yaml
/**
 * Runs one consumer test case on its own thread: clones the reference
 * application, pins the piper library version under test in its Jenkinsfile
 * and executes the pipeline inside a ppiper/jenkinsfile-runner container.
 *
 * The static fields must be set by the controller before any thread starts.
 */
class TestRunnerThread extends Thread {

    static def workspacesRootDir      // root dir below which each test case gets its workspace
    static def libraryVersionUnderTest // commit hash of the library version to test
    static def repositoryUnderTest     // GitHub slug (owner/repo) substituted into jenkins.yml

    Process currentProcess
    final StringBuilder stdOut = new StringBuilder()
    final StringBuilder stdErr = new StringBuilder()
    int lastPrintedStdOutLine = -1
    public def returnCode = -1
    public def lastCommand

    def area
    def testCase
    def uniqueName
    def testCaseRootDir
    def testCaseWorkspace
    def testCaseConfig

    TestRunnerThread(File testCaseFile) {
        // Regex pattern expects a folder structure such as '/rootDir/areaDir/testCase.extension'
        def testCaseMatches = (testCaseFile.toString() =~
            /^[\w\-]+\\/([\w\-]+)\\/([\w\-]+)\..*\u0024/)
        // Validate the match before indexing into it; accessing a non-matching
        // Matcher would throw an unhelpful IndexOutOfBoundsException instead of
        // the intended error message below.
        if (!testCaseMatches) {
            throw new RuntimeException("Expecting file structure '/rootDir/areaDir/testCase.yml' " +
                "but got '${testCaseFile}'.")
        }
        this.area = testCaseMatches[0][1]
        this.testCase = testCaseMatches[0][2]
        this.uniqueName = "${area}|${testCase}"
        this.testCaseRootDir = new File("${workspacesRootDir}/${area}/${testCase}")
        this.testCaseWorkspace = "${testCaseRootDir}/workspace"
        this.testCaseConfig = new Yaml().load(testCaseFile.text)
    }

    void run() {
        println "[INFO] Test case '${uniqueName}' launched."

        if (testCaseRootDir.exists() || !testCaseRootDir.mkdirs()) {
            throw new RuntimeException("Creation of dir '${testCaseRootDir}' failed.")
        }

        executeShell("git clone -b ${testCase} ${testCaseConfig.referenceAppRepoUrl} " +
            "${testCaseWorkspace}")
        addJenkinsYmlToWorkspace()
        setLibraryVersionInJenkinsfile()

        //Commit the changed version because artifactSetVersion expects the git repo not to be dirty
        executeShell(["git", "-C", "${testCaseWorkspace}", "commit", "--all",
                      '--author="piper-testing-bot <piper-testing-bot@example.com>"',
                      '--message="Set piper lib version for test"'])

        executeShell("docker run -v /var/run/docker.sock:/var/run/docker.sock " +
            "-v ${System.getenv('PWD')}/${testCaseWorkspace}:/workspace -v /tmp " +
            "-e CASC_JENKINS_CONFIG=/workspace/jenkins.yml -e CX_INFRA_IT_CF_USERNAME " +
            "-e CX_INFRA_IT_CF_PASSWORD -e BRANCH_NAME=${testCase} ppiper/jenkinsfile-runner")

        println "*****[INFO] Test case '${uniqueName}' finished successfully.*****"
        printOutput()
    }

    // Configure path to library-repository under test in Jenkins config
    private void addJenkinsYmlToWorkspace() {
        def sourceFile = 'jenkins.yml'
        def sourceText = new File(sourceFile).text.replaceAll(
            '__REPO_SLUG__', repositoryUnderTest)
        def target = new File("${testCaseWorkspace}/${sourceFile}")
        target.write(sourceText)
    }

    // Force usage of library version under test by setting it in the Jenkinsfile,
    // which is then the first definition and thus has the highest precedence.
    private void setLibraryVersionInJenkinsfile() {
        def jenkinsfile = new File("${testCaseWorkspace}/Jenkinsfile")
        def manipulatedText =
            "@Library(\"piper-library-os@${libraryVersionUnderTest}\") _\n" +
            jenkinsfile.text
        jenkinsfile.write(manipulatedText)
    }

    /**
     * Executes the given command (String or List form), appending stdout and
     * stderr of the process to the thread-local buffers.
     *
     * @throws ReturnCodeNotZeroException when the command fails.
     */
    private void executeShell(command) {
        lastCommand = command
        def startOfCommandString = "Shell command: '${command}'\n"
        stdOut << startOfCommandString
        stdErr << startOfCommandString

        currentProcess = command.execute()
        currentProcess.waitForProcessOutput(stdOut, stdErr)

        returnCode = currentProcess.exitValue()
        currentProcess = null
        // '!= 0' (instead of '> 0') so any non-zero exit value counts as failure.
        if (returnCode != 0) {
            throw new ReturnCodeNotZeroException("Test case: [${uniqueName}]; " +
                "shell command '${command}' exited with return code '${returnCode}'")
        }
    }

    // Dumps the full stdout and stderr buffers of this test case to the console.
    void printOutput() {
        println "\n[INFO] stdout output from test case ${uniqueName}:"
        stdOut.eachLine { line, i ->
            println "${i} [${uniqueName}] ${line}"
            lastPrintedStdOutLine = i
        }

        println "\n[INFO] stderr output from test case ${uniqueName}:"
        stdErr.eachLine { line, i ->
            println "${i} [${uniqueName}] ${line}"
        }
    }

    // Prints only the stdout lines which appeared since the last call, so the
    // controller can stream live output for a single running test case.
    public void printRunningStdOut() {
        stdOut.eachLine { line, i ->
            if (i > lastPrintedStdOutLine) {
                println "${i} [${uniqueName}] ${line}"
                lastPrintedStdOutLine = i
            }
        }
    }

    @Override
    public String toString() {
        return uniqueName
    }
}
// Signals that a shell command run by a TestRunnerThread exited with a
// non-zero return code.
class ReturnCodeNotZeroException extends Exception {
    ReturnCodeNotZeroException(message) {
        super(message)
    }
}

View File

@ -0,0 +1,232 @@
import groovy.io.FileType
import static groovy.json.JsonOutput.toJson
// --- Global state of the controller script ---
COMMIT_HASH = null            // commit to report the GitHub status on (set later, CI only)
RUNNING_LOCALLY = false       // true when '-l' is given; skips GitHub status reporting
AUXILIARY_SLEEP_MS = 10000    // interval of the keep-alive console output
START_TIME_MS = System.currentTimeMillis()
WORKSPACES_ROOT = 'workspaces'
TEST_CASES_DIR = 'testCases'
// Current HEAD of the checked-out library repository.
LIBRARY_VERSION_UNDER_TEST = "git log --format=%H -n 1".execute().text.trim()

// Changes touching only these paths do not require consumer testing.
EXCLUDED_FROM_CONSUMER_TESTING_REGEXES = [
    /^documentation\/.*$/,
    /^.travis.yml$/,
    /^test\/.*$/
]

// Start from a clean workspaces directory and share the configuration with
// the runner threads.
newEmptyDir(WORKSPACES_ROOT)
TestRunnerThread.workspacesRootDir = WORKSPACES_ROOT
TestRunnerThread.libraryVersionUnderTest = LIBRARY_VERSION_UNDER_TEST
TestRunnerThread.repositoryUnderTest = System.getenv('TRAVIS_REPO_SLUG') ?: 'SAP/jenkins-library'

def testCaseThreads
// Command line interface of this controller script.
def cli = new CliBuilder(
    usage: 'groovy consumerTestController.groovy [<options>]',
    header: 'Options:',
    footer: 'If no options are set, all tests are run centrally, i.e. on travisCI.')

cli.with {
    h longOpt: 'help', 'Print this help text and exit.'
    l longOpt: 'run-locally', 'Run consumer tests locally in Docker, i.e. skip reporting of GitHub status.'
    s longOpt: 'single-test', args: 1, argName: 'filePath', 'Run single test.'
}

def options = cli.parse(args)

if (options.h) {
    cli.usage()
    return
}

if (options.l) {
    RUNNING_LOCALLY = true
}
// When running on CI, report a 'pending' status to GitHub up front — or skip
// the run entirely when the change only touches excluded paths.
if (!RUNNING_LOCALLY) {
    /*
    In case the build is performed for a pull request TRAVIS_COMMIT is a merge
    commit between the base branch and the PR branch HEAD. That commit is actually built.
    But for notifying about a build status we need the commit which is currently
    the HEAD of the PR branch.

    In case the build is performed for a simple branch (not associated with a PR)
    In this case there is no merge commit between any base branch and HEAD of a PR branch.
    The commit which we need for notifying about a build status is in this case simply
    TRAVIS_COMMIT itself.
    */
    COMMIT_HASH = System.getenv('TRAVIS_PULL_REQUEST_SHA') ?: System.getenv('TRAVIS_COMMIT')

    if (changeDoesNotNeedConsumerTesting()) {
        println 'No consumer tests necessary.'
        notifyGithub("success", "No consumer tests necessary.")
        return
    } else {
        notifyGithub("pending", "Consumer tests are in progress.")
    }
}

// The cloud foundry credentials are required by the pipelines under test.
if (!System.getenv('CX_INFRA_IT_CF_USERNAME') || !System.getenv('CX_INFRA_IT_CF_PASSWORD')) {
    exitPrematurely('Environment variables CX_INFRA_IT_CF_USERNAME and CX_INFRA_IT_CF_PASSWORD need to be set.')
}

// Either run the single test case given via '-s <filePath>' or discover all
// test cases below the test cases directory.
if (options.s) {
    def file = new File(options.s)
    if (!file.exists()) {
        exitPrematurely("Test case configuration file '${file}' does not exist. " +
            "Please provide path to a configuration file of structure '/rootDir/areaDir/testCase.yml'.")
    }
    testCaseThreads = [new TestRunnerThread(file)]
} else {
    testCaseThreads = listTestCaseThreads()
}

// Launch all test cases in parallel.
testCaseThreads.each { it ->
    it.start()
}
//The thread below will print to console while the test cases are running.
//Otherwise the job would be canceled after 10 minutes without output.
def done = false
Thread.start {
    def outputWasPrintedPrematurely = false
    def singleTestCase = (testCaseThreads.size() == 1)
    if (singleTestCase) {
        AUXILIARY_SLEEP_MS = 1000 //for a single test case we print the running output every second
    }
    for (; ;) {
        // For a single test case stream its live stdout; otherwise just emit
        // a heartbeat line.
        if (singleTestCase) {
            testCaseThreads[0].printRunningStdOut()
        } else {
            println "[INFO] Consumer tests are still running."
        }

        // Build is killed at 50 min, print log to console at minute 45
        int MINUTES_SINCE_START = (System.currentTimeMillis() - START_TIME_MS) / (1000 * 60)
        if (!singleTestCase && MINUTES_SINCE_START > 44 && !outputWasPrintedPrematurely) {
            testCaseThreads.each { thread ->
                thread.printOutput()
            }
            outputWasPrintedPrematurely = true
        }

        sleep(AUXILIARY_SLEEP_MS)

        // 'done' is flipped by the main script once all runner threads joined.
        if (done) {
            break
        }
    }
}

// Block until every test case finished, then stop the auxiliary thread.
testCaseThreads.each { it ->
    it.join()
}
done = true
// Evaluate the overall result: a test case failed when its runner thread
// ended with a non-zero return code.
def failedThreads = testCaseThreads.findAll { thread ->
    thread.returnCode != 0
}

def status
def statusMessage

if (failedThreads.size() == 0) {
    status = "success"
    statusMessage = "All consumer tests finished successfully. Congratulations!"
} else {
    failedThreads.each { failedThread ->
        println "[ERROR] ${failedThread.uniqueName}: Process execution of command: '${failedThread.lastCommand}' failed. " +
            "Return code: ${failedThread.returnCode}."
        failedThread.printOutput()
    }
    status = "failure"
    // Bug fix: this must be an assignment. The original 'statusMessage "..."'
    // was parsed as a method call and raised a MissingMethodException on the
    // failure path instead of reporting the failed test cases.
    statusMessage = "The following consumer test(s) failed: ${failedThreads}"
}

// Report the final commit status on CI runs.
if (!RUNNING_LOCALLY) {
    notifyGithub(status, statusMessage)
}

println statusMessage

if (status == "failure") {
    System.exit(1)
}
// Builds one runner thread per test case; every yml file below the test case
// root directory defines one test case.
def listTestCaseThreads() {
    def runners = []
    def testCaseRoot = new File(TEST_CASES_DIR)
    testCaseRoot.traverse(type: FileType.FILES, nameFilter: ~/^.+\.yml\u0024/) { ymlFile ->
        runners.add(new TestRunnerThread(ymlFile))
    }
    return runners
}
// Posts a commit status ('pending'/'success'/'failure') for COMMIT_HASH to the
// GitHub statuses API, authenticated via INTEGRATION_TEST_VOTING_TOKEN.
// Terminates the script when GitHub does not answer with '201 Created'.
def notifyGithub(state, description) {
    println "[INFO] Notifying about state '${state}' for commit '${COMMIT_HASH}'."

    URL url = new URL("https://api.github.com/repos/SAP/jenkins-library/statuses/${COMMIT_HASH}")
    HttpURLConnection con = (HttpURLConnection) url.openConnection()
    con.setRequestMethod('POST')
    con.setRequestProperty("Content-Type", "application/json; utf-8");
    con.setRequestProperty('User-Agent', 'groovy-script')
    con.setRequestProperty('Authorization', "token ${System.getenv('INTEGRATION_TEST_VOTING_TOKEN')}")

    // Payload as expected by the GitHub statuses API.
    def postBody = [
        state      : state,
        target_url : System.getenv('TRAVIS_BUILD_WEB_URL'),
        description: description,
        context    : "integration-tests"
    ]

    con.setDoOutput(true)
    con.getOutputStream().withStream { os ->
        os.write(toJson(postBody).getBytes("UTF-8"))
    }

    int responseCode = con.getResponseCode()
    if (responseCode != HttpURLConnection.HTTP_CREATED) {
        exitPrematurely("[ERROR] Posting status to github failed. Expected response code " +
            "'${HttpURLConnection.HTTP_CREATED}', but got '${responseCode}'. " +
            "Response message: '${con.getResponseMessage()}'",
            34) // Error code taken from curl: CURLE_HTTP_POST_ERROR
    }
}
// Returns true when the diff against SAP/jenkins-library master consists only
// of files which are excluded from consumer testing (docs, unit tests, travis
// config). Changes on master itself are always tested.
def changeDoesNotNeedConsumerTesting() {
    if (System.getenv('TRAVIS_BRANCH') == 'master') {
        return false
    }

    def excludesRegex = '(' + EXCLUDED_FROM_CONSUMER_TESTING_REGEXES.join('|') + ')'

    // Compare against the upstream master branch, not a potential fork's master.
    "git remote add sap https://github.com/SAP/jenkins-library.git".execute().waitFor()
    "git fetch sap".execute().waitFor()
    def diff = "git diff --name-only sap/master ${LIBRARY_VERSION_UNDER_TEST}".execute().text.trim()

    // Testing is unnecessary only if every changed file matches the exclude list.
    return diff.readLines().every { changedFile ->
        changedFile ==~ excludesRegex
    }
}
// Recreates the given directory from scratch; aborts the whole script when a
// file system operation fails.
static def newEmptyDir(String dirName) {
    def directory = new File(dirName)
    if (directory.exists() && !directory.deleteDir()) {
        exitPrematurely("Deletion of dir '${dirName}' failed.")
    }
    if (!directory.mkdirs()) {
        exitPrematurely("Creation of dir '${dirName}' failed.")
    }
}
// Prints the message and terminates the whole script with the given return
// code (default 1).
static def exitPrematurely(String message, int returnCode = 1) {
    println message
    System.exit(returnCode)
}

View File

@ -1,139 +0,0 @@
#!/bin/bash
# Prints an error message to stderr and terminates the script with the given
# return code (defaults to 1).
function fail() {
    local message="$1"
    local rc="${2:-1}"
    printf '[ERROR] %s\n' "${message}" >&2
    exit "${rc}"
}
# Posts a commit status to the GitHub statuses API.
#   $1 - state ('pending'/'success'/'failure')
#   $2 - human-readable description
#   $3 - commit hash the status is attached to
# Fails the script when the curl call does not succeed.
function notify() {
    local state=${1}
    local description=${2}
    local hash=${3}

    echo "[INFO] Notifying about state \"${state}\" for commit \"${hash}\"."

    curl -X POST \
        --fail \
        --silent \
        --output /dev/null \
        --data "{\"state\": \"${state}\", \"target_url\": \"${TRAVIS_BUILD_WEB_URL}\", \"description\": \"${description}\", \"context\": \"integration-tests\"}" \
        --user "${INTEGRATION_TEST_VOTING_USER}:${INTEGRATION_TEST_VOTING_TOKEN}" \
        "https://api.github.com/repos/SAP/jenkins-library/statuses/${hash}" || fail "Cannot send notification. curl return code: $?"
}
# EXIT handler: stops the background keep-alive thread, if it is still running.
function cleanup() {
    [[ -z "${notificationThreadPid}" ]] || kill -PIPE "${notificationThreadPid}" &>/dev/null
}
trap cleanup EXIT

#
# In case the build is performed for a pull request TRAVIS_COMMIT is a merge
# commit between the base branch and the PR branch HEAD. That commit is actually built.
# But for notifying about a build status we need the commit which is currently
# the HEAD of the PR branch.
#
# In case the build is performed for a simple branch (not associated with a PR)
# there is no merge commit between any base branch and HEAD of a PR branch.
# The commit which we need for notifying about a build status is in this case simply
# TRAVIS_COMMIT itself.
#
COMMIT_HASH_FOR_STATUS_NOTIFICATIONS="${TRAVIS_PULL_REQUEST_SHA}"
[[ -z "${COMMIT_HASH_FOR_STATUS_NOTIFICATIONS}" ]] && COMMIT_HASH_FOR_STATUS_NOTIFICATIONS="${TRAVIS_COMMIT}"

notify "pending" "Integration tests in progress." "${COMMIT_HASH_FOR_STATUS_NOTIFICATIONS}"

# Start from a clean workspaces directory.
WORKSPACES_ROOT=workspaces
[[ -e "${WORKSPACES_ROOT}" ]] && rm -rf ${WORKSPACES_ROOT}

# Every yml file below 'testCases' defines one test case.
TEST_CASES=$(find testCases -name '*.yml')

# This auxiliary thread is needed in order to produce some output while the
# tests are running. Otherwise the job will be canceled after 10 minutes without
# output.
while true; do sleep 10; echo "[INFO] Integration tests still running."; done &
notificationThreadPid=$!

# Launch each test case as a background process; remember 'area/testCase:pid'
# for each, so we can wait for them and collect their logs later.
declare -a processes
i=0
for f in ${TEST_CASES}
do
    testCase=$(basename "${f%.*}")
    area=$(dirname "${f#*/}")
    echo "[INFO] Running test case \"${testCase}\" in area \"${area}\"."
    TEST_CASE_ROOT="${WORKSPACES_ROOT}/${area}/${testCase}"
    [[ -e "${TEST_CASE_ROOT}" ]] && rm -rf "${TEST_CASE_ROOT}"
    mkdir -p "${TEST_CASE_ROOT}" || fail "Cannot create test case root directory for test case \"${testCase}\"." 1
    source ./runTest.sh "${testCase}" "${TEST_CASE_ROOT}" &> "${TEST_CASE_ROOT}/log.txt" &
    pid=$!
    processes[$i]="${area}/${testCase}:${pid}"
    echo "[INFO] Test case \"${testCase}\" in area \"${area}\" launched. (PID: \"${pid}\")."
    let i=i+1
done

[[ "${i}" == 0 ]] && fail "No tests has been executed." 1

#
# wait for the test cases and cat the log
for p in "${processes[@]}"
do
    area=$(dirname "${p%:*}")
    testCase=$(basename "${p%:*}")
    processId="${p#*:}"
    echo "[INFO] Waiting for test case \"${testCase}\" in area \"${area}\" (PID: \"${processId}\")."
    wait "${processId}"
    echo "[INFO] Test case \"${testCase}\" in area \"${area}\" finished (PID: \"${processId}\")."
done

# All test cases are done; stop the keep-alive thread.
kill -PIPE "${notificationThreadPid}" &>/dev/null && notificationThreadPid=""

#
# provide the logs
for p in "${processes[@]}"
do
    area=$(dirname "${p%:*}")
    testCase=$(basename "${p%:*}")
    TEST_CASE_ROOT="${WORKSPACES_ROOT}/${area}/${testCase}"
    echo "[INFO] === START === Logs for test case \"${testCase}\" ===."
    cat "${TEST_CASE_ROOT}/log.txt"
    echo "[INFO] === END === Logs for test case \"${testCase}\" ===."
done

#
# list test case status; a test case is successful when runTest.sh dropped a
# SUCCESS marker file into its root directory.
echo "[INFO] Build status:"
failure="false"
for p in "${processes[@]}"
do
    status="UNDEFINED"
    area=$(dirname "${p%:*}")
    testCase=$(basename "${p%:*}")
    TEST_CASE_ROOT="${WORKSPACES_ROOT}/${area}/${testCase}"
    if [[ -f "${TEST_CASE_ROOT}/SUCCESS" ]]
    then
        status="SUCCESS"
    else
        status="FAILURE"
        failure="true"
    fi
    printf "[INFO] %-30s: %s\n" "${testCase}" "${status}"
done

# Report the overall result to GitHub and set the job's exit status.
STATUS_DESCRIPTION="The integration tests failed."
STATUS_STATE="failure"

if [[ "${failure}" == "false" ]]
then
    STATUS_DESCRIPTION="The integration tests succeeded."
    STATUS_STATE="success"
fi

notify "${STATUS_STATE}" "${STATUS_DESCRIPTION}" "${COMMIT_HASH_FOR_STATUS_NOTIFICATIONS}"

[[ "${failure}" != "false" ]] && fail "Integration tests failed." 1

echo "[INFO] Integration tests succeeded."
exit 0

View File

@ -1,30 +0,0 @@
#!/usr/bin/env bash

# Runs a single consumer test case:
#   $1 - test case name (also the branch of the reference app to check out)
#   $2 - test case root directory (SUCCESS marker file is written there)
# On success an empty SUCCESS file is created in the test case root.
TEST_CASE=$1
TEST_CASE_ROOT=$2
TEST_CASE_WORKSPACE="${TEST_CASE_ROOT}/workspace"

# Commit hash of the library version under test and the repo slug to configure
# in the Jenkins instance (fork-aware via TRAVIS_REPO_SLUG).
LIBRARY_VERSION_UNDER_TEST=$(git log --format="%H" -n 1)
REPOSITORY_UNDER_TEST=${TRAVIS_REPO_SLUG:-SAP/jenkins-library}

git clone -b "${TEST_CASE}" https://github.com/sap/cloud-s4-sdk-book "${TEST_CASE_WORKSPACE}"
cp -f jenkins.yml "${TEST_CASE_WORKSPACE}"
cd "${TEST_CASE_WORKSPACE}" || exit 1

# Configure path to library-repository under test in Jenkins config
sed -i -e "s:__REPO_SLUG__:${REPOSITORY_UNDER_TEST}:g" jenkins.yml

# Force usage of library version under test by setting it in the Jenkinsfile which is then the first definition and thus has the highest precedence
echo "@Library(\"piper-library-os@$LIBRARY_VERSION_UNDER_TEST\") _" | cat - Jenkinsfile > temp && mv temp Jenkinsfile

# Commit the changed version because artifactSetVersion expects the git repo not to be dirty
git commit --all --author="piper-testing-bot <piper-testing-bot@example.com>" --message="Set piper lib version for test"

# Run the pipeline of the reference app inside the jenkinsfile-runner container.
docker run -v /var/run/docker.sock:/var/run/docker.sock -v "${PWD}":/workspace -v /tmp -e CASC_JENKINS_CONFIG=/workspace/jenkins.yml \
-e CX_INFRA_IT_CF_USERNAME -e CX_INFRA_IT_CF_PASSWORD -e BRANCH_NAME="${TEST_CASE}" ppiper/jenkinsfile-runner
RC=$?

cd - &> /dev/null || { echo "[ERROR] change directory back into integration test root folder failed."; exit 1; }

[[ "${RC}" == 0 ]] && touch "${TEST_CASE_ROOT}/SUCCESS"

View File

@ -1,2 +1,2 @@
# Empty for the moment.
# Might contain test configuration in the future.
# Test case configuration
referenceAppRepoUrl: "https://github.com/sap/cloud-s4-sdk-book"

View File

@ -1,2 +1,2 @@
# Empty for the moment.
# Might contain test configuration in the future.
# Test case configuration
referenceAppRepoUrl: "https://github.com/sap/cloud-s4-sdk-book"

View File

@ -1,12 +1,15 @@
import groovy.io.FileType
import groovy.json.JsonOutput
import org.yaml.snakeyaml.Yaml
import org.codehaus.groovy.control.CompilerConfiguration
import com.sap.piper.GenerateDocumentation
import java.util.regex.Matcher
import groovy.text.StreamingTemplateEngine
import com.sap.piper.MapUtils
//
// Collects helper functions for rendering the docu
// Collects helper functions for rendering the documentation
//
class TemplateHelper {
@ -19,12 +22,33 @@ class TemplateHelper {
parameters.keySet().toSorted().each {
def props = parameters.get(it)
t += "| `${it}` | ${props.mandatory ?: props.required ? 'yes' : 'no'} | ${(props.defaultValue ? '`' + props.defaultValue + '`' : '') } | ${props.value ?: ''} |\n"
def defaultValue = isComplexDefault(props.defaultValue) ? renderComplexDefaultValue(props.defaultValue) :
props.defaultValue != null ? "`${props.defaultValue}`" : ''
t += "| `${it}` | ${props.mandatory ?: props.required ? 'yes' : 'no'} | ${defaultValue} | ${props.value ?: ''} |\n"
}
t
}
private static boolean isComplexDefault(def _default) {
if(! (_default in Collection)) return false
if(_default.size() == 0) return false
for(def entry in _default) {
if(! (entry in Map)) return false
if(! entry.dependentParameterKey) return false
if(! entry.key) return false
}
return true
}
private static renderComplexDefaultValue(def _default) {
_default
.collect { "${it.dependentParameterKey}=`${it.key ?: '<empty>'}`:`${it.value ?: '<empty>'}`" }
.join('<br />')
}
static createParameterDescriptionSection(Map parameters) {
def t = ''
parameters.keySet().toSorted().each {
@ -62,13 +86,15 @@ class TemplateHelper {
//
class Helper {
static projectRoot = new File(Helper.class.protectionDomain.codeSource.location.path).getParentFile().getParentFile().getParentFile()
static getConfigHelper(classLoader, roots, script) {
def compilerConfig = new CompilerConfiguration()
compilerConfig.setClasspathList( roots )
new GroovyClassLoader(classLoader, compilerConfig, true)
.parseClass(new File('src/com/sap/piper/ConfigurationHelper.groovy'))
.parseClass(new File(projectRoot, 'src/com/sap/piper/ConfigurationHelper.groovy'))
.newInstance(script, [:]).loadStepDefaults()
}
@ -80,16 +106,20 @@ class Helper {
m, c -> c()
}
prepareDefaultValuesStep.metaClass.libraryResource {
f -> new File("resources/${f}").text
f -> new File(projectRoot,"resources/${f}").text
}
prepareDefaultValuesStep.metaClass.readYaml {
m -> new Yaml().load(m.text)
}
prepareDefaultValuesStep.metaClass.echo {
m -> println(m)
}
prepareDefaultValuesStep
}
static getDummyScript(def prepareDefaultValuesStep, def stepName) {
static getDummyScript(def prepareDefaultValuesStep, def stepName, Map prepareDefaultValuesStepParams) {
def _prepareDefaultValuesStep = prepareDefaultValuesStep
def _stepName = stepName
@ -99,7 +129,8 @@ class Helper {
def STEP_NAME = _stepName
def prepareDefaultValues() {
_prepareDefaultValuesStep()
_prepareDefaultValuesStep(prepareDefaultValuesStepParams)
}
def run() {
@ -174,13 +205,23 @@ class Helper {
boolean docu = false,
value = false,
mandatory = false,
parentObject = false,
docuEnd = false
def docuLines = [], valueLines = [], mandatoryLines = []
def docuLines = [], valueLines = [], mandatoryLines = [], parentObjectLines = []
f.eachLine {
line ->
if(line ==~ /.*dependingOn.*/) {
def dependentConfigKey = (line =~ /.*dependingOn\('(.*)'\).mixin\('(.*)'/)[0][1]
def configKey = (line =~ /.*dependingOn\('(.*)'\).mixin\('(.*)'/)[0][2]
if(! step.dependentConfig[configKey]) {
step.dependentConfig[configKey] = []
}
step.dependentConfig[configKey] << dependentConfigKey
}
if(docuEnd) {
docuEnd = false
@ -197,13 +238,17 @@ class Helper {
throw new RuntimeException('Cannot retrieve parameter for a comment')
}
def _docu = [], _value = [], _mandatory = [], _parentObject = []
docuLines.each { _docu << it }
valueLines.each { _value << it }
mandatoryLines.each { _mandatory << it }
parentObjectLines.each { _parentObject << it }
_parentObject << param
param = _parentObject*.trim().join('/').trim()
if(step.parameters[param].docu || step.parameters[param].value)
System.err << "[WARNING] There is already some documentation for parameter '${param}. Is this parameter documented twice?'\n"
def _docu = [], _value = [], _mandatory = []
docuLines.each { _docu << it }
valueLines.each { _value << it}
mandatoryLines.each { _mandatory << it}
step.parameters[param].docu = _docu*.trim().join(' ').trim()
step.parameters[param].value = _value*.trim().join(' ').trim()
step.parameters[param].mandatory = _mandatory*.trim().join(' ').trim()
@ -211,6 +256,7 @@ class Helper {
docuLines.clear()
valueLines.clear()
mandatoryLines.clear()
parentObjectLines.clear()
}
if( line.trim() ==~ /^\/\*\*.*/ ) {
@ -227,11 +273,19 @@ class Helper {
if(_line ==~ /.*@possibleValues.*/) {
mandatory = false // should be something like reset attributes
value = true
parentObject = false
}
// some remark for mandatory e.g. some parameters are only mandatory under certain conditions
if(_line ==~ /.*@mandatory.*/) {
value = false // should be something like reset attributes ...
mandatory = true
parentObject = false
}
// grouping config properties within a parent object for easier readability
if(_line ==~ /.*@parentConfigKey.*/) {
value = false // should be something like reset attributes ...
mandatory = false
parentObject = true
}
if(value) {
@ -248,7 +302,14 @@ class Helper {
}
}
if(! value && ! mandatory) {
if(parentObject) {
if(_line) {
_line = (_line =~ /.*@parentConfigKey\s*?(.*)/)[0][1]
parentObjectLines << _line
}
}
if(!value && !mandatory && !parentObject) {
docuLines << _line
}
}
@ -257,6 +318,7 @@ class Helper {
docu = false
value = false
mandatory = false
parentObject = false
docuEnd = true
}
}
@ -289,7 +351,7 @@ class Helper {
def params = [] as Set
f.eachLine {
line ->
if( line ==~ /.*withMandatoryProperty.*/ ) {
if (line ==~ /.*withMandatoryProperty.*/) {
def param = (line =~ /.*withMandatoryProperty\('(.*)'/)[0][1]
params << param
}
@ -297,11 +359,23 @@ class Helper {
return params
}
static getValue(Map config, def pPath) {
def p =config[pPath.head()]
if(pPath.size() == 1) return p // there is no tail
if(p in Map) getValue(p, pPath.tail())
else return p
static getParentObjectMappings(File f) {
def mappings = [:]
def parentObjectKey = ''
f.eachLine {
line ->
if (line ==~ /.*parentConfigKey.*/ && !parentObjectKey) {
def param = (line =~ /.*parentConfigKey\s*?(.*)/)[0][1]
parentObjectKey = param.trim()
} else if (line ==~ /\s*?(.*)[,]{0,1}/ && parentObjectKey) {
def pName = retrieveParameterName(line)
if(pName) {
mappings.put(pName, parentObjectKey)
parentObjectKey = ''
}
}
}
return mappings
}
static resolveDocuRelevantSteps(GroovyScriptEngine gse, File stepsDir) {
@ -325,12 +399,13 @@ class Helper {
}
roots = [
'vars',
'src',
]
new File(Helper.projectRoot, "vars").getAbsolutePath(),
new File(Helper.projectRoot, "src").getAbsolutePath()
]
stepsDir = null
stepsDocuDir = null
String customDefaults = null
steps = []
@ -340,17 +415,22 @@ steps = []
if(args.length >= 1)
stepsDir = new File(args[0])
stepsDir = stepsDir ?: new File('vars')
stepsDir = stepsDir ?: new File(Helper.projectRoot, "vars")
if(args.length >= 2)
stepsDocuDir = new File(args[1])
stepsDocuDir = stepsDocuDir ?: new File('documentation/docs/steps')
stepsDocuDir = stepsDocuDir ?: new File(Helper.projectRoot, "documentation/docs/steps")
def argsDrop = 2
if(args.length >= 3 && args[2].contains('.yml')) {
customDefaults = args[2]
argsDrop ++
}
if(args.length >= 3)
steps = (args as List).drop(2) // the first two entries are stepsDir and docuDir
// the other parts are considered as step names
steps = (args as List).drop(argsDrop) // the first two entries are stepsDir and docuDir
// the other parts are considered as step names
// assign parameters
@ -372,7 +452,7 @@ if( !stepsDir.exists() ) {
// sanity checks
//
def gse = new GroovyScriptEngine( [ stepsDir.getName() ] as String[] , getClass().getClassLoader() )
def gse = new GroovyScriptEngine([ stepsDir.getAbsolutePath() ] as String[], GenerateDocumentation.class.getClassLoader() )
//
// find all the steps we have to document (if no step has been provided from outside)
@ -389,7 +469,7 @@ boolean exceptionCaught = false
def stepDescriptors = [:]
for (step in steps) {
try {
stepDescriptors."${step}" = handleStep(step, prepareDefaultValuesStep, gse)
stepDescriptors."${step}" = handleStep(step, prepareDefaultValuesStep, gse, customDefaults)
} catch(Exception e) {
exceptionCaught = true
System.err << "${e.getClass().getName()} caught while handling step '${step}': ${e.getMessage()}.\n"
@ -404,6 +484,8 @@ for(step in stepDescriptors) {
def otherStep = param.value.docu.replaceAll('@see', '').trim()
param.value.docu = fetchTextFrom(otherStep, param.key, stepDescriptors)
param.value.mandatory = fetchMandatoryFrom(otherStep, param.key, stepDescriptors)
if(! param.value.value)
param.value.value = fetchPossibleValuesFrom(otherStep, param.key, stepDescriptors)
}
}
}
@ -424,6 +506,10 @@ if(exceptionCaught) {
System.exit(1)
}
File docuMetaData = new File('target/docuMetaData.json')
if(docuMetaData.exists()) docuMetaData.delete()
docuMetaData << new JsonOutput().toJson(stepDescriptors)
System.err << "[INFO] done.\n"
void renderStep(stepName, stepProperties) {
@ -467,7 +553,11 @@ def fetchMandatoryFrom(def step, def parameterName, def steps) {
}
}
def handleStep(stepName, prepareDefaultValuesStep, gse) {
def fetchPossibleValuesFrom(def step, def parameterName, def steps) {
return steps[step]?.parameters[parameterName]?.value ?: ''
}
def handleStep(stepName, prepareDefaultValuesStep, gse, customDefaults) {
File theStep = new File(stepsDir, "${stepName}.groovy")
File theStepDocu = new File(stepsDocuDir, "${stepName}.md")
@ -479,9 +569,13 @@ def handleStep(stepName, prepareDefaultValuesStep, gse) {
System.err << "[INFO] Handling step '${stepName}'.\n"
Map prepareDefaultValuesStepParams = [:]
if (customDefaults)
prepareDefaultValuesStepParams.customDefaults = customDefaults
def defaultConfig = Helper.getConfigHelper(getClass().getClassLoader(),
roots,
Helper.getDummyScript(prepareDefaultValuesStep, stepName)).use()
Helper.getDummyScript(prepareDefaultValuesStep, stepName, prepareDefaultValuesStepParams)).use()
def params = [] as Set
@ -502,7 +596,23 @@ def handleStep(stepName, prepareDefaultValuesStep, gse) {
params.addAll(requiredParameters)
def step = [parameters:[:]]
// translate parameter names according to compatibility annotations
// Parameters that historically lived on a parent object (declared via
// compatibility annotations scanned from the step source file) are
// re-qualified as a path-like key '<parentObject>/<parameterName>' so the
// generated documentation reflects the nested configuration layout.
def parentObjectMappings = Helper.getParentObjectMappings(theStep)
def compatibleParams = [] as Set
if(parentObjectMappings) {
    params.each {
        if (parentObjectMappings[it])
            // nested below a parent object -> prefix with the parent's name
            compatibleParams.add(parentObjectMappings[it] + '/' + it)
        else
            compatibleParams.add(it)
    }
    if (compatibleParams)
        params = compatibleParams
}
// 'dependentConfig' is only present here for internal reasons and that entry is removed at
// end of method.
def step = [parameters:[:], dependentConfig: [:]]
//
// START special handling for 'script' parameter
@ -526,7 +636,7 @@ def handleStep(stepName, prepareDefaultValuesStep, gse) {
it ->
def defaultValue = Helper.getValue(defaultConfig, it.split('/'))
def defaultValue = MapUtils.getByPath(defaultConfig, it)
def parameterProperties = [
defaultValue: defaultValue,
@ -545,5 +655,34 @@ def handleStep(stepName, prepareDefaultValuesStep, gse) {
Helper.scanDocu(theStep, step)
// Resolve dependent default values: when a parameter's default depends on the
// value of another ("controlling") parameter, collect one default entry per
// possible value of that controlling parameter.
step.parameters.each { k, v ->
    if(step.dependentConfig.get(k)) {
        def dependentParameterKey = step.dependentConfig.get(k)[0]
        def dependentValues = step.parameters.get(dependentParameterKey)?.value
        if (dependentValues) {
            def the_defaults = []
            // 'value' is a rendered list like "'a', 'b'"; strip quoting/spacing
            // and split it into the individual possible values.
            dependentValues
                .replaceAll('[\'"` ]', '')
                .split(',').each {possibleValue ->
                    // Fix: the original '!possibleValue instanceof Boolean' binds as
                    // '(!possibleValue) instanceof Boolean' (always true) because
                    // unary '!' has higher precedence than 'instanceof' in Groovy.
                    if (!(possibleValue instanceof Boolean) && defaultConfig.get(possibleValue)) {
                        the_defaults <<
                            [
                                dependentParameterKey: dependentParameterKey,
                                key: possibleValue,
                                // default for parameter k under this controlling value
                                value: MapUtils.getByPath(defaultConfig.get(possibleValue), k)
                            ]
                    }
                }
            v.defaultValue = the_defaults
        }
    }
}
//
// 'dependentConfig' is only present for internal purposes and must not be used outside.
step.remove('dependentConfig')
step
}

View File

@ -15,7 +15,8 @@ Set up an agile development process with Jenkins CI, which automatically feeds c
In many SAP development scenarios, it is vital to synchronize both backend and frontend deliveries. These deliveries are typically an SAP UI5 application and an ABAP backend from which it is served. The SAP UI5 parts are often developed using agile practices and use Continuous Integration pipelines that automatically build, test, and deploy the application.
**Note:** This scenario description is an example. You can apply the process to other scenarios and component sets, as well.
!!! note
This scenario description is an example. You can apply the process to other scenarios and component sets, as well.
In this scenario, we want to show how an agile development process with Jenkins CI can automatically feed changes into SAP Solution Manager. In SAP Solution Manager, all parts of the application stack come together and can be subject to classic change and transport management.

View File

@ -1,6 +1,6 @@
# Build and Deploy SAP UI5 or SAP Fiori Applications on SAP Cloud Platform with Jenkins
# Build and Deploy SAPUI5 or SAP Fiori Applications on SAP Cloud Platform with Jenkins
Build an application based on SAP UI5 or SAP Fiori with Jenkins and deploy the build result into an SAP Cloud Platform account in the Neo environment.
Build an application based on SAPUI5 or SAP Fiori with Jenkins and deploy the build result into an SAP Cloud Platform account in the Neo environment.
## Prerequisites
@ -11,12 +11,10 @@ Build an application based on SAP UI5 or SAP Fiori with Jenkins and deploy the b
* You have installed Node.js including node and npm. See [Node.js](https://nodejs.org/en/download/).
* You have installed the SAP Cloud Platform Neo Environment SDK. See [SAP Development Tools](https://tools.hana.ondemand.com/#cloud).
### Project Prerequisites
This scenario requires additional files in your project and in the execution environment on your Jenkins instance.
On the project level, provide and adjust the following template:
| File Name | Description | Position |
@ -26,13 +24,11 @@ On the project level, provide and adjust the following template:
| [`package.json`](https://github.com/SAP/jenkins-library/blob/master/documentation/docs/scenarios/ui5-sap-cp/files/package.json) | This file lists the required development dependencies for the build. | Add the content of the `package.json` file to your existing `package.json` file. |
| [`Gruntfile.js`](https://github.com/SAP/jenkins-library/blob/master/documentation/docs/scenarios/ui5-sap-cp/files/Gruntfile.js) | This file controls the grunt build. By default the tasks `clean`, `build`, and `lint` are executed. | Place the `Gruntfile.js` in the root directory of your project. |
## Context
This scenario combines various different steps to create a complete pipeline.
In this scenario, we want to show how to build an application based on SAP UI5 or SAP Fiori by using the multi-target application (MTA) concept and how to deploy the build result into an SAP Cloud Platform account in the Neo environment. This document comprises the [mtaBuild](https://sap.github.io/jenkins-library/steps/mtaBuild/) and the [neoDeploy](https://sap.github.io/jenkins-library/steps/neoDeploy/) steps.
In this scenario, we want to show how to build an application based on SAPUI5 or SAP Fiori by using the multi-target application (MTA) concept and how to deploy the build result into an SAP Cloud Platform account in the Neo environment. This document comprises the [mtaBuild](https://sap.github.io/jenkins-library/steps/mtaBuild/) and the [neoDeploy](https://sap.github.io/jenkins-library/steps/neoDeploy/) steps.
![This pipeline in Jenkins Blue Ocean](images/pipeline.jpg)
###### Screenshot: Build and Deploy Process in Jenkins
@ -73,7 +69,6 @@ steps:
| `buildTarget` | The target platform to which the mtar can be deployed. Possible values are: `CF`, `NEO`, `XSA` |
| `mtaJarLocation` | The location of the multi-target application archive builder jar file, including file name and extension. |
#### Configuration for the Deployment to SAP Cloud Platform
| Parameter | Description |
@ -83,7 +78,6 @@ steps:
| `host` | The SAP Cloud Platform host to deploy to. |
| `neoHome` | The path to the `neo-java-web-sdk` tool that is used for the deployment. |
### Parameters
For the detailed description of the relevant parameters, see:

View File

@ -1,79 +1,14 @@
# artifactSetVersion
# ${docGenStepName}
## Description
The continuous delivery process requires that each build is done with a unique version number.
The version generated using this step will contain:
* Version (major.minor.patch) from descriptor file in master repository is preserved. Developers should be able to autonomously decide on increasing either part of this version number.
* Timestamp
* CommitId (by default the long version of the hash)
Optionally, but enabled by default, the new version is pushed as a new tag into the source code repository (e.g. GitHub).
If this option is chosen, git credentials and the repository URL needs to be provided.
Since you might not want to configure the git credentials in Jenkins, committing and pushing can be disabled using the `commitVersion` parameter as described below.
If you require strict reproducibility of your builds, this should be used.
## ${docGenDescription}
## Prerequisites
none
## Parameters
## ${docGenParameters}
| parameter | mandatory | default | possible values |
| ----------|-----------|---------|-----------------|
| script | yes | | |
| artifactType | no | | 'appContainer' |
| buildTool | no | maven | docker, dlang, golang, maven, mta, npm, pip, sbt |
| commitVersion | no | `true` | `true`, `false` |
| dockerVersionSource | no | `''` | FROM, (ENV name),appVersion |
| filePath | no | buildTool=`docker`: Dockerfile <br />buildTool=`dlang`: dub.json <br />buildTool=`golang`: VERSION <br />buildTool=`maven`: pom.xml <br />buildTool=`mta`: mta.yaml <br />buildTool=`npm`: package.json <br />buildTool=`pip`: version.txt <br />buildTool=`sbt`: sbtDescriptor.json| |
| gitCommitId | no | `GitUtils.getGitCommitId()` | |
| gitSshCredentialsId | If `commitVersion` is `true` | as defined in custom configuration | |
| gitUserEMail | no | | |
| gitUserName | no | | |
| gitSshUrl | If `commitVersion` is `true` | | |
| tagPrefix | no | 'build_' | |
| timestamp | no | current time in format according to `timestampTemplate` | |
| timestampTemplate | no | `%Y%m%d%H%M%S` | |
| versioningTemplate | no |buildTool=`docker`: `${version}-${timestamp}${commitId?"_"+commitId:""}`<br> />buildTool=`dlang`: `${version}-${timestamp}${commitId?"+"+commitId:""}`<br />buildTool=`golang`:`${version}-${timestamp}${commitId?"+"+commitId:""}`<br />buildTool=`maven`: `${version}-${timestamp}${commitId?"_"+commitId:""}`<br />buildTool=`mta`: `${version}-${timestamp}${commitId?"+"+commitId:""}`<br />buildTool=`npm`: `${version}-${timestamp}${commitId?"+"+commitId:""}`<br />buildTool=`pip`: `${version}.${timestamp}${commitId?"."+commitId:""}`<br />buildTool=`sbt`: `${version}-${timestamp}${commitId?"+"+commitId:""}`| |
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for retrieving e.g. configuration parameters.
* `artifactType` defines the type of the artifact.
* `buildTool` defines the tool which is used for building the artifact.
* `commitVersion` controls if the changed version is committed and pushed to the git repository. If this is enabled (which is the default), you need to provide `gitCredentialsId` and `gitSshUrl`.
* `dockerVersionSource` specifies the source to be used for the main version which is used for generating the automatic version.
* This can either be the version of the base image - as retrieved from the `FROM` statement within the Dockerfile, e.g. `FROM jenkins:2.46.2`
* Alternatively the name of an environment variable defined in the Docker image can be used which contains the version number, e.g. `ENV MY_VERSION 1.2.3`
* The third option `appVersion` applies only to the artifactType `appContainer`. Here the version of the app which is packaged into the container will be used as version for the container itself.
* Using `filePath` you could define a custom path to the descriptor file.
* `gitCommitId` defines the version prefix of the automatically generated version. By default it will take the long commitId hash. You could pass any other string (e.g. the short commitId hash) to be used. In case you don't want to have the gitCommitId added to the automatic versioning string you could set the value to an empty string: `''`.
* `gitSshCredentialsId`defines the ssh git credentials to be used for writing the tag.
* The parameters `gitUserName` and `gitUserEMail` allow to overwrite the global git settings available on your Jenkins server
* `gitSshUrl` defines the git ssh url to the source code repository.
* `tagPrefix` defines the prefix wich is used for the git tag which is written during the versioning run.
* `timestamp` defines the timestamp to be used in the automatic version string. You could overwrite the default behavior by explicitly setting this string.
## Step configuration
The following parameters can also be specified as step parameters using the global configuration file:
* `artifactType`
* `buildTool`
* `commitVersion`
* `dockerVersionSource`
* `filePath`
* `gitCredentialsId`
* `gitUserEMail`
* `gitUserName`
* `gitSshUrl`
* `tagPrefix`
* `timestamp`
* `timestampTemplate`
* `versioningTemplate`
## ${docGenConfiguration}
## Example

View File

@ -1,38 +1,21 @@
# batsExecuteTests
# ${docGenStepName}
## Description
This step executes tests using the [Bash Automated Testing System - bats-core](https://github.com/bats-core/bats-core)
## ${docGenDescription}
## Prerequisites
You need to have a Bats test file. By default you would put this into directory `src/test` within your source code repository.
## Parameters
## ${docGenParameters}
| parameter | mandatory | default | possible values |
|-----------|-----------|---------|-----------------|
| script | yes | | |
| dockerImage | no | `node:8-stretch` | |
| dockerWorkspace | no |`/home/node`| |
| envVars | no | `[:]` | |
| failOnError | no | `false` | |
| gitBranch | no | | |
| gitSshKeyCredentialsId | no | | |
| outputFormat | no | `junit` | `tap` |
| repository | no | `https://github.com/bats-core/bats-core.git` | |
| stashContent | no | `['tests']` | |
| testPackage | no | `piper-bats` | |
| testPath | no | `src/test`| |
| testRepository | no | | |
## ${docGenConfiguration}
Details:
## Example
* `outputFormat` defines the format of the test result output. `junit` would be the standard for automated build environments but you could use also the option `tap`.
* For the transformation of the test result to xUnit format the node module **tap-xunit** is used. `dockerImage` and `dockerWorkspace` define the Docker image used for the transformation and `testPackage` defines the name of the test package used in the xUnit result file.
* `testPath` defines either the directory which contains the test files (`*.bats`) or a single file. You can find further details in the [Bats-core documentation](https://github.com/bats-core/bats-core#usage)
* With `failOnError` you can define the behavior, in case tests fail. For example, in case of `outputFormat: 'junit'` you should set it to `false`. Otherwise test results cannot be recorded using the `testsPublishhResults` step afterwards.
* You can pass environment variables to the test execution by defining parameter `envVars`.
```groovy
batsExecuteTests script:this
testsPublishResults junit: [pattern: '**/Test-*.xml', archive: true]
```
With `envVars` it is possible to pass either fixed values but also templates using [`commonPipelineEnvironment`](commonPipelineEnvironment.md).
@ -41,37 +24,10 @@ Details:
```yaml
batsExecuteTests script: this, envVars = [
FIX_VALUE: 'my fixed value',
CONTAINER_NAME: '${commonPipelineEnvironment.configuration.steps.executeBatsTests.dockerContainerName}',
IMAGE_NAME: '${return commonPipelineEnvironment.getDockerImageNameAndTag()}'
CONTAINER_NAME: '\${commonPipelineEnvironment.configuration.steps.executeBatsTests.dockerContainerName}',
IMAGE_NAME: '\${return commonPipelineEnvironment.getDockerImageNameAndTag()}'
]
```
This means within the test one could refer to environment variables by calling e.g.
`run docker run --rm -i --name $CONTAINER_NAME --entrypoint /bin/bash $IMAGE_NAME echo "Test"`
* Using the parameter `testRepository` the tests can be loaded from another repository. In case the tests are not located in the master branch the branch can be specified with `gitBranch`. For protected repositories you can also define the access credentials via `gitSshKeyCredentialsId`. **Note: In case of using a protected repository, `testRepository` should include the ssh link to the repository.**
* The parameter `repository` defines the version of **bats-core** to be used. By default we use the version from the master branch.
## Step configuration
The following parameters can also be specified as step/stage/general parameters using the [global configuration](../configuration.md):
* dockerImage
* dockerWorkspace
* envVars
* failOnError
* gitBranch
* gitSshKeyCredentialsId
* outputFormat
* repository
* stashContent
* testPackage
* testPath
* testRepository
## Example
```groovy
batsExecuteTests script:this
testsPublishResults junit: [pattern: '**/Test-*.xml', archive: true]
```
`run docker run --rm -i --name \$CONTAINER_NAME --entrypoint /bin/bash \$IMAGE_NAME echo "Test"`

View File

@ -1,8 +1,6 @@
# checksPublishResults
# ${docGenStepName}
## Description
This step can publish static check results from various sources.
## ${docGenDescription}
## Prerequisites
@ -15,31 +13,7 @@ This step can publish static check results from various sources.
* [warnings](https://plugins.jenkins.io/warnings)
* [core](https://plugins.jenkins.io/core)
## Parameters
| parameter | mandatory | default | possible values |
| ---------------|-----------|-----------------------------------|--------------------|
| script | yes | | |
| aggregation | no | `true` | see below |
| tasks | no | `false` | see below |
| pmd | no | `false` | see below |
| cpd | no | `false` | see below |
| findbugs | no | `false` | see below |
| checkstyle | no | `false` | see below |
| eslint | no | `false` | see below |
| pylint | no | `false` | see below |
| archive | no | `false` | `true`, `false` |
* `aggregation` - Publishes .
* `tasks` - Searches and publishes TODOs in files with the [Task Scanner Plugin](https://wiki.jenkins-ci.org/display/JENKINS/Task+Scanner+Plugin).
* `pmd` - Publishes PMD findings with the [PMD plugin](https://plugins.jenkins.io/pmd) .
* `cpd` - Publishes CPD findings with the [DRY plugin](https://plugins.jenkins.io/dry).
* `findbugs` - Publishes Findbugs findings with the [Findbugs plugin](https://plugins.jenkins.io/findbugs).
* `checkstyle` - Publishes Checkstyle findings with the [Checkstyle plugin](https://plugins.jenkins.io/checkstyle).
* `eslint` - Publishes ESLint findings (in [JSLint format](https://eslint.org/docs/user-guide/formatters/)) with the [Warnings plugin](https://plugins.jenkins.io/warnings).
* `pylint` - Publishes PyLint findings with the [Warnings plugin](https://plugins.jenkins.io/warnings), pylint needs to run with `--output-format=parseable` option.
Each of the parameters `aggregation`, `tasks`, `pmd`, `cpd`, `findbugs`, `checkstyle`, `eslint` and `pylint` can be set to `true` or `false` but also to a map of parameters to hand in different settings for the tools.
## ${docGenParameters}
### aggregation
@ -106,19 +80,7 @@ Each of the parameters `aggregation`, `tasks`, `pmd`, `cpd`, `findbugs`, `checks
| archive | no | `true` | `true`, `false` |
| thresholds | no | none | see [thresholds](#thresholds) |
## Step configuration
Following parameters can also be specified as step parameters using the global configuration file:
* `aggregation`
* `tasks`
* `pmd`
* `cpd`
* `findbugs`
* `checkstyle`
* `eslint`
* `pylint`
* `archive`
## ${docGenConfiguration}
### Thresholds

View File

@ -1,18 +1,6 @@
# cloudFoundryDeploy
# ${docGenStepName}
## Description
The application will be deployed to a test or production space within Cloud Foundry.
Deployment can be done
* in a standard way
* in a zero downtime manner (using a [blue-green deployment approach](https://martinfowler.com/bliki/BlueGreenDeployment.html))
!!! note "Deployment supports multiple deployment tools"
Currently the following are supported:
* Standard `cf push` and [Bluemix blue-green plugin](https://github.com/bluemixgaragelondon/cf-blue-green-deploy#how-to-use)
* [MTA CF CLI Plugin](https://github.com/cloudfoundry-incubator/multiapps-cli-plugin)
## ${docGenDescription}
## Prerequisites
@ -21,90 +9,9 @@ Deployment can be done
![Jenkins credentials configuration](../images/cf_credentials.png)
## Parameters
## ${docGenParameters}
| parameter | mandatory | default | possible values |
| ----------|-----------|---------|-----------------|
| script | yes | | |
| cloudFoundry | yes | | |
| deployTool | no | cf_native | cf_native, mtaDeployPlugin |
| deployType | no | standard | standard, blue-green |
| keepOldInstance | no | false | true, false |
| dockerImage | no | s4sdk/docker-cf-cli | |
| dockerWorkspace | no | /home/piper | |
| mtaDeployParameters | | for _deployType:standard_ `-f`<br />for _deployType:blue-green_ `-f --no-confirm` | |
| mtaExtensionDescriptor | no | '' | |
| mtaPath | no | '' | |
| smokeTestScript | no | blueGreenCheckScript.sh (provided by library). <br />Can be overwritten using config property 'smokeTestScript' | |
| smokeTestStatusCode | no | 200 | |
| stashContent | no | [] | |
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for retrieving e.g. configuration parameters.
* `cloudFoundry` defines a map containing following properties:
* `apiEndpoint`: Cloud Foundry API endpoint (default: `https://api.cf.eu10.hana.ondemand.com`)
* `appName`: App name of application to be deployed (optional)
* `credentialsId`: Credentials to be used for deployment (mandatory)
* `manifest`: Manifest to be used for deployment
* `org`: Cloud Foundry target organization (mandatory)
* `space`: Cloud Foundry target space (mandatory)
Example: `cloudFoundry: [apiEndpoint: 'https://test.server.com', appName:'cfAppName', credentialsId: 'cfCredentialsId', manifest: 'cfManifest', org: 'cfOrg', space: 'cfSpace']`
!!! note
It is also possible to use following configuration parameters instead of `cloudFoundry` map:
- cfApiEndpoint
- cfAppName
- cfCredentialsId
- cfManifest
- cfOrg
- cfSpace
!!! note
Due to [an incompatible change](https://github.com/cloudfoundry/cli/issues/1445) in the Cloud Foundry CLI, multiple buildpacks are not supported by this step.
If your `application` contains a list of `buildpacks` instead a single `buildpack`, this will be automatically re-written by the step when blue-green deployment is used.
* `deployTool` defines the tool which should be used for deployment.
* `deployType` defines the type of deployment, either `standard` deployment which results in a system downtime or a zero-downtime `blue-green` deployment.
* `keepOldInstance` in case of a `blue-green` deployment the old instance will be deleted by default. If this option is set to true the old instance will remain stopped in the Cloud Foundry space.
* `dockerImage` defines the Docker image containing the deployment tools (like cf cli, ...) and `dockerWorkspace` defines the home directory of the default user of the `dockerImage`
* `smokeTestScript` allows to specify a script which performs a check during blue-green deployment. The script gets the FQDN as parameter and returns `exit code 0` in case check returned `smokeTestStatusCode`. More details can be found [here](https://github.com/bluemixgaragelondon/cf-blue-green-deploy#how-to-use) <br /> Currently this option is only considered for deployTool `cf_native`.
* `stashContent` defines the stash names which should be unstashed at the beginning of the step. This makes the files available in case the step is started on an empty node.
### Deployment with cf_native
* `appName` in `cloudFoundry` map (or `cfAppName`) defines the name of the application which will be deployed to the Cloud Foundry space.
* `manifest` in `cloudFoundry` maps (or `cfManifest`) defines the manifest to be used for Cloud Foundry deployment.
!!! note
Cloud Foundry supports the deployment of multiple applications using a single manifest file.
This option is supported with Piper.
In this case define `appName: ''` since the app name for the individual applications have to be defined via the manifest.
You can find details in the [Cloud Foundry Documentation](https://docs.cloudfoundry.org/devguide/deploy-apps/manifest.html#multi-apps)
### Deployment with mtaDeployPlugin
* `mtaPath` define path to *.mtar for deployment.
* `mtaExtensionDescriptor` defines additional extension descriptor file for deployment.
* `mtaDeployParameters` defines additional parameters passed to mta deployment.
## Step configuration
The following parameters can also be specified as step/stage/general parameters using the [global configuration](../configuration.md):
* cloudFoundry
* deployUser
* deployTool
* deployType
* dockerImage
* dockerWorkspace
* mtaDeployParameters
* mtaExtensionDescriptor
* mtaPath
* smokeTestScript
* smokeTestStatusCode
* stashContent
## ${docGenConfiguration}
## Example

View File

@ -10,176 +10,6 @@ none
## Method details
### getArtifactVersion()
#### Description
Returns the version of the artifact which is build in the pipeline.
#### Parameters
none
#### Return value
A `String` containing the version.
#### Side effects
none
#### Exceptions
none
#### Example
```groovy
def myVersion = commonPipelineEnvironment.getArtifactVersion()
```
### setArtifactVersion(version)
#### Description
Sets the version of the artifact which is build in the pipeline.
#### Parameters
none
#### Return value
none
#### Side effects
none
#### Exceptions
none
#### Example
```groovy
commonPipelineEnvironment.setArtifactVersion('1.2.3')
```
### getConfigProperties()
#### Description
Returns the map of project specific configuration properties. No defensive copy is created.
Write operations to the map are visible further down in the pipeline.
#### Parameters
none
#### Return value
A map containing project specific configuration properties.
#### Side effects
none
#### Exceptions
none
#### Example
```groovy
commonPipelineEnvironment.getConfigProperties()
```
### setConfigProperties(configuration)
#### Description
Sets the map of configuration properties. Any existing map is overwritten.
#### Parameters
* `configuration` - A map containing the new configuration
#### Return value
none
#### Side effects
none
#### Exceptions
none
#### Example
```groovy
commonPipelineEnvironment.setConfigProperties([DEPLOY_HOST: 'deploy-host.com', DEPLOY_ACCOUNT: 'deploy-account'])
```
### getConfigProperty(property)
#### Description
Gets a specific value from the configuration property.
#### Parameters
* `property` - The key of the property.
#### Return value
* The value associated with key `property`. `null` is returned in case the property does not exist.
#### Side effects
none
#### Exceptions
none
#### Example
```groovy
commonPipelineEnvironment.getConfigProperty('DEPLOY_HOST')
```
### setConfigProperty(property, value)
#### Description
Sets property `property` with value `value`. Any existing property with key `property` is overwritten.
#### Parameters
* `property` - The key of the property.
* `value` - The value of the property.
#### Return value
none
#### Side effects
none
#### Exceptions
none
#### Example
```groovy
commonPipelineEnvironment.setConfigProperty('DEPLOY_HOST', 'my-deploy-host.com')
```
### getInfluxCustomData()
#### Description
@ -239,58 +69,6 @@ none
def myInfluxDataMap = commonPipelineEnvironment.getInfluxCustomDataMap()
```
### getMtarFileName()
#### Description
Returns the path of the mtar archive file.
#### Parameters
none
#### Return value
The path of the mtar archive file.
#### Side effects
none
#### Exceptions
none
#### Example
```groovy
commonPipelineEnvironment.getMtarFileName()
```
### setMtarFileName(name)
#### Description
Sets the path of the mtar archive file. Any old value is discarded.
#### Parameters
* `mtarFilePath` - The path of the mtar archive file name.
#### Side effects
none
#### Exceptions
none
#### Example
```groovy
commonPipelineEnvironment.setMtarFileName('path/to/foo.mtar')
```
### getPipelineMeasurement(measurementName)
#### Description

View File

@ -1,20 +1,15 @@
# containerExecuteStructureTests
# ${docGenStepName}
## Description
In this step [Container Structure Tests](https://github.com/GoogleContainerTools/container-structure-test) are executed.
This testing framework allows you to execute different test types against a Docker container, for example:
* Command tests (only if a Docker Deamon is available)
* File existence tests
* File content tests
* Metadata test
## ${docGenDescription}
## Prerequisites
Test configuration is available.
## ${docGenParameters}
## ${docGenConfiguration}
## Example
```
@ -24,59 +19,3 @@ containerExecuteStructureTests(
testImage: 'node:latest'
)
```
## Parameters
| parameter | mandatory | default | possible values |
| ----------|-----------|---------|-----------------|
|script|yes|||
|containerCommand|no|``||
|containerShell|no|``||
|dockerImage|yes|`ppiper/container-structure-test`||
|dockerOptions|no|`-u 0 --entrypoint=''`||
|failOnError|no|`true`|`true`, `false`|
|pullImage|no||`true`, `false`|
|stashContent|no|<ul><li>`tests`</li></ul>||
|testConfiguration|no|||
|testDriver|no|||
|testImage|no|||
|testReportFilePath|no|`cst-report.json`||
|verbose|no||`true`, `false`|
Details:
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for storing the measured duration.
* `containerCommand`: Only for Kubernetes environments: Command which is executed to keep container alive, defaults to '/usr/bin/tail -f /dev/null'
* containerShell: Only for Kubernetes environments: Shell to be used inside container, defaults to '/bin/sh'
* dockerImage: Docker image for code execution.
* dockerOptions: Options to be passed to Docker image when starting it (only relevant for non-Kubernetes case).
* failOnError: Defines the behavior, in case tests fail.
* pullImage: Only relevant for testDriver 'docker'.
* stashContent: If specific stashes should be considered for the tests, you can pass this via this parameter.
* testConfiguration: Container structure test configuration in yml or json format. You can pass a pattern in order to execute multiple tests.
* testDriver: Container structure test driver to be used for testing, please see [https://github.com/GoogleContainerTools/container-structure-test](https://github.com/GoogleContainerTools/container-structure-test) for details.
* testImage: Image to be tested
* testReportFilePath: Path and name of the test report which will be generated
* verbose: Print more detailed information into the log.
## Step configuration
We recommend to define values of step parameters via [config.yml file](../configuration.md).
In following sections the configuration is possible:
| parameter | general | step | stage |
| ----------|-----------|---------|-----------------|
|script||||
|containerCommand||X|X|
|containerShell||X|X|
|dockerImage||X|X|
|dockerOptions||X|X|
|failOnError||X|X|
|pullImage||X|X|
|stashContent||X|X|
|testConfiguration||X|X|
|testDriver||X|X|
|testImage||X|X|
|testReportFilePath||X|X|
|verbose|X|X|X|

View File

@ -0,0 +1,20 @@
# ${docGenStepName}
## ${docGenDescription}
## Prerequisites
You need to store the API token for the Detect service as _'Secret text'_ credential in your Jenkins system.
!!! note "minimum plugin requirement"
This step requires [synopsys-detect-plugin](https://github.com/jenkinsci/synopsys-detect-plugin) with at least version `2.0.0`.
## Example
```groovy
detectExecuteScan script: this, scanProperties: ['--logging.level.com.synopsys.integration=TRACE']
```
## ${docGenParameters}
## ${docGenConfiguration}

View File

@ -1,37 +1,10 @@
# durationMeasure
# ${docGenStepName}
## Description
## ${docGenDescription}
This step is used to measure the duration of a set of steps, e.g. a certain stage.
The duration is stored in a Map. The measurement data can then be written to an Influx database using step [influxWriteData](influxWriteData.md).
## ${docGenParameters}
!!! tip
Measuring for example the duration of pipeline stages helps to identify potential bottlenecks within the deployment pipeline.
This then helps to counter identified issues with respective optimization measures, e.g parallelization of tests.
## Prerequisites
none
## Pipeline configuration
none
## Parameters
| parameter | mandatory | default | possible values |
| ----------|-----------|---------|-----------------|
| script | yes | | |
| measurementName | no | test_duration | |
Details:
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for storing the measured duration.
* `measurementName` defines the name of the measurement which is written to the Influx database.
## Step configuration
none
## ${docGenConfiguration}
## Example

View File

@ -1,39 +1,17 @@
# gaugeExecuteTests
# ${docGenStepName}
## Description
In this step Gauge ([getgauge.io](http:getgauge.io)) acceptance tests are executed.
Using Gauge it will be possible to have a three-tier test layout:
* Acceptance Criteria
* Test implemenation layer
* Application driver layer
This layout is propagated by Jez Humble and Dave Farley in their book "Continuous Delivery" as a way to create maintainable acceptance test suites (see "Continuous Delivery", p. 190ff).
Using Gauge it is possible to write test specifications in [Markdown syntax](http://daringfireball.net/projects/markdown/syntax) and therefore allow e.g. product owners to write the relevant acceptance test specifications. At the same time it allows the developer to implement the steps described in the specification in her development environment.
You can use the [sample projects](https://github.com/getgauge/gauge-mvn-archetypes) of Gauge.
!!! note "Make sure to run against a Selenium Hub configuration"
In the test example of _gauge-archetype-selenium_ please make sure to allow it to run against a Selenium hub:
Please extend DriverFactory.java for example in following way:
``` java
String hubUrl = System.getenv("HUB_URL");
//when running on a Docker daemon (and not using Kubernetes plugin), Docker images will be linked
//in this case hubUrl will be http://selenium:4444/wd/hub due to the linking of the containers
hubUrl = (hubUrl == null) ? "http://localhost:4444/wd/hub" : hubUrl;
Capabilities chromeCapabilities = DesiredCapabilities.chrome();
System.out.println("Running on Selenium Hub: " + hubUrl);
return new RemoteWebDriver(new URL(hubUrl), chromeCapabilities);
```
## ${docGenDescription}
## Prerequisites
none
## ${docGenParameters}
## ${docGenConfiguration}
We recommend to define values of step parameters via [config.yml file](../configuration.md).
## Example
Pipeline step:
@ -41,63 +19,3 @@ Pipeline step:
```groovy
gaugeExecuteTests script: this, testServerUrl: 'http://test.url'
```
## Parameters
| parameter | mandatory | default | possible values |
| ----------|-----------|---------|-----------------|
|script|yes|||
|buildTool|no|`maven`||
|dockerEnvVars|no|`[HUB:TRUE, HUB_URL:http://localhost:4444/wd/hub]`||
|dockerImage|no|buildTool=`maven`: `maven:3.5-jdk-8`<br />buildTool=`npm`: `node:8-stretch`<br />||
|dockerName|no|buildTool=`maven`: `maven`<br />buildTool=`npm`: `npm`<br />||
|dockerWorkspace|no|buildTool=`maven`: <br />buildTool=`npm`: `/home/node`<br />||
|failOnError|no|`false`||
|gitBranch|no|||
|gitSshKeyCredentialsId|no|``||
|installCommand|no|`curl -SsL https://downloads.gauge.org/stable | sh -s -- --location=$HOME/bin/gauge`||
|languageRunner|no|buildTool=`maven`: `java`<br />buildTool=`npm`: `js`<br />||
|runCommand|no|buildTool=`maven`: `mvn test-compile gauge:execute`<br />buildTool=`npm`: `gauge run`<br />||
|stashContent|no|<ul><li>`buildDescriptor`</li><li>`tests`</li></ul>||
|testOptions|no|buildTool=`maven`: `-DspecsDir=specs`<br />buildTool=`npm`: `specs`<br />||
|testRepository|no|||
|testServerUrl|no|||
Details:
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for storing the measured duration.
* `buildTool` defines the build tool to be used for the test execution.
* `dockerEnvVars`, see step [dockerExecute](dockerExecute.md)
* `dockerImage`, see step [dockerExecute](dockerExecute.md)
* `dockerName`, see step [dockerExecute](dockerExecute.md)
* `dockerWorkspace`, see step [dockerExecute](dockerExecute.md)
* With `failOnError` you can define the behavior, in case tests fail. When this is set to `true` test results cannot be recorded using the `publishTestResults` step afterwards.
* `installCommand` defines the command for installing Gauge. In case the `dockerImage` already contains Gauge it can be set to empty: ``.
* `languageRunner` defines the Gauge language runner to be used.
* `runCommand` defines the command which is used for executing Gauge.
* If specific stashes should be considered for the tests, you can pass this via parameter `stashContent`
* `testOptions` allows to set specific options for the Gauge execution. Details can be found for example [in the Gauge Maven plugin documentation](https://github.com/getgauge/gauge-maven-plugin#executing-specs)
* In case the test implementation is stored in a different repository than the code itself, you can define the repository containing the tests using parameter `testRepository` and if required `gitBranch` (for a different branch than master) and `gitSshKeyCredentialsId` (for protected repositories). For protected repositories the `testRepository` needs to contain the ssh git url.
* `testServerUrl` is passed as environment variable `TARGET_SERVER_URL` to the test execution. Tests running against the system should read the host information from this environment variable in order to be infrastructure agnostic.
## Step configuration
We recommend to define values of step parameters via [config.yml file](../configuration.md).
In following sections the configuration is possible:
| parameter | general | step | stage |
| ----------|-----------|---------|-----------------|
|script||||
|buildTool||X|X|
|dockerEnvVars||X|X|
|dockerImage||X|X|
|dockerName||X|X|
|dockerWorkspace||X|X|
|failOnError||X|X|
|gitBranch||X|X|
|gitSshKeyCredentialsId||X|X|
|stashContent||X|X|
|testOptions||X|X|
|testRepository||X|X|
|testServerUrl||X|X|

View File

@ -1,18 +1,6 @@
# githubPublishRelease
# ${docGenStepName}
## Description
This step creates a tag in your GitHub repository together with a release.
The release can be filled with text plus additional information like:
* Closed pull request since last release
* Closed issues since last release
* link to delta information showing all commits since last release
The result looks like
![Example release](../images/githubRelease.png)
## ${docGenDescription}
## Prerequisites
@ -20,6 +8,10 @@ You need to create a personal access token within GitHub and add this to the Jen
Please see [GitHub documentation for details about creating the personal access token](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/).
## ${docGenParameters}
## ${docGenConfiguration}
## Example
Usage of pipeline step:
@ -27,52 +19,3 @@ Usage of pipeline step:
```groovy
githubPublishRelease script: this, releaseBodyHeader: "**This is the latest success!**<br />"
```
## Parameters
| parameter | mandatory | default | possible values |
| ----------|-----------|---------|-----------------|
|script|yes|||
|addClosedIssues|no|`false`||
|addDeltaToLastRelease|no|`false`||
|customFilterExtension|no|``||
|excludeLabels|no|<ul><li>`duplicate`</li><li>`invalid`</li><li>`question`</li><li>`wontfix`</li></ul>||
|githubApiUrl|no|`https://api.github.com`||
|githubOrg|yes|`script.commonPipelineEnvironment.getGitFolder()`||
|githubRepo|yes|`script.commonPipelineEnvironment.getGitRepo()`||
|githubServerUrl|no|`https://github.com`||
|githubTokenCredentialsId|yes|||
|releaseBodyHeader|no|||
|version|yes|`script.commonPipelineEnvironment.getArtifactVersion()`||
### Details
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for storing the measured duration.
* All GitHub related properties allow you to overwrite the default behavior of identifying e.g. GitHub organization, GitHub repository.
* `version` defines the version number which will be written as tag as well as release name
* By defining the `releaseBodyHeader` you can specify the content which will appear for the release
* If you set `addClosedIssues` to `true`, a list of all closed issues and merged pull-requests since the last release will added below the `releaseBodyHeader`
* If you set `addDeltaToLastRelease` to `true`, a link will be added to the release information that brings up all commits since the last release.
* By passing the parameter `customFilterExtension` it is possible to pass additional filter criteria for retrieving closed issues since the last release. Additional criteria could be for example specific `label`, or `filter` according to [GitHub API documentation](https://developer.github.com/v3/issues/).
* It is possible to exclude issues with dedicated labels using parameter `excludeLabels`. Usage is like `excludeLabels: ['label1', 'label2']`
## Step configuration
We recommend to define values of step parameters via [config.yml file](../configuration.md).
In following sections the configuration is possible:
| parameter | general | step | stage |
| ----------|-----------|---------|-----------------|
|script||||
|addClosedIssues||X|X|
|addDeltaToLastRelease||X|X|
|customFilterExtension||X|X|
|excludeLabels||X|X|
|githubApiUrl|X|X|X|
|githubOrg||X|X|
|githubRepo||X|X|
|githubServerUrl|X|X|X|
|githubTokenCredentialsId|X|X|X|
|releaseBodyHeader||X|X|
|version||X|X|

View File

@ -1,58 +1,14 @@
# handlePipelineStepErrors
# ${docGenStepName}
## Description
Used by other steps to make error analysis easier. Lists parameters and other data available to the step in which the error occurs.
## ${docGenDescription}
## Prerequisites
none
## Parameters
## ${docGenParameters}
| parameter | mandatory | default | possible values |
| -----------------|-----------|---------|-----------------|
| `stepParameters` | yes | | |
| `stepName` | yes | | |
| `echoDetails` | yes | true | true, false |
* `stepParameters` - The parameters from the step to be executed. The list of parameters is then shown in the console output.
* `stepName` - The name of the step executed to be shown in the console output.
* `echoDetails` - If set to true the following will be output to the console:
1. Step beginning: `--- Begin library step: ${stepName}.groovy ---`
2. Step end: `--- End library step: ${stepName}.groovy ---`
3. Step errors:
```log
----------------------------------------------------------
--- An error occurred in the library step: ${stepName}
----------------------------------------------------------
The following parameters were available to the step:
***
${stepParameters}
***
The error was:
***
${err}
***
Further information:
* Documentation of step ${stepName}: .../${stepName}/
* Pipeline documentation: https://...
* GitHub repository for pipeline steps: https://...
----------------------------------------------------------
```
## Step configuration
none
## Side effects
none
## Exceptions
none
## ${docGenConfiguration}
## Example
@ -61,6 +17,33 @@ handlePipelineStepErrors (stepName: 'executeHealthCheck', stepParameters: parame
def url = new Utils().getMandatoryParameter(parameters, 'url', null)
def statusCode = curl(url)
if (statusCode != '200')
error "Health Check failed: ${statusCode}"
error "Health Check failed: \${statusCode}"
}
```
## Example console output
If `echoDetails` is set to true the following information will be output to the console:
1. Step beginning: `--- Begin library step: \${stepName}.groovy ---`
1. Step end: `--- End library step: \${stepName}.groovy ---`
1. Step errors:
```log
----------------------------------------------------------
--- An error occurred in the library step: \${stepName}
----------------------------------------------------------
The following parameters were available to the step:
***
\${stepParameters}
***
The error was:
***
\${err}
***
Further information:
* Documentation of step \${stepName}: .../\${stepName}/
* Pipeline documentation: https://...
* GitHub repository for pipeline steps: https://...
----------------------------------------------------------
```

View File

@ -1,19 +1,6 @@
# healthExecuteCheck
# ${docGenStepName}
## Description
Calls the health endpoint url of the application.
The intention of the check is to verify that a suitable health endpoint is available. Such a health endpoint is required for operation purposes.
This check is used as a real-life test for your productive health endpoints.
!!! note "Check Depth"
Typically, tools performing simple health checks are not too smart. Therefore it is important to choose an endpoint for checking wisely.
This check therefore only checks if the application/service url returns `HTTP 200`.
This is in line with health check capabilities of platforms which are used for example in load balancing scenarios. Here you can find an [example for Amazon AWS](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-healthchecks.html).
## ${docGenDescription}
## Prerequisites
@ -25,6 +12,10 @@ Endpoint for health check is configured.
!!! tip
    If using Spring Boot framework, ideally the provided `/health` endpoint is used and extended by development. Further information can be found in the [Spring Boot documentation for Endpoints](http://docs.spring.io/spring-boot/docs/current/reference/html/production-ready-endpoints.html)
## ${docGenParameters}
## ${docGenConfiguration}
## Example
Pipeline step:
@ -32,29 +23,3 @@ Pipeline step:
```groovy
healthExecuteCheck testServerUrl: 'https://testserver.com'
```
## Parameters
| parameter | mandatory | default | possible values |
| ----------|-----------|---------|-----------------|
|script|yes|||
|healthEndpoint|no|``||
|testServerUrl|no|||
Details:
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for storing the measured duration.
* Health check function is called providing full qualified `testServerUrl` (and optionally with `healthEndpoint` if endpoint is not the standard url) to the health check.
* In case response of the call is different than `HTTP 200 OK` the **health check fails and the pipeline stops**.
## Step configuration
We recommend to define values of step parameters via [config.yml file](../configuration.md).
In following sections the configuration is possible:
| parameter | general | step | stage |
| ----------|-----------|---------|-----------------|
|script||||
|healthEndpoint|X|X|X|
|testServerUrl|X|X|X|

View File

@ -1,22 +1,6 @@
# influxWriteData
# ${docGenStepName}
## Description
Since your Continuous Delivery Pipeline in Jenkins provides your productive development and delivery infrastructure you should monitor the pipeline to ensure it runs as expected. How to setup this monitoring is described in the following.
You basically need three components:
- The [InfluxDB Jenkins plugin](https://wiki.jenkins-ci.org/display/JENKINS/InfluxDB+Plugin) which allows you to send build metrics to InfluxDB servers
- The [InfluxDB](https://www.influxdata.com/time-series-platform/influxdb/) to store this data (Docker available)
- A [Grafana](http://grafana.org/) dashboard to visualize the data stored in InfluxDB (Docker available)
!!! note "no InfluxDB available?"
If you don't have an InfluxDB available yet this step will still provide you some benefit.
It will create following files for you and archive them into your build:
* `jenkins_data.json`: This file gives you build-specific information, like e.g. build result, stage where the build failed
* `influx_data.json`: This file gives you detailed information about your pipeline, e.g. stage durations, steps executed, ...
## ${docGenDescription}
## Prerequisites
@ -31,8 +15,8 @@ Very basic setup can be done like that (with user "admin" and password "adminPwd
For more advanced setup please reach out to the respective documentation:
- https://hub.docker.com/_/influxdb/ (and https://github.com/docker-library/docs/tree/master/influxdb)
- https://hub.docker.com/r/grafana/grafana/ (and https://github.com/grafana/grafana-docker)
- InfluxDB ([Docker Hub](https://hub.docker.com/_/influxdb/) [GitHub](https://github.com/docker-library/docs/tree/master/influxdb))
- Grafana ([Docker Hub](https://hub.docker.com/r/grafana/grafana/) [GitHub](https://github.com/grafana/grafana-docker))
After you have started your InfluxDB docker you need to create a database:
@ -79,37 +63,9 @@ You need to define the influxDB server in your pipeline as it is defined in the
influxDBServer=jenkins
```
## Parameters
## ${docGenParameters}
| parameter | mandatory | default | possible values |
| ----------|-----------|---------|-----------------|
|script|yes|||
|artifactVersion|no|`commonPipelineEnvironment.getArtifactVersion()`||
|customData|no|`commonPipelineEnvironment.getInfluxCustomData()`||
|customDataMap|no|`commonPipelineEnvironment.getInfluxCustomDataMap()`||
|customDataMapTags|no|`commonPipelineEnvironment.getInfluxCustomDataTags()`||
|customDataTags|no|`commonPipelineEnvironment.getInfluxCustomDataTags()`||
|influxPrefix|no|||
|influxServer|no|`''`||
|wrapInNode|no|`false`||
## Step configuration
We recommend to define values of step parameters via [config.yml file](../configuration.md).
In following sections the configuration is possible:
| parameter | general | step | stage |
| ----------|-----------|---------|-----------------|
|script||||
|artifactVersion||X|X|
|customData||X|X|
|customDataMap||X|X|
|customDataMapTags||X|X|
|customDataTags||X|X|
|influxPrefix||X|X|
|influxServer||X|X|
|wrapInNode||X|X|
## ${docGenConfiguration}
## Example
@ -144,7 +100,7 @@ As a first step you need to add your InfluxDB as Data source to your Grafana:
The Influx plugin collects following data in the Piper context:
- All data as per default [InfluxDB plugin capabilities](https://wiki.jenkins.io/display/JENKINS/InfluxDB+Plugin)
- Additional data collected via `commonPipelineEnvironment.setInfluxCustomDataProperty()` and via `commonPipelineEnvironment.setPipelineMeasurement()`
- Additional data collected via `InfluxData.addField(measurement, key, value)`
!!! note "Add custom information to your InfluxDB"
You can simply add custom data collected during your pipeline runs via available data objects.
@ -169,7 +125,7 @@ Measurements are potentially pre-fixed - see parameter `influxPrefix` above.
| sonarqube_data | <ul><li>blocker_issues</li><li>critical_issues</li><li>info_issues</li><li>major_issues</li><li>minor_issues</li><li>lines_of_code</li><li>...</li></ul> | Details see [InfluxDB plugin documentation](https://wiki.jenkins.io/display/JENKINS/InfluxDB+Plugin) |
| jenkins_custom_data | Piper fills following columns by default: <br /><ul><li>build_result</li><li>build_result_key</li><li>build_step (->step in case of error)</li><li>build_error (->error message in case of error)</li></ul> | filled by `commonPipelineEnvironment.setInfluxCustomDataProperty()` |
| pipeline_data | Examples from the Piper templates:<br /><ul><li>build_duration</li><li>opa_duration</li><li>deploy_test_duration</li><li>deploy_test_duration</li><li>fortify_duration</li><li>release_duration</li><li>...</li></ul>| filled by step [`measureDuration`](durationMeasure.md) using parameter `measurementName`|
| step_data | Considered, e.g.:<br /><ul><li>build_url</li><li>bats</li><li>checkmarx</li><li>fortify</li><li>gauge</li><li>nsp</li><li>snyk</li><li>sonar</li><li>...</li></ul>| filled by `commonPipelineEnvironment.setInfluxStepData()` |
| step_data | Considered, e.g.:<br /><ul><li>build_url</li><li>bats</li><li>checkmarx</li><li>fortify</li><li>gauge</li><li>nsp</li><li>snyk</li><li>sonar</li><li>...</li></ul>| filled by `InfluxData.addField('step_data', key, value)` |
### Examples for InfluxDB queries which can be used in Grafana

View File

@ -0,0 +1,27 @@
# ${docGenStepName}
## ${docGenDescription}
## Prerequisites
When pushing to a container registry, you need to maintain the respective credentials in your Jenkins credentials store:
Kaniko expects a Docker `config.json` file containing the credential information for registries.
You can create it like explained in the Docker Success Center in the article about [How to generate a new auth in the config.json file](https://success.docker.com/article/generate-new-auth-in-config-json-file).
Please copy this file and upload it to your Jenkins for example<br />
via _Jenkins_ -> _Credentials_ -> _System_ -> _Global credentials (unrestricted)_ -> _ Add Credentials_ ->
* Kind: _Secret file_
* File: upload your `config.json` file
* ID: specify id which you then use for the configuration of `dockerConfigJsonCredentialsId` (see below)
## Example
```groovy
kanikoExecute script:this
```
## ${docGenParameters}
## ${docGenConfiguration}

View File

@ -1,47 +1,10 @@
# mavenExecute
# ${docGenStepName}
## Description
## ${docGenDescription}
Executes a maven command inside a Docker container.
## ${docGenParameters}
## Parameters
| parameter | mandatory | default | example values |
| -------------------------------|-----------|-------------------|----------------------------|
| `script` | yes | | |
| `dockerImage` | no | 'maven:3.5-jdk-7' | |
| `globalSettingsFile` | no | | 'local_folder/settings.xml'|
| `projectSettingsFile` | no | | |
| `pomPath` | no | | 'local_folder/m2' |
| `flags` | no | | '-o' |
| `goals` | no | | 'clean install' |
| `m2Path` | no | | 'local_folder/m2' |
| `defines` | no | | '-Dmaven.tests.skip=true' |
| `logSuccessfulMavenTransfers` | no | `false` | 'true' |
* `script` defines the global script environment of the Jenkinsfile run.
Typically `this` is passed to this parameter. This allows the function
to access the commonPipelineEnvironment for retrieving, for example,
configuration parameters.
* `dockerImage` Name of the docker image that should be used.
* `globalSettingsFile` Path or url to the mvn settings file that should be used as global settings file.
* `projectSettingsFile` Path or url to the mvn settings file that should be used as project settings file.
* `pomPath` Path to the pom file that should be used.
* `flags` Flags to provide when running mvn.
* `goals` Maven goals that should be executed.
* `m2Path` Path to the location of the local repository that should be used.
* `defines` Additional properties.
* `logSuccessfulMavenTransfers` configures maven to log successful downloads. This is set to `false` by default to reduce the noise in build logs.
## Step configuration
The following parameters can also be specified as step parameters using the global configuration file:
* `dockerImage`
* `globalSettingsFile`
* `projectSettingsFile`
* `pomPath`
* `m2Path`
## ${docGenConfiguration}
## Exceptions

View File

@ -1,44 +1,18 @@
# mtaBuild
# ${docGenStepName}
## Description
## ${docGenDescription}
Executes the SAP Multitarget Application Archive Builder to create an mtar archive of the application.
## Prerequisites
Before doing this, validates that SAP Multitarget Application Archive Builder exists and the version is compatible.
While using a custom docker file, ensure that the following tools are installed:
Note that a version is formed by `major.minor.patch`, and a version is compatible to another version if the minor and patch versions are higher, but the major version is not, e.g. if 3.39.10 is the expected version, 3.39.11 and 3.40.1 would be compatible versions, but 4.0.1 would not be a compatible version.
* **SAP MTA Archive Builder 1.0.6 or compatible version** - can be downloaded from [SAP Development Tools](https://tools.hana.ondemand.com/#cloud).
* **Java 8 or compatible version** - necessary to run the *MTA Archive Builder* itself and to build Java modules.
* **NodeJS installed** - the MTA Builder uses `npm` to download node module dependencies such as `grunt`.
## Parameters
## ${docGenParameters}
| parameter | mandatory | default | possible values |
| -----------------|-----------|--------------------------------------------------------|--------------------|
| `script` | yes | | |
| `dockerImage` | no | `ppiper/mta-archive-builder` | |
| `dockerOptions` | no | '' | |
| `buildTarget` | yes | `'NEO'` | 'CF', 'NEO', 'XSA' |
| `extension` | no | | |
| `mtaJarLocation` | no | `'/opt/sap/mta/lib/mta.jar'` | |
| `applicationName`| no | | |
* `script` - The common script environment of the Jenkinsfile running. Typically the reference to the script calling the pipeline step is provided with the `this` parameter, as in `script: this`. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for retrieving, for example, configuration parameters.
* `dockerImage` - The Docker image to execute the MTA build.
Note that you can provide your own image if required, but for most cases, the default should be fine.
* `dockerOptions` Docker options to be set when starting the container. It can be a list or a string.
* `buildTarget` - The target platform to which the mtar can be deployed.
* `extension` - The path to the extension descriptor file.
* `mtaJarLocation` - The location of the SAP Multitarget Application Archive Builder jar file, including file name and extension. First, the location is retrieved from the environment variables using the environment variable `MTA_JAR_LOCATION`. If no environment variable is provided, the location is retrieved from the parameters, or the step configuration using the key `mtaJarLocation`. If SAP Multitarget Application Archive Builder is not found on one of the previous locations an AbortException is thrown.
Note that the environment variable `MTA_JAR_LOCATION` has priority. In case that the script runs on multiple nodes, SAP Multitarget Application Archive Builder must be located on all the nodes, therefore the environment variable must be also configured on all the nodes.
* `applicationName` - The name of the application which is being built. If the parameter has been provided and no `mta.yaml` exists, the `mta.yaml` will be automatically generated using this parameter and the information (`name` and `version`) from `package.json` before the actual build starts.
## Step configuration
The following parameters can also be specified as step parameters using the global configuration file:
* `dockerImage`
* `buildTarget`
* `extension`
* `mtaJarLocation`
* `applicationName`
## ${docGenConfiguration}
## Side effects
@ -47,7 +21,6 @@ The following parameters can also be specified as step parameters using the glob
## Exceptions
* `AbortException`:
* If SAP Multitarget Application Archive Builder is not found.
* If there is an invalid `buildTarget`.
* If there is no key `ID` inside the `mta.yaml` file.

View File

@ -0,0 +1,18 @@
# ${docGenStepName}
## ${docGenDescription}
## ${docGenParameters}
## ${docGenConfiguration}
## Examples
```groovy
multicloudDeploy(
script: script,
cfTargets: [[apiEndpoint: 'https://test.server.com', appName:'cfAppName', credentialsId: 'cfCredentialsId', manifest: 'cfManifest', org: 'cfOrg', space: 'cfSpace']],
    neoTargets: [[credentialsId: 'my-credentials-id', host: 'hana.example.org', account: 'trialuser1']],
enableZeroDowntimeDeployment: 'true'
)
```

View File

@ -1,12 +1,6 @@
# neoDeploy
# ${docGenStepName}
## Description
Deploys an Application to SAP Cloud Platform (SAP CP) using the SAP Cloud Platform Console Client (Neo Java Web SDK).
Before doing this, validates that SAP Cloud Platform Console Client is installed and the version is compatible.
Note that a version is formed by `major.minor.patch`, and a version is compatible to another version if the minor and patch versions are higher, but the major version is not, e.g. if 3.39.10 is the expected version, 3.39.11 and 3.40.1 would be compatible versions, but 4.0.1 would not be a compatible version.
## ${docGenDescription}
## Prerequisites
@ -20,97 +14,9 @@ Note that a version is formed by `major.minor.patch`, and a version is compatibl
* **Java 8 or compatible version** - needed by the *Neo-Java-Web-SDK*. Java environment needs to be properly configured (JAVA_HOME, java executable contained in path).
## Parameters when using MTA deployment method (default - MTA)
## ${docGenParameters}
| parameter | mandatory | default | possible values |
| -------------------|-----------|-------------------------------|-------------------------------------------------|
| `script` | yes | | |
| `neo` | no | | |
| `deployMode` | yes | `'mta'` | `'mta'`, `'warParams'`, `'warPropertiesFile'` |
| `neoHome` | no | | |
| `source` | no | | |
The parameter `neo` is a map which contains the following parameters:
| parameter | mandatory | default | possible values |
| -------------------|-----------|-------------------------------|-------------------------------------------------|
| `account` | no | | |
| `credentialsId` | no | `'CI_CREDENTIALS_ID'` | |
| `host` | no | | |
## Parameters when using WAR file deployment method with .properties file (WAR_PROPERTIESFILE)
| parameter | mandatory | default | possible values |
| -------------------|-----------|-------------------------------|-------------------------------------------------|
| `script` | yes | | |
| `neo` | no | | |
| `deployMode` | yes | `'mta'` | `'mta'`, `'warParams'`, `'warPropertiesFile'` |
| `neoHome` | no | | |
| `source` | no | | |
| `warAction` | yes | `'deploy'` | `'deploy'`, `'rolling-update'` |
The parameter `neo` is a map which contains the following parameters:
| parameter | mandatory | default | possible values |
| -------------------|-----------|-------------------------------|-------------------------------------------------|
| `credentialsId` | no | `'CI_CREDENTIALS_ID'` | |
| `propertiesFile` | yes | | |
## Parameters when using WAR file deployment method without .properties file - with parameters (WAR_PARAMS)
| parameter | mandatory | default | possible values |
| -------------------|-----------|-------------------------------|-------------------------------------------------|
| `script` | yes | | |
| `neo` | no | | |
| `deployMode` | yes | `'mta'` | `'mta'`, `'warParams'`, `'warPropertiesFile'` |
| `neoHome` | no | | |
| `source` | no | | |
| `warAction` | yes | `'deploy'` | `'deploy'`, `'rolling-update'` |
The parameter `neo` is a map which contains the following parameters:
| parameter | mandatory | default | possible values |
| -------------------|-----------|-------------------------------|-------------------------------------------------|
| `account` | yes | | |
| `application` | yes | | |
| `credentialsId` | no | `'CI_CREDENTIALS_ID'` | |
| `environment` | | | |
| `host` | yes | | |
| `runtime` | yes | | |
| `runtimeVersion` | yes | | |
| `size` | no | `'lite'` | `'lite'`, `'pro'`, `'prem'`, `'prem-plus'` |
| `vmArguments` | | | |
* `script` - The common script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for retrieving e.g. configuration parameters.
* `deployMode` - The deployment mode which should be used. Available options are `'mta'` (default), `'warParams'` (deploying WAR file and passing all the deployment parameters via the function call) and `'warPropertiesFile'` (deploying WAR file and putting all the deployment parameters in a .properties file)
* `neoHome` - The path to the `neo-java-web-sdk` tool used for SAP CP deployment. If no parameter is provided, the path is retrieved from the environment variables using the environment variable `NEO_HOME`. If no parameter and no environment variable is provided, the path is retrieved from the step configuration using the step configuration key `neoHome`. If the previous configurations are not provided, the tool is expected on the `PATH`, and if it is not available on the `PATH` an AbortException is thrown.
* `source` - The path to the archive for deployment to SAP CP. If not provided `mtarFilePath` from common pipeline environment is used instead.
* `warAction` - Action mode when using WAR file mode. Available options are `deploy` (default) and `rolling-update` which performs update of an application without downtime in one go.
The parameters for `neo`:
* `account` - The SAP Cloud Platform account to deploy to.
* `application` - Name of the application you want to manage, configure, or deploy
* `credentialsId` - The Jenkins credentials containing user and password used for SAP CP deployment.
* `environment` - Map of environment variables in the form of KEY: VALUE
* `host` - The SAP Cloud Platform host to deploy to.
* `propertiesFile` - The path to the .properties file in which all necessary deployment properties for the application are defined.
* `runtime` - Name of SAP Cloud Platform application runtime
* `runtimeVersion` - Version of SAP Cloud Platform application runtime
* `size` - Compute unit (VM) size. Acceptable values: lite, pro, prem, prem-plus.
* `vmArguments` - String of VM arguments passed to the JVM
The step is prepared for being executed in docker. The corresponding parameters can be applied. See step `dockerExecute` for details.
## Step configuration
The parameter `neo` including all options can also be specified as a global parameter using the global configuration file.
The following parameters can also be specified as step parameters using the global configuration file:
* `dockerImage`
* `neoHome`
* `source`
## ${docGenConfiguration}
## Side effects

View File

@ -1,7 +1,6 @@
# ${docGenStepName}
## ${docGenDescription}
## ${docGenParameters}

View File

@ -1,34 +1,14 @@
# pipelineExecute
# ${docGenStepName}
## Description
Loads a pipeline from a git repository. The idea is to set up a pipeline job in Jenkins that loads a minimal pipeline, which in turn loads the shared library and then uses this step to load the actual pipeline.
A centrally maintained pipeline script (Jenkinsfile) can be re-used by
several projects using `pipelineExecute` as outlined in the example
below.
## ${docGenDescription}
## Prerequisites
none
## Parameters
## ${docGenParameters}
| parameter | mandatory | default | possible values |
| -------------------|-----------|-----------------|-----------------|
| `repoUrl` | yes | | |
| `branch` | no | 'master' | |
| `path` | no | 'Jenkinsfile' | |
| `credentialsId` | no | An empty String | |
* `repoUrl` The url to the git repository of the pipeline to be loaded.
* `branch` The branch of the git repository from which the pipeline should be checked out.
* `path` The path to the Jenkinsfile, inside the repository, to be loaded.
* `credentialsId` The Jenkins credentials containing user and password needed to access a private git repository.
## Step configuration
none
## ${docGenConfiguration}
## Side effects

View File

@ -1,25 +1,15 @@
# pipelineRestartSteps
# ${docGenStepName}
## Description
Support of restarting failed stages or steps in a pipeline is limited in Jenkins.
This has been documented in the [Jenkins Jira issue JENKINS-33846](https://issues.jenkins-ci.org/browse/JENKINS-33846).
For declarative pipelines there is a solution available which partially addresses this topic:
https://jenkins.io/doc/book/pipeline/running-pipelines/#restart-from-a-stage.
Nonetheless, some features are still missing, so it can't be used in all cases.
The more complex Piper pipelines which share a state via [`commonPipelineEnvironment`](commonPipelineEnvironment.md) will for example not work with the standard _restart-from-stage_.
The step `pipelineRestartSteps` aims to address this gap and allows individual parts of a pipeline (e.g. a failed deployment) to be restarted.
This is done in a way that the pipeline waits for user input to restart the pipeline in case of a failure. In case this user input is not provided the pipeline stops after a timeout which can be configured.
## ${docGenDescription}
## Prerequisites
none
## ${docGenParameters}
## ${docGenConfiguration}
## Example
Usage of pipeline step:
@ -37,32 +27,6 @@ pipelineRestartSteps (script: this) {
In case you cannot use `node` inside this step, please choose the parameter `timeoutInSeconds` carefully!
## Parameters
| parameter | mandatory | default | possible values |
| ----------|-----------|---------|-----------------|
|script|yes|||
|sendMail|no|`true`||
|timeoutInSeconds|no|`900`||
### Details
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for storing the measured duration.
* If `sendMail: true` the step `mailSendNotification` will be triggered in case of an error
* `timeoutInSeconds` defines the time period where the job waits for input. Default is 15 minutes. Once this time is passed the job enters state FAILED.
## Step configuration
We recommend to define values of step parameters via [config.yml file](../configuration.md).
In following sections the configuration is possible:
| parameter | general | step | stage |
| ----------|-----------|---------|-----------------|
|script||||
|sendMail|X|X|X|
|timeoutInSeconds|X|X|X|
## Side effects
none

View File

@ -1,22 +1,12 @@
# pipelineStashFiles
# ${docGenStepName}
## Description
This step stashes files that are needed in other build steps (on other nodes).
## ${docGenDescription}
## Prerequisites
none
## Parameters
| parameter | mandatory | default | possible values |
| ----------|-----------|---------|-----------------|
| script | yes | | |
| runCheckmarx | no | false | |
| runOpaTests | no | false | |
| stashIncludes | no | see details | |
| stashExcludes | no | see details | |
## ${docGenParameters}
Details:
@ -42,14 +32,7 @@ The step is stashing files before and after the build. This is due to the fact,
* `stashIncludes: [buildDescriptor: '**/mybuild.yml]`
* `stashExcludes: [tests: '**/NOTRELEVANT.*]`
## Step configuration
The following parameters can also be specified as step parameters using the global configuration file:
* runOpaTests
* runCheckmarx
* stashExcludes
* stashIncludes
## ${docGenConfiguration}
## Explanation of pipeline step

View File

@ -0,0 +1,11 @@
# ${docGenStepName}
## ${docGenDescription}
## Prerequisites
none
## ${docGenParameters}
## ${docGenConfiguration}

View File

@ -0,0 +1,11 @@
# ${docGenStepName}
## ${docGenDescription}
## Prerequisites
none
## ${docGenParameters}
## ${docGenConfiguration}

View File

@ -0,0 +1,7 @@
# ${docGenStepName}
## ${docGenDescription}
## ${docGenParameters}
## ${docGenConfiguration}

View File

@ -1,17 +1,10 @@
# prepareDefaultValues
# ${docGenStepName}
## Description
## ${docGenDescription}
Loads the pipeline library default values from the file `resources/default_pipeline_environment.yml`.
Afterwards the values can be loaded by the method: `ConfigurationLoader.defaultStepConfiguration`
## ${docGenParameters}
## Parameters
None
## Step configuration
None
## ${docGenConfiguration}
## Exceptions

View File

@ -18,7 +18,7 @@ seleniumExecuteTests (script: this) {
### Example test using WebdriverIO
Example based on http://webdriver.io/guide/getstarted/modes.html and http://webdriver.io/guide.html
Example based on <http://webdriver.io/guide/getstarted/modes.html> and <http://webdriver.io/guide.html>
#### Configuration for Local Docker Environment

View File

@ -1,30 +1,14 @@
# setupCommonPipelineEnvironment
# ${docGenStepName}
## Description
Initializes the [`commonPipelineEnvironment`](commonPipelineEnvironment.md), which is used throughout the complete pipeline.
!!! tip
This step needs to run at the beginning of a pipeline right after the SCM checkout.
Then subsequent pipeline steps consume the information from `commonPipelineEnvironment`; it does not need to be passed to pipeline steps explicitly.
## ${docGenDescription}
## Prerequisites
* A **configuration file** with properties (default location: `.pipeline/config.properties`). The property values are used as default values in many pipeline steps.
* A **configuration file** with properties. The property values are used as default values in many pipeline steps.
## Parameters
## ${docGenParameters}
| parameter | mandatory | default | possible values |
| ------------ |-----------|-------------------------------|-----------------|
| `script` | yes | - | |
| `configFile` | no | `.pipeline/config.properties` | |
* `script` - The reference to the pipeline script (Jenkinsfile). Normally `this` needs to be provided.
* `configFile` - Property file defining project specific settings.
## Step configuration
none
## ${docGenConfiguration}
## Side effects

View File

@ -4,7 +4,9 @@
## Prerequisites
* Installed and configured [Jenkins Slack plugin](https://github.com/jenkinsci/slack-plugin).
* Installed and configured [Slack JenkinsCI integration](https://my.slack.com/services/new/jenkins-ci)
* *secret text* Jenkins credentials with the Slack token
* Installed and configured [Jenkins Slack plugin](https://github.com/jenkinsci/slack-plugin#install-instructions-for-slack).
## ${docGenParameters}

View File

@ -0,0 +1,18 @@
# ${docGenStepName}
## ${docGenDescription}
## Prerequisites
- The project needs a `sonar-project.properties` file that describes the project and defines certain settings, see [here](https://docs.sonarqube.org/display/SCAN/Advanced+SonarQube+Scanner+Usages#AdvancedSonarQubeScannerUsages-Multi-moduleProjectStructure).
- A SonarQube instance needs to be defined in Jenkins.
## ${docGenParameters}
## ${docGenConfiguration}
## Exceptions
none
## Examples

View File

@ -1,8 +1,6 @@
# testsPublishResults
# ${docGenStepName}
## Description
This step can publish test results from various sources.
## ${docGenDescription}
## Prerequisites
@ -28,29 +26,7 @@ testsPublishResults(
)
```
Available parameters:
| parameter | mandatory | default | possible values |
| ----------|-----------|---------|-----------------|
| script | yes | | |
| `failOnError` | no | `false` | `true`, `false` |
| junit | no | `false` | true, false |
| jacoco | no | `false` | true, false |
| cobertura | no | `false` | true, false |
| jmeter | no | `false` | true, false |
* `script` - The common script environment of the Jenkinsfile running.
Typically the reference to the script calling the pipeline step is provided
with the `this` parameter, as in `script: this`.
This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md)
for retrieving, for example, configuration parameters.
* `failOnError` - If `failOnError` is set to `true` the step will fail the build if JUnit detected any failing tests.
* `junit` - Publishes test results files in JUnit format with the [JUnit Plugin](https://plugins.jenkins.io/junit).
* `jacoco` - Publishes code coverage with the [JaCoCo plugin](https://plugins.jenkins.io/jacoco) .
* `cobertura` - Publishes code coverage with the [Cobertura plugin](https://plugins.jenkins.io/cobertura).
* `jmeter` - Publishes performance test results with the [Performance plugin](https://plugins.jenkins.io/performance).
Each of the parameters `junit`, `jacoco`, `cobertura` and `jmeter` can be set to `true` or `false` but also to a map of parameters to hand in different settings for the tools.
## ${docGenParameters}
### junit
@ -101,14 +77,7 @@ Each of the parameters `junit`, `jacoco`, `cobertura` and `jmeter` can be set to
| archive | no | `false` | true, false |
| allowEmptyResults | no | `true` | true, false |
## Step configuration
Following parameters can also be specified as step parameters using the global configuration file:
* `junit`
* `jacoco`
* `cobertura`
* `jmeter`
## ${docGenConfiguration}
## Side effects

View File

@ -1,41 +0,0 @@
# toolValidate
## Description
Checks the existence and compatibility of a tool, necessary for a successful pipeline execution.
In case a violation is found, an exception is raised.
## Prerequisites
none
## Parameters
| parameter | mandatory | default | possible values |
| -----------------|-----------|-----------------------------------|----------------------------|
| `tool` | yes | | 'java', 'mta', 'neo' |
| `home` | yes | | |
* `tool` The tool that is checked for existence and compatible version.
* `home` The location in the file system where Jenkins can access the tool.
## Step configuration
none
## Side effects
none
## Exceptions
* `IllegalArgumentException`:
* If at least one of the parameters `tool`, `home` is not provided.
* `AbortException`:
* If `tool` is not supported.
## Example
```groovy
toolValidate tool: 'neo', home:'/path/to/neo-java-web-sdk'
```

View File

@ -1,53 +1,15 @@
# transportRequestCreate
# ${docGenStepName}
## Description
Creates
* a Transport Request for a Change Document on the Solution Manager (type `SOLMAN`) or
* a Transport Request inside an ABAP system (type `CTS`)
The id of the transport request is available via [commonPipelineEnvironment.getTransportRequestId()](commonPipelineEnvironment.md)
## ${docGenDescription}
## Prerequisites
* **[Change Management Client 2.0.0 or compatible version](http://central.maven.org/maven2/com/sap/devops/cmclient/dist.cli/)** - available for download on Maven Central.
* Solution Manager version `ST720 SP08` or newer.
## Parameters
## ${docGenParameters}
| parameter | mandatory | default | possible values |
| -----------------|-----------|--------------------------------------------------------|--------------------|
| `script` | yes | | |
| `changeDocumentId` | for `SOLMAN` | | |
| `transportType` | for `CTS` | no | |
| `targetSystem` | for `CTS` | no | |
| `description` | for `CTS` | no | |
| `changeManagement/credentialsId` | yes | | |
| `changeManagement/endpoint` | yes | | |
| `changeManagement/clientOpts` | no | | |
| `changeManagement/git/from` | no | `origin/master` | |
| `changeManagement/git/to` | no | `HEAD` | |
| `changeManagement/changeDocumentLabel` | no | `ChangeDocument\s?:` | regex pattern |
| `changeManagement/git/format` | no | `%b` | see `git log --help` |
| `changeManagement/type` | no | `SOLMAN` | `SOLMAN`, `CTS` |
| `developmentSystemId` | for `SOLMAN` | | |
* `script` - The common script environment of the Jenkinsfile running. Typically the reference to the script calling the pipeline step is provided with the `this` parameter, as in `script: this`. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for retrieving, for example, configuration parameters.
* `changeDocumentId` - for `SOLMAN` only. The id of the change document to that the transport request is bound to. Typically this value is provided via commit message in the commit history.
* `changeManagement/type` Where/how the transport request is created (via SAP Solution Manager, ABAP).
* `changeManagement/credentialsId` - The credentials to connect to the service endpoint (Solution Manager, ABAP System).
* `changeManagement/endpoint` - The service endpoint (Solution Manager, ABAP System).
* `changeManagement/clientOpts` - Options forwarded to the JVM used by the CM client, like `JAVA_OPTS`
* `changeManagement/git/from` - The starting point for retrieving the change document id
* `changeManagement/git/to` - The end point for retrieving the change document id
* `changeManagement/changeDocumentLabel` - For type `SOLMAN` only. A pattern used for identifying lines holding the change document id.
* `changeManagement/git/format` - Specifies what part of the commit is scanned. By default the body of the commit message is scanned.
* `description` - for `CTS` only. The description of the transport request.
* `targetSystem` - for `CTS` only. The system receiving the transport request.
* `transportType` - for type `CTS` only. Typically `W` (workbench) or `C` (customizing).
* `developmentSystemId` - for `SOLMAN` only. The logical system id for which the transport request is created. The format is `<SID>~<TYPE>(/<CLIENT>)?`. For ABAP Systems the `developmentSystemId` looks like `DEV~ABAP/100`. For non-ABAP systems the `developmentSystemId` looks like e.g. `L21~EXT_SRV` or `J01~JAVA`. In case the system type is not known (in the examples provided here: `EXT_SRV` or `JAVA`) the information can be retrieved from the Solution Manager instance.
## Step configuration
## ${docGenConfiguration}
The step is configured using a customer configuration file provided as
a resource in a custom shared library.

View File

@ -1,41 +1,14 @@
# transportRequestRelease
# ${docGenStepName}
## Description
Releases a Transport Request.
## ${docGenDescription}
## Prerequisites
* **[Change Management Client 2.0.0 or compatible version](http://central.maven.org/maven2/com/sap/devops/cmclient/dist.cli/)** - available for download on Maven Central.
## Parameters
## ${docGenParameters}
| parameter | mandatory | default | possible values |
| -----------------|-----------|--------------------------------------------------------|--------------------|
| `script` | yes | | |
| `changeDocumentId` | `SOLMAN` only | | |
| `transportRequestId`| yes | | |
| `changeManagement/changeDocumentLabel` | no | `ChangeDocument\s?:` | regex pattern |
| `changeManagment/transportRequestLabel` | no | `TransportRequest\s?:` | regex pattern |
| `changeManagement/credentialsId` | yes | | |
| `changeManagement/endpoint` | yes | | |
| `changeManagement/git/from` | no | `origin/master` | |
| `changeManagement/git/to` | no | `HEAD` | |
| `changeManagement/git/format` | no | `%b` | see `git log --help` |
| `changeManagement/type` | no | `SOLMAN` | `SOLMAN`, `CTS` |
* `script` - The common script environment of the Jenkinsfile running. Typically the reference to the script calling the pipeline step is provided with the `this` parameter, as in `script: this`. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for retrieving, for example, configuration parameters.
* `changeDocumentId` - for `SOLMAN` only. The id of the change document related to the transport request to release.
* `transportRequestId` - The id of the transport request to release.
* `changeManagement/changeDocumentLabel` - for `SOLMAN` only. A pattern used for identifying lines holding the change document id.
* `changeManagment/transportRequestLabel` - A pattern used for identifying lines holding the transport request id.
* `changeManagement/credentialsId` - The credentials to connect to the service endpoint (Solution Manager, ABAP System).
* `changeManagement/endpoint` - The service endpoint (Solution Manager, ABAP System).
* `changeManagement/git/from` - The starting point for retrieving the change document id and/or transport request id
* `changeManagement/git/to` - The end point for retrieving the change document id and/or transport request id
* `changeManagement/git/format` - Specifies what part of the commit is scanned. By default the body of the commit message is scanned.
## Step configuration
## ${docGenConfiguration}
The step is configured using a customer configuration file provided as
a resource in a custom shared library.

View File

@ -1,46 +1,14 @@
# transportRequestUploadFile
# ${docGenStepName}
## Description
Uploads a file to a Transport Request.
## ${docGenDescription}
## Prerequisites
* **[Change Management Client 2.0.0 or compatible version](http://central.maven.org/maven2/com/sap/devops/cmclient/dist.cli/)** - available for download on Maven Central.
## Parameters
## ${docGenParameters}
| parameter | mandatory | default | possible values |
| -----------------|-----------|--------------------------------------------------------|--------------------|
| `script` | yes | | |
| `changeDocumentId` | `SOLMAN` only | | |
| `transportRequestId`| yes | | |
| `applicationId` | `SOLMAN` only | | |
| `filePath` | yes | | |
| `changeManagement/credentialsId` | yes | | |
| `changeManagement/endpoint` | yes | | |
| `changeManagement/git/from` | no | `origin/master` | |
| `changeManagement/git/to` | no | `HEAD` | |
| `changeManagement/changeDocumentLabel` | no | `ChangeDocument\s?:` | regex pattern |
| `changeManagement/transportRequestLabel` | no | `TransportRequest\s?:` | regex pattern |
| `changeManagement/git/format` | no | `%b` | see `git log --help` |
| `changeManagement/type` | no | `SOLMAN` | `SOLMAN`, `CTS` |
* `script` - The common script environment of the Jenkinsfile running. Typically the reference to the script calling the pipeline step is provided with the `this` parameter, as in `script: this`. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for retrieving, for example, configuration parameters.
* `changeDocumentId` - For type `SOLMAN` only. The id of the change document related to the transport request to release. Typically provided via commit history.
* `transportRequestId` - The id of the transport request to release. Typically provided via commit history.
* `applicationId` - For type `SOLMAN` only. The id of the application.
* `filePath` - The path of the file to upload.
* `changeManagement/credentialsId` - The credentials to connect to the service endpoint (Solution Manager, ABAP System).
* `changeManagement/endpoint` - The service endpoint (Solution Manager, ABAP System).
* `changeManagement/git/from` - The starting point for retrieving the change document id and/or transport request id
* `changeManagement/git/to` - The end point for retrieving the change document id and/or transport request id
* `changeManagement/changeDocumentLabel` - For type `SOLMAN` only. A pattern used for identifying lines holding the change document id.
* `changeManagement/transportRequestLabel` - A pattern used for identifying lines holding the transport request id.
* `changeManagement/type` Where/how the transport request is created (via SAP Solution Manager, ABAP).
* `changeManagement/git/format` - Specifies what part of the commit is scanned. By default the body of the commit message is scanned.
## Step configuration
## ${docGenConfiguration}
The step is configured using a customer configuration file provided as
a resource in a custom shared library.

View File

@ -14,3 +14,75 @@ If you see an error like `fatal: Not a git repository (or any parent up to mount
Please make sure to point parameter `testOptions` to your `conf.js` file like `testOptions: './path/to/my/tests/conf.js'`
## Examples
### Passing credentials from Jenkins
When running acceptance tests in a real environment, authentication will be enabled in most cases. UIVeri5 includes [features to automatically perform the login](https://github.com/SAP/ui5-uiveri5/blob/master/docs/config/authentication.md) with credentials in the `conf.js`. However, having credentials to the acceptance system stored in plain text is not an optimal solution.
Therefore, UIVeri5 allows templating to set parameters at runtime, as shown in the following example `conf.js`:
```js
// Read environment variables
const defaultParams = {
url: process.env.TARGET_SERVER_URL,
user: process.env.TEST_USER,
pass: process.env.TEST_PASS
};
// Resolve path to specs relative to the working directory
const path = require('path');
const specs = path.relative(process.cwd(), path.join(__dirname, '*.spec.js'));
// export UIVeri5 config
exports.config = {
profile: 'integration',
baseUrl: '\${params.url}',
specs: specs,
params: defaultParams, // can be overridden via cli `--params.<key>=<value>`
auth: {
// set up authorization for CF XSUAA
'sapcloud-form': {
user: '\${params.user}',
pass: '\${params.pass}',
userFieldSelector: 'input[name="username"]',
passFieldSelector: 'input[name="password"]',
logonButtonSelector: 'input[type="submit"]',
redirectUrl: /cp.portal\/site/
}
}
};
```
While default values for `baseUrl`, `user` and `pass` are read from the environment, they can also be overridden when calling the CLI.
In a custom Pipeline, this is very simple: Just wrap the call to `uiVeri5ExecuteTests` in `withCredentials` (`TARGET_SERVER_URL` is read from `config.yml`):
```groovy
withCredentials([usernamePassword(
credentialsId: 'MY_ACCEPTANCE_CREDENTIALS',
passwordVariable: 'password',
usernameVariable: 'username'
)]) {
uiVeri5ExecuteTests script: this, testOptions: "./uiveri5/conf.js --params.user=\${username} --params.pass=\${password}"
}
```
In a Pipeline Template, a [Stage Exit](#) can be used to fetch the credentials and store them in the environment. As the environment is passed down to uiVeri5ExecuteTests, the variables will be present there. This is an example for the stage exit `.pipeline/extensions/Acceptance.groovy` where the `credentialsId` is read from the `config.yml`:
```groovy
void call(Map params) {
// read username and password from the credential store
withCredentials([usernamePassword(
credentialsId: params.config.acceptanceCredentialsId,
passwordVariable: 'password',
usernameVariable: 'username'
)]) {
// store the result in the environment variables for executeUIVeri5Test
withEnv(["TEST_USER=\${username}", "TEST_PASS=\${password}"]) {
//execute original stage as defined in the template
params.originalStage()
}
}
}
return this
```

View File

@ -0,0 +1,23 @@
# ${docGenStepName}
## ${docGenDescription}
## Prerequisites
Your company has registered an account with WhiteSource and you have enabled the use of so called `User Keys` to manage
access to your organization in WhiteSource via dedicated privileges. Scanning your products without adequate user level
access protection imposed on the WhiteSource backend would simply allow access based on the organization token.
## ${docGenParameters}
## ${docGenConfiguration}
## Exceptions
None
## Examples
```groovy
whitesourceExecuteScan script: this, scanType: 'pip', productName: 'My Whitesource Product', userTokenCredentialsId: 'companyAdminToken', orgAdminUserTokenCredentialsId: 'orgAdminToken', orgToken: 'myWhitesourceOrganizationToken'
```

View File

@ -10,11 +10,12 @@ nav:
- cloudFoundryDeploy: steps/cloudFoundryDeploy.md
- commonPipelineEnvironment: steps/commonPipelineEnvironment.md
- containerExecuteStructureTests: steps/containerExecuteStructureTests.md
- detectExecuteScan: steps/detectExecuteScan.md
- dockerExecute: steps/dockerExecute.md
- dockerExecuteOnKubernetes: steps/dockerExecuteOnKubernetes.md
- durationMeasure: steps/durationMeasure.md
- githubPublishRelease: steps/githubPublishRelease.md
- gaugeExecuteTests: steps/gaugeExecuteTests.md
- githubPublishRelease: steps/githubPublishRelease.md
- handlePipelineStepErrors: steps/handlePipelineStepErrors.md
- healthExecuteCheck: steps/healthExecuteCheck.md
- influxWriteData: steps/influxWriteData.md
@ -22,23 +23,27 @@ nav:
- mailSendNotification: steps/mailSendNotification.md
- mavenExecute: steps/mavenExecute.md
- mtaBuild: steps/mtaBuild.md
- multicloudDeploy: steps/multicloudDeploy.md
- neoDeploy: steps/neoDeploy.md
- newmanExecute: steps/newmanExecute.md
- npmExecute: steps/npmExecute.md
- pipelineExecute: steps/pipelineExecute.md
- pipelineRestartSteps: steps/pipelineRestartSteps.md
- pipelineStashFiles: steps/pipelineStashFiles.md
- pipelineStashFilesAfterBuild: steps/pipelineStashFilesAfterBuild.md
- pipelineStashFilesBeforeBuild: steps/pipelineStashFilesBeforeBuild.md
- prepareDefaultValues: steps/prepareDefaultValues.md
- seleniumExecuteTests: steps/seleniumExecuteTests.md
- setupCommonPipelineEnvironment: steps/setupCommonPipelineEnvironment.md
- slackSendNotification: steps/slackSendNotification.md
- snykExecute: steps/snykExecute.md
- sonarExecuteScan: steps/sonarExecuteScan.md
- testsPublishResults: steps/testsPublishResults.md
- toolValidate: steps/toolValidate.md
- transportRequestCreate: steps/transportRequestCreate.md
- transportRequestRelease: steps/transportRequestRelease.md
- transportRequestUploadFile: steps/transportRequestUploadFile.md
- uiVeri5ExecuteTests: steps/uiVeri5ExecuteTests.md
- whitesourceExecuteScan: steps/whitesourceExecuteScan.md
- 'Scenarios':
- 'Build and Deploy Hybrid Applications with Jenkins and SAP Solution Manager': scenarios/changeManagement.md
- 'Build and Deploy SAP UI5 or SAP Fiori Applications on SAP Cloud Platform with Jenkins': scenarios/ui5-sap-cp/Readme.md
@ -65,6 +70,5 @@ markdown_extensions:
extra_css:
- 'css/extra.css'
edit_uri: edit/master/documentation/docs
docs_dir: docs-tmp
site_dir: docs-gen
repo_url: https://github.com/SAP/jenkins-library

View File

@ -1,16 +0,0 @@
#!/bin/bash
PRIVATE_KEY="cfg/id_rsa"
chmod 600 "${PRIVATE_KEY}"
eval `ssh-agent -s`
ssh-add "${PRIVATE_KEY}"
mkdir ~/.ssh
chmod 700 ~/.ssh
ssh-keyscan github.com >> ~/.ssh/known_hosts
git config user.name "Travis CI Publisher"
git remote add docu "git@github.com:$TRAVIS_REPO_SLUG.git";
git fetch docu gh-pages:gh-pages
echo "Pushing to gh-pages of repository $TRAVIS_REPO_SLUG"
cd documentation
mkdocs gh-deploy -v --clean --remote-name docu

View File

@ -1,4 +1,8 @@
stages:
Init:
stepConditions:
slackSendNotification:
config: 'channel'
'Pull-Request Voting': {}
Build: {}
'Additional Unit Tests': {}
@ -18,3 +22,7 @@ stages:
Compliance: {}
Promote: {}
Release: {}
'Post Actions':
stepConditions:
slackSendNotification:
config: 'channel'

View File

@ -0,0 +1,41 @@
<!DOCTYPE html>
<html>
<head>
<title>${reportTitle}</title>
<style type="text/css">${style}</style>
</head>
<body>
<h1>${reportTitle}</h1>
<h2>
<span>
<i>WhiteSource product name: ${whitesourceProductName}</i><br />
<i>Filtered project names: ${whitesourceProjectNames?:''}</i>
</span>
</h2>
<div>
<h3> total number of vulnerabilities: ${totalVulnerabilities}<br />
total number of high/critical vulnerabilities with CVSS score >= ${cvssSeverityLimit}: ${totalSevereVulnerabilities}
</h3>
</div>
<p>Snapshot taken:${now}</p>
<table>
<tr>
<th>Entry #</th>
<th>Date</th>
<th>CVE</th>
<th>CVSS Score</th>
<th>CVSS Version</th>
<th>Project</th>
<th>Library file name</th>
<th>Library group ID</th>
<th>Library artifact ID</th>
<th>Library version</th>
<th>Description</th>
<th>Top fix</th>
</tr>
${vulnerabilityTable}
</table>
</body>
</html>

View File

@ -45,6 +45,8 @@ general:
# fsGroup: 1000
manualConfirmation: true
productiveBranch: 'master'
whitesource:
serviceUrl: 'https://saas.whitesourcesoftware.com/api'
#Steps Specific Configuration
steps:
@ -175,6 +177,27 @@ steps:
stashContent:
- 'tests'
testReportFilePath: 'cst-report.json'
detectExecuteScan:
detect:
projectVersion: '1'
scanners:
- signature
scanPaths:
- '.'
scanProperties:
- '--blackduck.signature.scanner.memory=4096'
- '--blackduck.timeout=6000'
- '--blackduck.trust.cert=true'
- '--detect.policy.check.fail.on.severities=BLOCKER,CRITICAL,MAJOR'
- '--detect.report.timeout=4800'
- '--logging.level.com.synopsys.integration=DEBUG'
stashContent:
- 'buildDescriptor'
- 'checkmarx'
# buildTool specific settings
golang:
dockerImage: 'golang:1.12-stretch'
dockerWorkspace: ''
dockerExecute:
dockerPullImage: true
sidecarPullImage: true
@ -219,14 +242,33 @@ steps:
languageRunner: 'js'
runCommand: 'gauge run'
testOptions: 'specs'
bundler:
dockerImage: 'ruby:2.5.3-stretch'
dockerName: 'bundler'
dockerWorkspace: ''
languageRunner: 'ruby'
runCommand: 'bundle install && bundle exec gauge run'
testOptions: 'specs'
handlePipelineStepErrors:
echoDetails: true
failOnError: true
libraryDocumentationUrl: 'https://sap.github.io/jenkins-library/'
libraryRepositoryUrl: 'https://github.com/SAP/jenkins-library/'
mandatorySteps: []
stepTimeouts: {}
healthExecuteCheck:
healthEndpoint: ''
influxWriteData:
influxServer: ''
kanikoExecute:
containerBuildOptions: '--skip-tls-verify-pull'
containerCommand: '/busybox/tail -f /dev/null'
containerPreparationCommand: 'rm /kaniko/.docker/config.json'
containerShell: '/busybox/sh'
customTlsCertificateLinks: []
dockerfile: Dockerfile
dockerImage: 'gcr.io/kaniko-project/executor:debug'
dockerOptions: "-u 0 --entrypoint=''"
karmaExecuteTests:
containerPortMappings:
'node:8-stretch':
@ -280,11 +322,78 @@ steps:
- 'tests'
npmExecute:
dockerImage: 'node:8-stretch'
whitesourceExecuteScan:
createProductFromPipeline: true
emailAddressesOfInitialProductAdmins: []
buildDescriptorExcludeList: []
parallelLimit: 15
licensingVulnerabilities: true
securityVulnerabilities: true
cvssSeverityLimit: -1
reporting: true
vulnerabilityReportFileName: 'piper_whitesource_vulnerability_report'
vulnerabilityReportTitle: 'WhiteSource Security Vulnerability Report'
projectNames: []
jreDownloadUrl: 'https://github.com/SAP/SapMachine/releases/download/sapmachine-11.0.2/sapmachine-jre-11.0.2_linux-x64_bin.tar.gz'
agentFileName: 'wss-unified-agent.jar'
agentDownloadUrl: 'https://github.com/whitesource/unified-agent-distribution/raw/master/standAlone/${config.agentFileName}'
agentParameters: ''
configFilePath: './wss-unified-agent.config'
mta:
stashContent:
- 'buildDescriptor'
- 'opensourceConfiguration'
maven:
buildDescriptorFile: './pom.xml'
dockerImage: 'maven:3.5-jdk-8'
dockerWorkspace: '/home/java'
stashContent:
- 'buildDescriptor'
- 'opensourceConfiguration'
npm:
buildDescriptorFile: './package.json'
dockerImage: 'node:8-stretch'
dockerWorkspace: '/home/node'
stashContent:
- 'buildDescriptor'
- 'opensourceConfiguration'
pip:
buildDescriptorFile: './setup.py'
dockerImage: 'python:3.7.2-stretch'
dockerWorkspace: '/home/python'
stashContent:
- 'buildDescriptor'
- 'opensourceConfiguration'
golang:
buildDescriptorFile: './Gopkg.toml'
dockerImage: 'golang:1.12-stretch'
dockerWorkspace: '/home/dep'
stashContent:
- 'buildDescriptor'
- 'opensourceConfiguration'
- 'checkmarx'
additionalInstallCommand: >-
curl --fail https://raw.githubusercontent.com/golang/dep/master/install.sh | sh
&& mkdir -p \$GOPATH/src/${config.whitesource.projectName.substring(0, config.whitesource.projectName.lastIndexOf('/'))}
&& ln -s \$(pwd) \$GOPATH/src/${config.whitesource.projectName}
&& cd \$GOPATH/src/${config.whitesource.projectName} && dep ensure
sbt:
buildDescriptorFile: './build.sbt'
dockerImage: 'hseeberger/scala-sbt:8u181_2.12.8_1.2.8'
dockerWorkspace: '/home/scala'
stashContent:
- 'buildDescriptor'
- 'opensourceConfiguration'
verbose: false
timeout: 0
pipelineExecute:
branch: 'master'
path: 'Jenkinsfile'
credentialsId: ''
pipelineRestartSteps:
sendMail: true
timeoutInSeconds: 900
pipelineStashFilesAfterBuild:
runOpaTests: false
stashIncludes:
checkmarx: '**/*.js, **/*.scala, **/*.py, **/*.go, **/*.xml, **/*.html'
classFiles: '**/target/classes/**/*.class, **/target/test-classes/**/*.class'
@ -293,14 +402,14 @@ steps:
checkmarx: '**/*.mockserver.js, node_modules/**/*.js'
classFiles: ''
sonar: ''
noDefaultExludes: []
pipelineStashFilesBeforeBuild:
runCheckmarx: false
stashIncludes:
buildDescriptor: '**/pom.xml, **/.mvn/**, **/assembly.xml, **/.swagger-codegen-ignore, **/package.json, **/requirements.txt, **/setup.py, **/whitesource_config.py, **/mta*.y*ml, **/.npmrc, **/whitesource.*.json, **/whitesource-fs-agent.config, Dockerfile, **/VERSION, **/version.txt, **/build.sbt, **/sbtDescriptor.json, **/project/*'
buildDescriptor: '**/pom.xml, **/.mvn/**, **/assembly.xml, **/.swagger-codegen-ignore, **/package.json, **/requirements.txt, **/setup.py, **/mta*.y*ml, **/.npmrc, Dockerfile, **/VERSION, **/version.txt, **/Gopkg.*, **/build.sbt, **/sbtDescriptor.json, **/project/*'
deployDescriptor: '**/manifest*.y*ml, **/*.mtaext.y*ml, **/*.mtaext, **/xs-app.json, helm/**, *.y*ml'
git: '**/gitmetadata/**'
git: '.git/**'
opa5: '**/*.*'
opensourceConfiguration: '**/srcclr.yml, **/vulas-custom.properties, **/.nsprc, **/.retireignore, **/.retireignore.json, **/.snyk'
opensourceConfiguration: '**/srcclr.yml, **/vulas-custom.properties, **/.nsprc, **/.retireignore, **/.retireignore.json, **/.snyk, **/wss-unified-agent.config, **/vendor/**/*'
pipelineConfigAndTests: '.pipeline/**'
securityDescriptor: '**/xs-security.json'
tests: '**/pom.xml, **/*.json, **/*.xml, **/src/**, **/node_modules/**, **/specs/**, **/env/**, **/*.js, **/tests/**'
@ -313,6 +422,8 @@ steps:
pipelineConfigAndTests: ''
securityDescriptor: ''
tests: ''
noDefaultExludes:
- 'git'
seleniumExecuteTests:
buildTool: 'npm'
containerPortMappings:
@ -334,6 +445,10 @@ steps:
dockerImage: 'node:8-stretch'
dockerName: 'npm'
dockerWorkspace: '/home/node'
bundler:
dockerImage: 'ruby:2.5.3-stretch'
dockerName: 'bundler'
dockerWorkspace: ''
slackSendNotification:
color: "${buildStatus == 'SUCCESS'?'#008000':'#E60000'}"
defaultMessage: "${buildStatus}: Job ${env.JOB_NAME} <${env.BUILD_URL}|#${env.BUILD_NUMBER}>"
@ -348,6 +463,12 @@ steps:
- 'opensourceConfiguration'
toJson: false
toHtml: false
sonarExecuteScan:
dockerImage: 'maven:3.5-jdk-8'
instance: 'SonarCloud'
options: []
pullRequestProvider: 'github'
sonarScannerDownloadUrl: 'https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-3.3.0.1492-linux.zip'
testsPublishResults:
failOnError: false
junit:

60
resources/piper-os.css Normal file
View File

@ -0,0 +1,60 @@
/* Stylesheet shipped as a pipeline resource (resources/piper-os.css) for HTML reports. */

/* Base typography for the whole report. */
body {
    font-family: Arial, Verdana;
}
/* Collapse adjacent table cell borders into a single line. */
table {
    border-collapse: collapse;
}
/* Monospace container for rendered code/log snippets. */
div.code {
    font-family: "Courier New", "Lucida Console";
}
th {
    border-top: 1px solid #ddd;
}
/* Common cell styling: left aligned with light grey grid lines. */
th, td {
    padding: 12px;
    text-align: left;
    border-bottom: 1px solid #ddd;
    border-right: 1px solid #ddd;
}
/* Zebra striping for readability of long tables. */
tr:nth-child(even) {
    background-color: #f2f2f2;
}
.bold {
    font-weight: bold;
}
/* Plain list without bullets or surrounding whitespace. */
.nobullets {
    list-style-type:none;
    padding-left: 0;
    padding-bottom: 0;
    margin: 0;
}
/* Status highlighting: red background = not ok, yellow = warning, green = ok. */
.notok {
    background-color: #ffe5e5;
    padding: 5px
}
.warn {
    background-color: #ffff99;
    padding: 5px
}
.ok {
    background-color: #e1f5a9;
    padding: 5px
}
.green{
    color: olivedrab;
}
.red{
    color: orangered;
}
/* Risk classification: yellow styles the text color, grey/black the background. */
.risk-yellow{
    padding: 5px;
    color: rgba(255, 255, 0, 0.6);
}
.risk-grey{
    background-color: rgba(212, 212, 212, 0.7);
    padding: 5px;
}
.risk-black{
    background-color: rgba(0, 0, 0, 0.75);
    padding: 5px;
}

View File

@ -0,0 +1,5 @@
package com.sap.piper
/**
 * Target cloud platforms distinguished by the library, used e.g. by
 * DeploymentType.selectFor() to pick a deployment strategy.
 */
enum CloudPlatform {
    NEO, CLOUD_FOUNDRY
}

View File

@ -5,14 +5,17 @@ import com.cloudbees.groovy.cps.NonCPS
@API
class ConfigurationHelper implements Serializable {
def static SEPARATOR = '/'
static ConfigurationHelper newInstance(Script step, Map config = [:]) {
new ConfigurationHelper(step, config)
}
ConfigurationHelper loadStepDefaults() {
ConfigurationHelper loadStepDefaults(Map compatibleParameters = [:]) {
this.step.prepareDefaultValues()
this.config = ConfigurationLoader.defaultGeneralConfiguration()
mixin(ConfigurationLoader.defaultStepConfiguration(null, name))
mixin(ConfigurationLoader.defaultGeneralConfiguration(), null, compatibleParameters)
mixin(ConfigurationLoader.defaultStepConfiguration(null, name), null, compatibleParameters)
}
private Map config
@ -33,8 +36,8 @@ class ConfigurationHelper implements Serializable {
}
ConfigurationHelper mixinGeneralConfig(commonPipelineEnvironment, Set filter = null, Map compatibleParameters = [:]){
Map stepConfiguration = ConfigurationLoader.generalConfiguration([commonPipelineEnvironment: commonPipelineEnvironment])
return mixin(stepConfiguration, filter, compatibleParameters)
Map generalConfiguration = ConfigurationLoader.generalConfiguration([commonPipelineEnvironment: commonPipelineEnvironment])
return mixin(generalConfiguration, filter, compatibleParameters)
}
ConfigurationHelper mixinStageConfig(commonPipelineEnvironment, stageName, Set filter = null, Map compatibleParameters = [:]){
@ -58,21 +61,24 @@ class ConfigurationHelper implements Serializable {
return this
}
private Map handleCompatibility(Map compatibleParameters, String paramStructure = '', Map configMap ) {
private Map handleCompatibility(Map compatibleParameters, String paramStructure = '', Map configMap, Map newConfigMap = [:] ) {
Map newConfig = [:]
compatibleParameters.each {entry ->
if (entry.getValue() instanceof Map) {
paramStructure = (paramStructure ? paramStructure + '.' : '') + entry.getKey()
newConfig[entry.getKey()] = handleCompatibility(entry.getValue(), paramStructure, configMap)
def internalParamStructure = (paramStructure ? paramStructure + '.' : '') + entry.getKey()
newConfig[entry.getKey()] = handleCompatibility(entry.getValue(), internalParamStructure, configMap, newConfig)
} else {
def configSubMap = configMap
for(String key in paramStructure.tokenize('.')){
configSubMap = configSubMap?.get(key)
}
if (configSubMap == null || (configSubMap != null && configSubMap[entry.getKey()] == null)) {
newConfig[entry.getKey()] = configMap[entry.getValue()]
def value = configMap[entry.getValue()]
if(null == value)
value = newConfigMap[entry.getValue()]
if (value != null) {
newConfig[entry.getKey()] = value
def paramName = (paramStructure ? paramStructure + '.' : '') + entry.getKey()
if (configMap[entry.getValue()] != null) {
this.step.echo ("[INFO] The parameter '${entry.getValue()}' is COMPATIBLE to the parameter '${paramName}'")
}
}
@ -84,9 +90,16 @@ class ConfigurationHelper implements Serializable {
Map dependingOn(dependentKey){
return [
mixin: {key ->
def parts = tokenizeKey(key)
def targetMap = config
if(parts.size() > 1) {
key = parts.last()
parts.remove(key)
targetMap = getConfigPropertyNested(config, (parts as Iterable).join(SEPARATOR))
}
def dependentValue = config[dependentKey]
if(config[key] == null && dependentValue && config[dependentValue])
config[key] = config[dependentValue][key]
if(targetMap[key] == null && dependentValue && config[dependentValue])
targetMap[key] = config[dependentValue][key]
return this
}
]
@ -114,34 +127,38 @@ class ConfigurationHelper implements Serializable {
handleValidationFailures()
MapUtils.traverse(config, { v -> (v instanceof GString) ? v.toString() : v })
if(config.verbose) step.echo "[${name}] Configuration: ${config}"
return config
return MapUtils.deepCopy(config)
}
/* private */ def getConfigPropertyNested(key) {
return getConfigPropertyNested(config, key)
}
/* private */ static getConfigPropertyNested(Map config, key) {
def separator = '/'
List parts = tokenizeKey(key)
// reason for cast to CharSequence: String#tokenize(./.) causes a deprecation warning.
List parts = (key in String) ? (key as CharSequence).tokenize(separator) : ([key] as List)
if (config[parts.head()] != null) {
if(config[parts.head()] != null) {
if(config[parts.head()] in Map && ! parts.tail().isEmpty()) {
return getConfigPropertyNested(config[parts.head()], (parts.tail() as Iterable).join(separator))
if (config[parts.head()] in Map && !parts.tail().isEmpty()) {
return getConfigPropertyNested(config[parts.head()], (parts.tail() as Iterable).join(SEPARATOR))
}
if (config[parts.head()].class == String) {
return (config[parts.head()] as String).trim()
}
}
return config[parts.head()]
}
/* private */ static tokenizeKey(String key) {
// reason for cast to CharSequence: String#tokenize(./.) causes a deprecation warning.
List parts = (key in String) ? (key as CharSequence).tokenize(SEPARATOR) : ([key] as List)
return parts
}
private void existsMandatoryProperty(key, errorMessage) {
def paramValue = getConfigPropertyNested(config, key)
@ -169,7 +186,7 @@ class ConfigurationHelper implements Serializable {
ConfigurationHelper withPropertyInValues(String key, Set values){
withMandatoryProperty(key)
def value = config[key]
def value = config[key] instanceof GString ? config[key].toString() : config[key]
if(! (value in values) ) {
throw new IllegalArgumentException("Invalid ${key} = '${value}'. Valid '${key}' values are: ${values}.")
}

View File

@ -26,7 +26,11 @@ class ConfigurationLoader implements Serializable {
@NonCPS
static Map generalConfiguration(script){
try {
return script?.commonPipelineEnvironment?.configuration?.general ?: [:]
} catch (groovy.lang.MissingPropertyException mpe) {
return [:]
}
}
@NonCPS
@ -43,7 +47,12 @@ class ConfigurationLoader implements Serializable {
private static Map loadConfiguration(script, String type, String entryName, ConfigurationType configType){
switch (configType) {
case ConfigurationType.CUSTOM_CONFIGURATION:
try {
return script?.commonPipelineEnvironment?.configuration?.get(type)?.get(entryName) ?: [:]
} catch (groovy.lang.MissingPropertyException mpe) {
return [:]
}
case ConfigurationType.DEFAULT_CONFIGURATION:
return DefaultValueCache.getInstance()?.getDefaultValues()?.get(type)?.get(entryName) ?: [:]
default:

View File

@ -0,0 +1,34 @@
package com.sap.piper
/**
 * Deployment strategies supported by the library, together with the command
 * line value each strategy maps to (exposed via {@code toString()}).
 */
enum DeploymentType {
    NEO_ROLLING_UPDATE('rolling-update'), CF_BLUE_GREEN('blue-green'), CF_STANDARD('standard'), NEO_DEPLOY('deploy')

    private String value

    // Enum constructors are implicitly private and must not be declared
    // 'public' (illegal in Java); 'private' keeps behavior identical since
    // enum constructors are never callable from outside the enum anyway.
    private DeploymentType(String value) {
        this.value = value
    }

    /** @return the command line representation of this deployment type */
    @Override
    public String toString() {
        return value
    }

    /**
     * Selects the deployment type for the given platform.
     *
     * @param cloudPlatform target platform (NEO or CLOUD_FOUNDRY)
     * @param enableZeroDowntimeDeployment true selects the zero-downtime
     *        strategy (rolling-update on Neo, blue-green on Cloud Foundry)
     * @return the matching deployment type
     * @throws RuntimeException for unknown platforms
     */
    static DeploymentType selectFor(CloudPlatform cloudPlatform, boolean enableZeroDowntimeDeployment) {
        switch (cloudPlatform) {
            case CloudPlatform.NEO:
                if (enableZeroDowntimeDeployment) return NEO_ROLLING_UPDATE
                return NEO_DEPLOY
            case CloudPlatform.CLOUD_FOUNDRY:
                if (enableZeroDowntimeDeployment) return CF_BLUE_GREEN
                return CF_STANDARD
            default:
                throw new RuntimeException("Unknown cloud platform: ${cloudPlatform}")
        }
    }
}

View File

@ -0,0 +1,128 @@
package com.sap.piper
import com.cloudbees.groovy.cps.NonCPS
import groovy.transform.Field
import java.util.regex.Matcher
import java.util.regex.Pattern
// Regex capturing the value of a name='...' / name="..." assignment (e.g. in a setup.py).
@Field
def name = Pattern.compile("(.*)name=['\"](.*?)['\"](.*)", Pattern.DOTALL)
// Regex capturing the value of a version='...' / version="..." assignment.
@Field
def version = Pattern.compile("(.*)version=['\"](.*?)['\"](.*)", Pattern.DOTALL)
// Regex matching values that are method calls, e.g. "get_version()".
@Field
def method = Pattern.compile("(.*)\\(\\)", Pattern.DOTALL)
/**
 * Reads group/artifact/version/packaging coordinates from a Maven POM.
 * Coordinates missing in the POM itself (e.g. inherited from a parent) are
 * resolved via 'mvn help:evaluate'.
 *
 * @param file path to the pom.xml, defaults to 'pom.xml'
 * @return map with keys 'group', 'artifact', 'version', 'packaging'
 */
def getMavenGAV(file = 'pom.xml') {
    def result = [:]
    def descriptor = readMavenPom(file: file)
    def group = descriptor.getGroupId()
    def artifact = descriptor.getArtifactId()
    def version = descriptor.getVersion() // NOTE: shadows the script-level @Field 'version' pattern inside this method
    result['packaging'] = descriptor.getPackaging()
    // the grep filters the mvn output down to the evaluated expression (drops log/download lines)
    result['group'] = (null != group && group.length() > 0) ? group : sh(returnStdout: true, script: "mvn -f ${file} help:evaluate -Dexpression=project.groupId | grep -Ev '(^\\s*\\[|Download|Java\\w+:)'").trim()
    result['artifact'] = (null != artifact && artifact.length() > 0) ? artifact : sh(returnStdout: true, script: "mvn -f ${file} help:evaluate -Dexpression=project.artifactId | grep -Ev '(^\\s*\\[|Download|Java\\w+:)'").trim()
    result['version'] = (null != version && version.length() > 0) ? version : sh(returnStdout: true, script: "mvn -f ${file} help:evaluate -Dexpression=project.version | grep ^[0-9].*").trim()
    echo "loaded ${result} from ${file}"
    return result
}
/**
 * Extracts group/artifact/version coordinates from an npm package.json.
 * Scoped package names ('@scope/name') are split into group '@scope' and
 * artifact 'name'; unscoped packages get an empty group.
 *
 * @param file path to the package descriptor, defaults to 'package.json'
 * @return map with keys 'group', 'artifact', 'version'
 */
def getNpmGAV(file = 'package.json') {
    def result = [:]
    def descriptor = readJSON(file: file)
    def packageName = descriptor.name
    if (!packageName.startsWith('@')) {
        result['group'] = ''
        result['artifact'] = packageName
    } else {
        def parts = packageName.split('/')
        if (parts.length != 2) {
            error "Unable to parse package name '${descriptor.name}'"
        }
        result['group'] = parts[0]
        result['artifact'] = parts[1]
    }
    result['version'] = descriptor.version
    echo "loaded ${result} from ${file}"
    return result
}
/**
 * Extracts coordinates from a Dlang dub.json descriptor. Group and packaging
 * are fixed values; name and version come from the descriptor.
 *
 * @param file path to the descriptor, defaults to 'dub.json'
 * @return map with keys 'group', 'artifact', 'version', 'packaging'
 */
def getDlangGAV(file = 'dub.json') {
    def descriptor = readJSON(file: file)
    def result = [
        group    : 'com.sap.dlang',
        artifact : descriptor.name,
        version  : descriptor.version,
        packaging: 'tar.gz'
    ]
    echo "loaded ${result} from ${file}"
    return result
}
/**
 * Extracts coordinates from an sbt descriptor file (sbtDescriptor.json),
 * taking all four values directly from the descriptor.
 *
 * @param file path to the descriptor, defaults to 'sbtDescriptor.json'
 * @return map with keys 'group', 'artifact', 'version', 'packaging'
 */
def getSbtGAV(file = 'sbtDescriptor.json') {
    def descriptor = readJSON(file: file)
    def result = [
        group    : descriptor.group,
        artifact : descriptor.artifactId,
        version  : descriptor.version,
        packaging: descriptor.packaging
    ]
    echo "loaded ${result} from ${file}"
    return result
}
/**
 * Extracts artifact name and version from a Python setup.py.
 * If no version is found, or the version is computed via a method call
 * (e.g. version=get_version()), the version is read from an adjacent
 * version.txt instead.
 *
 * @param file path to the descriptor, defaults to 'setup.py'
 * @return map with keys 'group' (''), 'packaging' (''), 'artifact', 'version'
 */
def getPipGAV(file = 'setup.py') {
    def result = [:]
    def descriptor = readFile(file: file)
    result['group'] = ''
    result['packaging'] = ''
    // 'name', 'version' and 'method' are the script-level @Field regex patterns
    result['artifact'] = matches(name, descriptor)
    result['version'] = matches(version, descriptor)
    if (result['version'] == '' || matches(method, result['version'])) {
        file = file.replace('setup.py', 'version.txt')
        result['version'] = getVersionFromFile(file)
    }
    echo "loaded ${result} from ${file}"
    return result
}
/**
 * Derives artifact coordinates for a Go project from its repository URL and
 * the location of the build descriptor. The artifact name is host + path of
 * the repo URL (without '.git'), optionally suffixed with the descriptor's
 * directory. The version is read from 'version.txt' and, as fallback, from
 * 'VERSION' next to the descriptor.
 *
 * @param file path to the Go build descriptor, defaults to 'Gopkg.toml'
 * @param repoUrl URL of the source repository
 * @return map with keys 'group' (''), 'packaging' (''), 'artifact', 'version'
 */
def getGoGAV(file = 'Gopkg.toml', URI repoUrl) {
    def name = "${repoUrl.getHost()}${repoUrl.getPath().replaceAll(/\.git/, '')}" // NOTE: shadows the @Field 'name' pattern
    // directory part of the descriptor path; '' when the descriptor sits in the repo root
    def path = file.substring(0, file.lastIndexOf('/') + 1)
    // module identifier: descriptor directory with '.' and '/' characters stripped
    def module = path?.replaceAll(/\./, '')?.replaceAll('/', '')
    def result = [:]
    result['group'] = ''
    result['packaging'] = ''
    result['artifact'] = "${name}${module?'.':''}${module?:''}".toString()
    file = path + 'version.txt'
    result['version'] = getVersionFromFile(file)
    if (!result['version']) {
        file = path + 'VERSION'
        result['version'] = getVersionFromFile(file)
    }
    echo "loaded ${result} from ${file}"
    return result
}
/**
 * Reads a trimmed version string from the given file.
 *
 * @param file path of the version file
 * @return the trimmed file content, or '' when the file is missing or empty
 */
private getVersionFromFile(file) {
    def content = null
    try {
        content = readFile(file: file)
    } catch (java.nio.file.NoSuchFileException e) {
        echo "Failed to load version string from file ${file} due to ${e}"
    }
    return content ? content.trim() : ''
}
/**
 * Applies the given pattern to the input and returns the second capture
 * group on a full match, '' otherwise.
 *
 * Uses the public Pattern.matcher() factory instead of directly invoking the
 * package-private java.util.regex.Matcher constructor, which only worked via
 * Groovy's permissive access and is not a supported API.
 */
@NonCPS
private def matches(regex, input) {
    def m = regex.matcher(input)
    return m.matches() ? m.group(2) : ''
}

View File

@ -1,21 +0,0 @@
package com.sap.piper
import hudson.AbortException
/**
 * Helpers for reading environment variables on the Jenkins agent via a bash
 * login shell, so that variables exported in shell profile scripts are
 * visible as well.
 */
class EnvironmentUtils implements Serializable {

    /** True if the variable resolves to a non-empty value in a login shell. */
    static boolean isEnvironmentVariable(script, variable) {
        return !getEnvironmentVariable(script, variable).isEmpty()
    }

    /**
     * Echoes the variable inside '#!/bin/bash --login' and returns the
     * trimmed value ('' when the variable is unset).
     * @throws AbortException when executing the shell fails
     */
    static String getEnvironmentVariable(script, variable) {
        try {
            def envVar = script.sh returnStdout: true, script: """#!/bin/bash --login
                echo \$$variable"""
            return envVar.trim()
        } catch(AbortException e) {
            throw new AbortException("There was an error requesting the environment variable '$variable'. Reason: $e.message.")
        }
    }
}

View File

@ -1,91 +0,0 @@
package com.sap.piper
import hudson.AbortException
/**
 * File system checks executed via shell ('script.sh') on the Jenkins agent,
 * plus validate* variants that abort the build (hudson.AbortException) when
 * a check does not hold. All methods require a non-empty path argument and
 * throw IllegalArgumentException otherwise.
 */
class FileUtils implements Serializable {

    /** True if the path exists, either as directory or as regular file. */
    static boolean directoryOrFileExists(script, dirOrFile) {
        if (!dirOrFile) throw new IllegalArgumentException("The parameter 'dirOrFile' can not be null or empty.")
        // shell exit code 0 = exists, 1 = does not exist
        def returnStatus = script.sh returnStatus: true, script: """
            set +x
            if [ -d $dirOrFile ]; then
              echo \"$dirOrFile exists.\"
              exit 0
            elif [ -f $dirOrFile ]; then
              echo \"$dirOrFile exists.\"
              exit 0
            else
              echo \"$dirOrFile does not exist.\"
              exit 1
            fi
            """
        return returnStatus == 0
    }

    /** True if the path exists and is a directory. */
    static boolean isDirectory(script, dir) {
        if (!dir) throw new IllegalArgumentException("The parameter 'dir' can not be null or empty.")
        def returnStatus = script.sh returnStatus: true, script: """
            set +x
            if [ -d $dir ]; then
              echo \"$dir is a directory.\"
              exit 0
            else
              echo \"$dir is not a directory.\"
              exit 1
            fi
            """
        return returnStatus == 0
    }

    /** True if the directory contains no entries. */
    static boolean isDirectoryEmpty(script, dir) {
        if (!dir) throw new IllegalArgumentException("The parameter 'dir' can not be null or empty.")
        def returnStatus = script.sh returnStatus: true, script: """
            set +x
            if [ -z "\$(ls -A $dir)" ]; then
              echo "$dir is empty."
              exit 1
            else
              echo "$dir is not empty."
              exit 0
            fi
            """
        // NOTE: inverted convention - the script exits with 1 when the
        // directory IS empty, hence the comparison with 1 here.
        return returnStatus == 1
    }

    /** True if the path exists and is a regular file. */
    static boolean isFile(script, filePath) {
        if (!filePath) throw new IllegalArgumentException("The parameter 'filePath' can not be null or empty.")
        def returnStatus = script.sh returnStatus: true, script: """
            set +x
            if [ -f $filePath ]; then
              echo \"$filePath is a file.\"
              exit 0
            else
              echo \"$filePath is not a file.\"
              exit 1
            fi
            """
        return returnStatus == 0
    }

    /** Aborts the build if the path does not exist at all. */
    static validateDirectoryOrFileExists(script, dirOrFile) {
        if (!dirOrFile) throw new IllegalArgumentException("The parameter 'dirOrFile' can not be null or empty.")
        if (!directoryOrFileExists(script, dirOrFile)) throw new AbortException("Validation failed. '$dirOrFile' does not exist.")
    }

    /** Aborts the build if the path does not exist or is not a directory. */
    static validateDirectory(script, dir) {
        if (!dir) throw new IllegalArgumentException("The parameter 'dir' can not be null or empty.")
        validateDirectoryOrFileExists(script, dir)
        if (!isDirectory(script, dir)) throw new AbortException("Validation failed. '$dir' is not a directory.")
    }

    /** Aborts the build if the path is not a directory or the directory is empty. */
    static validateDirectoryIsNotEmpty(script, dir) {
        validateDirectory(script, dir)
        if (isDirectoryEmpty(script, dir)) throw new AbortException("Validation failed. '$dir' is empty.")
    }

    /** Aborts the build if the path does not exist or is not a regular file. */
    static validateFile(script, filePath) {
        if (!filePath) throw new IllegalArgumentException("The parameter 'filePath' can not be null or empty.")
        validateDirectoryOrFileExists(script, filePath)
        if (!isFile(script, filePath)) throw new AbortException("Validation failed. '$filePath' is not a file.")
    }
}

View File

@ -33,10 +33,12 @@ String getGitCommitId() {
return sh(returnStdout: true, script: 'git rev-parse HEAD').trim()
}
String[] extractLogLines(String filter = '',
String[] extractLogLines(
String filter = '',
String from = 'origin/master',
String to = 'HEAD',
String format = '%b') {
String format = '%b'
) {
// Checks below: there was an value provided from outside, but the value was null.
// Throwing an exception is more transparent than making a fallback to the defaults

View File

@ -3,6 +3,11 @@ package com.sap.piper
import com.cloudbees.groovy.cps.NonCPS
@NonCPS
String getPrettyJsonString(object) {
String groovyObjectToPrettyJsonString(object) {
return groovy.json.JsonOutput.prettyPrint(groovy.json.JsonOutput.toJson(object))
}
@NonCPS
def jsonStringToGroovyObject(text) {
return new groovy.json.JsonSlurperClassic().parseText(text)
}

View File

@ -62,4 +62,56 @@ class MapUtils implements Serializable {
}
m.putAll(updates)
}
/**
 * Resolves a nested value from the map; 'key' is either a '/'-separated
 * path string or a list of path segments. Returns null when an intermediate
 * segment is missing or not a map.
 */
static private def getByPath(Map m, def key) {
    List segments = key in CharSequence ? key.tokenize('/') : key
    def head = segments.head()
    def value = m.get(head)
    List rest = segments.tail()
    if (rest.isEmpty()) return value
    return (value in Map) ? getByPath(value, rest) : null
}
/*
 * Provides a new map with the same content like the original map.
 * Nested Collections and Maps are copied. Values which are not
 * Collections/Maps are not copied/cloned.
 * <paranoia>The keys are also not copied/cloned, even if they are
 * Maps or Collections.</paranoia>
 */
static deepCopy(Map original) {
    Map duplicate = [:]
    for (def entry : original.entrySet()) {
        def value = entry.value
        duplicate.put(entry.key, value == null ? null : deepCopy(value))
    }
    return duplicate
}
/* private */ static deepCopy(Set original) {
    // copy the set itself and, recursively, each contained element
    Set duplicate = []
    for (def element : original) {
        duplicate.add(deepCopy(element))
    }
    return duplicate
}
/* private */ static deepCopy(List original) {
    // copy the list itself and, recursively, each contained element
    List duplicate = []
    for (def element : original) {
        duplicate.add(deepCopy(element))
    }
    return duplicate
}
/*
 * In fact not a copy, but a catch all for everything not matching
 * with the other signatures
 */
/* private */ static deepCopy(def original) {
    return original
}
}

View File

@ -7,23 +7,30 @@ import groovy.text.SimpleTemplateEngine
import java.nio.charset.StandardCharsets
import java.security.MessageDigest
@NonCPS
def getMandatoryParameter(Map map, paramName, defaultValue = null) {
def paramValue = map[paramName]
if (paramValue == null)
paramValue = defaultValue
if (paramValue == null)
throw new Exception("ERROR - NO VALUE AVAILABLE FOR ${paramName}")
return paramValue
def stash(name, include = '**/*.*', exclude = '', useDefaultExcludes = true) {
echo "Stash content: ${name} (include: ${include}, exclude: ${exclude}, useDefaultExcludes: ${useDefaultExcludes})"
Map stashParams = [
name : name,
includes: include,
excludes: exclude
]
//only set the optional parameter if default excludes should not be applied
if (!useDefaultExcludes) {
stashParams.useDefaultExcludes = useDefaultExcludes
}
steps.stash stashParams
}
def stash(name, include = '**/*.*', exclude = '') {
echo "Stash content: ${name} (include: ${include}, exclude: ${exclude})"
steps.stash name: name, includes: include, excludes: exclude
@NonCPS
def runClosures(Map closures) {
def closuresToRun = closures.values().asList()
Collections.shuffle(closuresToRun) // Shuffle the list so no one tries to rely on the order of execution
for (int i = 0; i < closuresToRun.size(); i++) {
(closuresToRun[i] as Closure).run()
}
}
def stashList(script, List stashes) {
@ -46,9 +53,9 @@ def stashList(script, List stashes) {
}
}
def stashWithMessage(name, msg, include = '**/*.*', exclude = '') {
def stashWithMessage(name, msg, include = '**/*.*', exclude = '', useDefaultExcludes = true) {
try {
stash(name, include, exclude)
stash(name, include, exclude, useDefaultExcludes)
} catch (e) {
echo msg + name + " (${e.getMessage()})"
}
@ -71,7 +78,7 @@ def unstashAll(stashContent) {
def unstashedContent = []
if (stashContent) {
for (i = 0; i < stashContent.size(); i++) {
if(stashContent[i]) {
if (stashContent[i]) {
unstashedContent += unstash(stashContent[i])
}
}
@ -101,8 +108,18 @@ void pushToSWA(Map parameters, Map config) {
}
@NonCPS
static String fillTemplate(String templateText, Map binding){
static String fillTemplate(String templateText, Map binding) {
def engine = new SimpleTemplateEngine()
String result = engine.createTemplate(templateText).make(binding)
return result
}
static String downloadSettingsFromUrl(script, String url, String targetFile = 'settings.xml') {
if (script.fileExists(targetFile)) {
throw new RuntimeException("Trying to download settings file to ${targetFile}, but a file with this name already exists. Please specify a unique file name.")
}
def settings = script.httpRequest(url)
script.writeFile(file: targetFile, text: settings.getContent())
return targetFile
}

View File

@ -1,52 +0,0 @@
package com.sap.piper
import hudson.AbortException
/**
 * Immutable semantic version (major.minor[.patch]) with comparison helpers.
 * A patch value of -1 denotes a two-component version without patch level.
 */
class Version implements Serializable {

    final def major
    final def minor
    final def patch

    Version(major, minor, patch = -1) {
        if (major < 0) throw new IllegalArgumentException("The parameter 'major' can not have a value less than 0.")
        if (minor < 0) throw new IllegalArgumentException("The parameter 'minor' can not have a value less than 0.")
        this.major = major
        this.minor = minor
        this.patch = patch
    }

    /**
     * Parses the first <major.minor.patch> triple contained in the text.
     * @throws AbortException when no such triple is contained
     */
    Version(text) {
        if (!text) throw new IllegalArgumentException("The parameter 'text' can not be null or empty.")
        def group = text =~ /(\d+[.]\d+[.]\d+)/
        if (!group) throw new AbortException("The version '$text' has an unexpected format. The expected format is <major.minor.patch>.")
        def i = group[0].size()-1
        def versionNumbers = group[0][i].split("\\.")
        major = versionNumbers[0].toInteger()
        minor = versionNumbers[1].toInteger()
        patch = versionNumbers[2].toInteger()
    }

    /**
     * Field-wise equality. NOTE: deviates from the Object.equals() contract
     * by throwing IllegalArgumentException for null instead of returning
     * false; kept as-is for backward compatibility with existing callers.
     */
    @Override
    boolean equals(version) {
        if (!version) throw new IllegalArgumentException("The parameter 'version' can not be null.")
        return major == version.major && minor == version.minor && patch == version.patch
    }

    // Added to satisfy the equals()/hashCode() contract so that equal
    // Version instances behave correctly in hash-based collections.
    @Override
    int hashCode() {
        return Objects.hash(major, minor, patch)
    }

    /** True if this version is strictly higher than the given one. */
    boolean isHigher(version) {
        if (!version) throw new IllegalArgumentException("The parameter 'version' can not be null.")
        return major > version.major || major == version.major && ( minor > version.minor || minor == version.minor && patch > version.patch)
    }

    /** True if this version equals the given one, or is higher within the same major line. */
    boolean isCompatibleVersion(version) {
        if (!version) throw new IllegalArgumentException("The parameter 'version' can not be null.")
        return this == version || isHigher(version) && major == version.major
    }

    /** '<major>.<minor>.<patch>', or '<major>.<minor>' when no patch level is set. */
    @Override
    String toString() {
        return patch != -1 ? "$major.$minor.$patch".toString() : "$major.$minor".toString()
    }
}

View File

@ -1,73 +0,0 @@
package com.sap.piper
import hudson.AbortException
/**
 * Verifies that tools installed on the Jenkins agent have an expected (or
 * compatible) version. The installed version is obtained by invoking the
 * tool itself (e.g. 'executable --version') and parsed via the Version
 * class; failures are reported as hudson.AbortException.
 */
class VersionUtils implements Serializable {

    /** Returns the installed version of the tool as a Version object. */
    def static getVersion(script, name, executable, versionOption) {
        return new Version(getVersionDesc(script, name, executable, versionOption))
    }

    /**
     * Returns the raw version output produced by running
     * '$executable $versionOption' in a bash shell.
     * @throws AbortException when invoking the executable fails
     */
    def static getVersionDesc(script, name, executable, versionOption) {
        def toolVersion
        try {
            toolVersion = script.sh returnStdout: true, script: """#!/bin/bash
                $executable $versionOption"""
        } catch(AbortException e) {
            throw new AbortException("The verification of $name failed. Please check '$executable'. $e.message.")
        }
        return toolVersion
    }

    /** Aborts unless the installed tool version is compatible with 'version'. */
    def static verifyVersion(script, name, executable, String version, versionOption) {
        script.echo "Verifying $name version $version or compatible version."
        Version installedVersion = getVersion(script, name, executable, versionOption)
        if (!installedVersion.isCompatibleVersion(new Version(version))) {
            throw new AbortException("The installed version of $name is ${installedVersion.toString()}. Please install version $version or a compatible version.")
        }
        script.echo "Verification success. $name version ${installedVersion.toString()} is installed."
    }

    /** Variant working on an already retrieved version description string. */
    def static verifyVersion(script, name, String versionDesc, String versionExpected) {
        script.echo "Verifying $name version $versionExpected or compatible version."
        Version versionAvailable = new Version(versionDesc)
        if (!versionAvailable.isCompatibleVersion(new Version(versionExpected))) {
            throw new AbortException("The installed version of $name is ${versionAvailable.toString()}. Please install version $versionExpected or a compatible version.")
        }
        script.echo "Verification success. $name version ${versionAvailable.toString()} is installed."
    }

    /** Variant resolving the version output first, then matching it against a map of expectations. */
    def static verifyVersion(script, name, executable, Map versions, versionOption) {
        def versionDesc = getVersionDesc(script, name, executable, versionOption)
        verifyVersion(script, name, versionDesc, versions)
    }

    /**
     * Checks each map entry whose key occurs in the version description;
     * the entry's value is the expected version for that tool flavor.
     * Entries whose key does not occur in the description are skipped.
     */
    def static verifyVersion(script, name, String versionDesc, Map versions) {
        for (def entry : versions) {
            if (versionDesc.contains(entry.getKey())) {
                def installedVersion = new Version(versionDesc)
                def expectedVersion = entry.getValue()
                script.echo "Verifying $name version $expectedVersion or compatible version."
                if (!installedVersion.isCompatibleVersion(new Version(expectedVersion))) {
                    throw new AbortException("The installed version of $name is ${installedVersion.toString()}. Please install version $expectedVersion or a compatible version.")
                }
                script.echo "Verification success. $name version ${installedVersion.toString()} is installed."
            }
        }
        script.echo "Verification success."
    }
}

View File

@ -0,0 +1,129 @@
package com.sap.piper
import com.cloudbees.groovy.cps.NonCPS
/**
 * Prepares the WhiteSource Unified Agent configuration: reads the module's
 * config file, forces/complements scan settings depending on product and
 * scan type, and writes the result to a new suffixed file that is then used
 * for the scan (config.whitesource.configFilePath is updated accordingly).
 */
class WhitesourceConfigurationHelper implements Serializable {

    /**
     * Extends the Unified Agent configuration found at
     * config.whitesource.configFilePath. Mapping entries have the shape
     * [name: property, value: value, force: overwrite-existing?,
     *  omitIfPresent: skip-if-this-other-property-is-set].
     */
    static def extendUAConfigurationFile(script, utils, config, path) {
        def mapping = []
        def parsingClosure = { fileReadPath -> return script.readProperties (file: fileReadPath) }
        def serializationClosure = { configuration -> serializeUAConfig(configuration) }
        def inputFile = config.whitesource.configFilePath.replaceFirst('\\./', '')
        // suffix derived from the file location makes the target file name unique per module
        def suffix = utils.generateSha1("${path}${inputFile}")
        def targetFile = "${inputFile}.${suffix}"
        // products named 'DIST - ...' are scanned without policy checks
        if(config.whitesource.productName.startsWith('DIST - ')) {
            mapping += [
                [name: 'checkPolicies', value: false, force: true],
                [name: 'forceCheckAllDependencies', value: false, force: true]
            ]
        } else {
            mapping += [
                [name: 'checkPolicies', value: true, force: true],
                [name: 'forceCheckAllDependencies', value: true, force: true]
            ]
        }
        if(config.verbose)
            mapping += [name: 'log.level', value: 'debug']
        mapping += [
            [name: 'apiKey', value: config.whitesource.orgToken, force: true],
            [name: 'productName', value: config.whitesource.productName, force: true],
            [name: 'productVersion', value: config.whitesource.productVersion?:'', force: true],
            [name: 'projectName', value: config.whitesource.projectName, force: true],
            // NOTE(review): projectVersion is deliberately fed from productVersion - verify this is intended
            [name: 'projectVersion', value: config.whitesource.productVersion?:'', force: true],
            [name: 'productToken', value: config.whitesource.productToken, omitIfPresent: 'projectToken', force: true],
            [name: 'userKey', value: config.whitesource.userKey, force: true],
            [name: 'forceUpdate', value: true, force: true],
            [name: 'offline', value: false, force: true],
            [name: 'ignoreSourceFiles', value: true, force: true],
            [name: 'resolveAllDependencies', value: false, force: true],
            [name: 'failErrorLevel', value: 'ALL', force: true],
            [name: 'case.sensitive.glob', value: false],
            [name: 'followSymbolicLinks', value: true]
        ]
        // scan-type specific hardening of the agent configuration
        switch (config.scanType) {
            case 'pip':
                mapping += [
                    [name: 'python.resolveDependencies', value: true, force: true],
                    [name: 'python.ignoreSourceFiles', value: true, force: true],
                    [name: 'python.ignorePipInstallErrors', value: false],
                    [name: 'python.installVirtualenv', value: true],
                    [name: 'python.resolveHierarchyTree', value: true],
                    [name: 'python.requirementsFileIncludes', value: 'requirements.txt'],
                    [name: 'python.resolveSetupPyFiles', value: true],
                    [name: 'python.runPipenvPreStep', value: true],
                    [name: 'python.pipenvDevDependencies', value: true],
                    [name: 'python.IgnorePipenvInstallErrors', value: false],
                    [name: 'includes', value: '**/*.py **/*.txt'],
                    [name: 'excludes', value: '**/*sources.jar **/*javadoc.jar']
                ]
                break
            case 'sbt':
                mapping += [
                    [name: 'sbt.resolveDependencies', value: true, force: true],
                    [name: 'sbt.ignoreSourceFiles', value: true, force: true],
                    [name: 'sbt.aggregateModules', value: false, force: true],
                    [name: 'sbt.runPreStep', value: true],
                    [name: 'includes', value: '**/*.jar'],
                    [name: 'excludes', value: '**/*sources.jar **/*javadoc.jar']
                ]
                break
            case 'golang':
                mapping += [
                    [name: 'go.resolveDependencies', value: true, force: true],
                    [name: 'go.ignoreSourceFiles', value: true, force: true],
                    [name: 'go.collectDependenciesAtRuntime', value: false],
                    [name: 'go.dependencyManager', value: 'dep'],
                    [name: 'includes', value: '**/*.lock'],
                    [name: 'excludes', value: '**/*sources.jar **/*javadoc.jar']
                ]
                break
            default:
                script.echo "[Warning][Whitesource] Configuration for scanType: '${config.scanType}' is not yet hardened, please do a quality assessment of your scan results."
        }
        rewriteConfiguration(script, utils, config, mapping, suffix, path, inputFile, targetFile, parsingClosure, serializationClosure)
    }

    /**
     * Applies the mapping to the parsed configuration and writes the result
     * to the target file. Entries are applied when their omitIfPresent
     * property is absent, and either force is set or the property is not
     * yet configured. The written file is stashed (when stashContent is
     * maintained) and becomes the new config.whitesource.configFilePath.
     */
    static private def rewriteConfiguration(script, utils, config, mapping, suffix, path, inputFile, targetFile, parsingClosure, serializationClosure) {
        def inputFilePath = "${path}${inputFile}"
        def outputFilePath = "${path}${targetFile}"
        def moduleSpecificFile = parsingClosure(inputFilePath)
        // fall back to the globally configured file, then to an empty config
        if (!moduleSpecificFile && inputFilePath != config.whitesource.configFilePath)
            moduleSpecificFile = parsingClosure(config.whitesource.configFilePath)
        if (!moduleSpecificFile)
            moduleSpecificFile = [:]

        mapping.each {
            entry ->
                def dependentValue = entry.omitIfPresent ? moduleSpecificFile[entry.omitIfPresent] : null
                if ((entry.omitIfPresent && !dependentValue || !entry.omitIfPresent) && (entry.force || moduleSpecificFile[entry.name] == null) && entry.value != 'null')
                    moduleSpecificFile[entry.name] = entry.value.toString()
        }

        def output = serializationClosure(moduleSpecificFile)

        if(config.verbose)
            script.echo "Writing config file ${outputFilePath} with content:\n${output}"
        script.writeFile file: outputFilePath, text: output
        if(config.stashContent && config.stashContent.size() > 0) {
            def stashName = "modified whitesource config ${suffix}".toString()
            utils.stashWithMessage (
                stashName,
                "Stashing modified Whitesource configuration",
                outputFilePath.replaceFirst('\\./', '')
            )
            config.stashContent += [stashName]
        }
        config.whitesource.configFilePath = outputFilePath
    }

    /** Serializes the configuration map in java.util.Properties format. */
    @NonCPS
    static private def serializeUAConfig(configuration) {
        Properties p = new Properties()
        p.putAll(configuration)

        new StringWriter().with{ w -> p.store(w, null); w }.toString()
    }
}

View File

@ -0,0 +1,41 @@
package com.sap.piper.analytics
import com.cloudbees.groovy.cps.NonCPS
/**
 * Process-wide singleton collecting custom Influx measurement data
 * (fields and tags) reported by pipeline steps.
 */
class InfluxData implements Serializable{

    // Each entry of 'fields' represents a measurement in Influx; additional
    // measurements can be added on the fly via addField().
    protected Map fields = [jenkins_custom_data: [:], pipeline_data: [:], step_data: [:]]
    // Each entry of 'tags' holds the tags for a measurement; tags are
    // required in Influx for easier querying of the data.
    protected Map tags = [jenkins_custom_data: [:], pipeline_data: [:], step_data: [:]]

    public Map getFields(){ return fields }
    public Map getTags(){ return tags }

    protected static InfluxData instance

    // Lazily created singleton instance.
    @NonCPS
    public static InfluxData getInstance(){
        if(!instance) {
            instance = new InfluxData()
        }
        return instance
    }

    /** Records a field value for the given measurement. */
    public static void addField(String measurement, String key, value) {
        add(getInstance().getFields(), measurement, key, value)
    }

    /** Records a tag value for the given measurement. */
    public static void addTag(String measurement, String key, value) {
        add(getInstance().getTags(), measurement, key, value)
    }

    protected static void add(Map dataMap, String measurement, String field, value) {
        // create (or replace an empty) measurement bucket before writing
        def bucket = dataMap[measurement]
        if (!bucket) {
            bucket = [:]
            dataMap[measurement] = bucket
        }
        bucket[field] = value
    }

    /** Drops the singleton so the next access starts with empty data. */
    public static void reset(){
        instance = null
    }
}

View File

@ -1,7 +1,8 @@
package com.sap.piper.analytics
import com.cloudbees.groovy.cps.NonCPS
import org.jenkinsci.plugins.workflow.steps.MissingContextVariableException
import org.jenkinsci.plugins.workflow.steps.FlowInterruptedException
class Telemetry implements Serializable{
@ -45,53 +46,39 @@ class Telemetry implements Serializable{
}
protected static void piperOsDefaultReporting(Script steps, Map payload) {
def swaEndpoint = 'https://webanalytics.cfapps.eu10.hana.ondemand.com/tracker/log'
Map swaPayload = [
'idsite': '827e8025-1e21-ae84-c3a3-3f62b70b0130',
'url': 'https://github.com/SAP/jenkins-library',
'action_name': payload.actionName,
'event_type': payload.eventType,
'custom3': payload.step, // custom3 = step name (passed as parameter step)
'custom4': payload.jobUrlSha1, // custom4 = job url hashed (calculated)
'custom5': payload.buildUrlSha1, // custom5 = build url hashed (calculated)
'custom10': payload.stageName // custom10 = stage name
]
// step related parameters
for(def key : [1, 2, 3, 4, 5]){ // custom11 - custom15 = step related parameter 1 - 5 (passed as parameter stepParam1 - stepParam5)
if (payload["stepParam${key}"] != null) swaPayload.put("custom1${key}", payload["stepParam${key}"])
}
try {
def swaCustom = [:]
/* SWA custom parameters:
custom3 = step name (passed as parameter step)
custom4 = job url hashed (calculated)
custom5 = build url hashed (calculated)
custom10 = stage name
custom11 = step related parameter 1 (passed as parameter stepParam1)
custom12 = step related parameter 2 (passed as parameter stepParam2)
custom13 = step related parameter 3 (passed as parameter stepParam3)
custom14 = step related parameter 4 (passed as parameter stepParam4)
custom15 = step related parameter 5 (passed as parameter stepParam5)
*/
def swaUrl = 'https://webanalytics.cfapps.eu10.hana.ondemand.com/tracker/log'
def idsite = '827e8025-1e21-ae84-c3a3-3f62b70b0130'
def url = 'https://github.com/SAP/jenkins-library'
swaCustom.custom3 = payload.step
swaCustom.custom4 = payload.jobUrlSha1
swaCustom.custom5 = payload.buildUrlSha1
swaCustom.custom10 = payload.stageName
swaCustom.custom11 = payload.stepParam1
swaCustom.custom12 = payload.stepParam2
swaCustom.custom13 = payload.stepParam3
swaCustom.custom14 = payload.stepParam4
swaCustom.custom15 = payload.stepParam5
def options = []
options.push("-G")
options.push("-v \"${swaUrl}\"")
options.push("--data-urlencode \"action_name=${payload.actionName}\"")
options.push("--data-urlencode \"idsite=${idsite}\"")
options.push("--data-urlencode \"url=${url}\"")
options.push("--data-urlencode \"event_type=${payload.eventType}\"")
for(def key : ['custom3', 'custom4', 'custom5', 'custom10', 'custom11', 'custom12', 'custom13', 'custom14', 'custom15']){
if (swaCustom[key] != null) options.push("--data-urlencode \"${key}=${swaCustom[key]}\"")
steps.timeout(
time: 10,
unit: 'SECONDS'
){
steps.httpRequest(url: "${swaEndpoint}?${getPayloadString(swaPayload)}", timeout: 5, quiet: true)
}
options.push("--connect-timeout 5")
options.push("--max-time 20")
steps.sh(returnStatus: true, script: "#!/bin/sh +x\ncurl ${options.join(' ')} > /dev/null 2>&1 || echo '[${payload.step}] Telemetry Report to SWA failed!'")
} catch (MissingContextVariableException noNode) {
steps.echo "[${payload.step}] Telemetry Report to SWA skipped, no node available!"
} catch (FlowInterruptedException ignore){
// telemetry reporting timed out. This should not break anything though.
steps.echo "[${payload.step}] Telemetry Report with listener failed: timeout"
}
}
@NonCPS
private static String getPayloadString(Map payload){
return payload
.collect { entry -> return "${entry.key}=${URLEncoder.encode(entry.value.toString(), "UTF-8")}" }
.join('&')
}
}

View File

@ -15,7 +15,7 @@ public class StepHelpers {
}
transportRequestId = script.commonPipelineEnvironment.getTransportRequestId()
transportRequestId = script.commonPipelineEnvironment.getValue('transportRequestId')
if(transportRequestId?.trim()) {
script.echo "[INFO] Transport request id '${transportRequestId}' retrieved from common pipeline environment."
@ -33,7 +33,7 @@ public class StepHelpers {
configuration.changeManagement.git.format
)
script.commonPipelineEnvironment.setTransportRequestId(transportRequestId)
script.commonPipelineEnvironment.setValue('transportRequestId', "${transportRequestId}")
script.echo "[INFO] Transport request id '${transportRequestId}' retrieved from commit history"
} catch(ChangeManagementException ex) {

View File

@ -0,0 +1,108 @@
package com.sap.piper.integration
import com.cloudbees.groovy.cps.NonCPS
import com.sap.piper.JsonUtils
/**
 * Client for WhiteSource backend operations that require organization-admin
 * permissions: looking up product metadata and creating products.
 * Requests are sent either through an injected accessor class
 * ({@code config.whitesourceAccessor}) or via this class' own httpRequest call.
 */
class WhitesourceOrgAdminRepository implements Serializable {

    final Script script
    // optional delegate performing the HTTP calls (instantiated from a class name)
    final internalWhitesource
    final Map config

    WhitesourceOrgAdminRepository(Script script, Map config) {
        this.script = script
        this.config = config
        // serviceUrl may be omitted only if a custom accessor is configured
        if(!this.config.whitesource?.serviceUrl && !this.config.whitesourceAccessor)
            script.error "Parameter 'whitesource.serviceUrl' must be provided as part of the configuration."
        if(this.config.whitesourceAccessor instanceof String) {
            def clazz = this.class.classLoader.loadClass(this.config.whitesourceAccessor)
            this.internalWhitesource = clazz?.newInstance(this.script, this.config)
        }
    }

    /**
     * Queries all product vitals of the organization and returns the entry
     * matching {@code config.whitesource.productName} (or null if not found).
     */
    def fetchProductMetaInfo() {
        def requestBody = [
            requestType: "getOrganizationProductVitals",
            orgToken: config.whitesource.orgToken
        ]
        def parsedResponse = issueHttpRequest(requestBody)

        findProductMeta(parsedResponse)
    }

    // Linear search for the configured product name in the vitals list.
    def findProductMeta(parsedResponse) {
        def foundMetaProduct = null
        for (product in parsedResponse.productVitals) {
            if (product.name == config.whitesource.productName) {
                foundMetaProduct = product
                break
            }
        }

        return foundMetaProduct
    }

    /**
     * Creates the configured product and assigns the configured e-mail addresses
     * ({@code emailAddressesOfInitialProductAdmins}) as product admins.
     * Returns the parsed response of the createProduct call (contains productToken).
     */
    def createProduct() {
        def requestBody = [
            requestType: "createProduct",
            orgToken: config.whitesource.orgToken,
            productName: config.whitesource.productName
        ]
        def parsedResponse = issueHttpRequest(requestBody)
        def metaInfo = parsedResponse

        def groups = []
        def users = []
        config.whitesource.emailAddressesOfInitialProductAdmins.each {
            email -> users.add(["email": email])
        }

        // second call: grant the initial admins on the freshly created product
        requestBody = [
            "requestType" : "setProductAssignments",
            "productToken" : metaInfo.productToken,
            "productMembership" : ["userAssignments":[], "groupAssignments":groups],
            "productAdmins" : ["userAssignments":users],
            "alertsEmailReceivers" : ["userAssignments":[]]
        ]
        issueHttpRequest(requestBody)

        return metaInfo
    }

    /**
     * Sends the request (via the accessor if configured), parses the JSON response
     * and fails the build on a WhiteSource-reported error code.
     */
    def issueHttpRequest(requestBody) {
        def response = internalWhitesource ? internalWhitesource.httpWhitesource(requestBody) : httpWhitesource(requestBody)
        def parsedResponse = new JsonUtils().jsonStringToGroovyObject(response.content)
        if(parsedResponse?.errorCode){
            script.error "[WhiteSource] Request failed with error message '${parsedResponse.errorMessage}' (${parsedResponse.errorCode})."
        }
        return parsedResponse
    }

    // POSTs the JSON body to the service URL, authenticating with the org-admin
    // user key; honors an HTTP_PROXY environment variable.
    // NOTE(review): annotated @NonCPS but calls the CPS steps script.echo /
    // script.httpRequest — confirm this works in the target Jenkins setup.
    @NonCPS
    protected def httpWhitesource(requestBody) {
        requestBody["userKey"] = config.whitesource.orgAdminUserKey
        def serializedBody = new JsonUtils().groovyObjectToPrettyJsonString(requestBody)
        def params = [
            url        : config.whitesource.serviceUrl,
            httpMode   : 'POST',
            acceptType : 'APPLICATION_JSON',
            contentType: 'APPLICATION_JSON',
            requestBody: serializedBody,
            quiet      : !config.verbose,
            timeout    : config.whitesource.timeout
        ]

        if (script.env.HTTP_PROXY)
            params["httpProxy"] = script.env.HTTP_PROXY

        if (config.verbose)
            script.echo "Sending http request with parameters ${params}"

        def response = script.httpRequest(params)

        if (config.verbose)
            script.echo "Received response ${response}"

        return response
    }
}

View File

@ -0,0 +1,206 @@
package com.sap.piper.integration
import com.cloudbees.groovy.cps.NonCPS
import com.sap.piper.JsonUtils
/**
 * Client for the WhiteSource backend (project/product scope): fetches project
 * metadata, security vulnerability alerts, license policy alerts and risk reports.
 */
class WhitesourceRepository implements Serializable {

    final Script script
    final Map config

    WhitesourceRepository(Script script, Map config) {
        this.script = script
        this.config = config

        if(!config?.whitesource?.serviceUrl)
            script.error "Parameter 'whitesource.serviceUrl' must be provided as part of the configuration."
    }

    /**
     * Collects SECURITY_VULNERABILITY alerts — per project when
     * {@code whitesource.projectNames} is configured, otherwise for the whole
     * product — and returns them sorted by descending score.
     */
    List fetchVulnerabilities(whitesourceProjectsMetaInformation) {
        def fetchedVulnerabilities = []
        if (config.whitesource.projectNames) {
            for (int i = 0; i < whitesourceProjectsMetaInformation.size(); i++) {
                fetchSecurityAlertsPerItem(whitesourceProjectsMetaInformation[i].token, "getProjectAlertsByType", fetchedVulnerabilities)
            }
        } else {
            fetchSecurityAlertsPerItem(config.whitesource.productToken, "getProductAlertsByType", fetchedVulnerabilities)
        }

        sortVulnerabilitiesByScore(fetchedVulnerabilities)

        return fetchedVulnerabilities
    }

    // Fetches SECURITY_VULNERABILITY alerts for one project/product token and
    // appends them to the accumulator list.
    private fetchSecurityAlertsPerItem(token, type, List<Object> fetchedVulnerabilities) {
        def requestBody = [
            requestType : type,
            alertType   : "SECURITY_VULNERABILITY",
            projectToken: token
        ]

        def response = fetchWhitesourceResource(requestBody)
        fetchedVulnerabilities.addAll(response.alerts)
    }

    /**
     * Sends the request, parses the JSON response and fails the build when the
     * backend reports an error code.
     */
    protected def fetchWhitesourceResource(Map requestBody) {
        final def response = httpWhitesource(requestBody)
        def parsedResponse = new JsonUtils().jsonStringToGroovyObject(response.content)

        if(parsedResponse?.errorCode){
            script.error "[WhiteSource] Request failed with error message '${parsedResponse.errorMessage}' (${parsedResponse.errorCode})."
        }

        return parsedResponse
    }

    // In-place sort by groupId, then artifactId (ascending).
    @NonCPS
    void sortLibrariesAlphabeticallyGAV(List libraries) {
        script.echo "found a total of ${libraries.size()} dependencies (direct and indirect)"
        libraries.sort { o1, o2 ->
            String groupID1 = o1.groupId
            String groupID2 = o2.groupId
            def comparisionResult = groupID1 <=> groupID2;
            if (comparisionResult != 0) {
                comparisionResult
            } else {
                String artifactID1 = o1.artifactId
                String artifactID2 = o2.artifactId

                artifactID1 <=> artifactID2
            }
        }
    }

    // In-place sort by severity, descending. A CVSS v3 score of 0 is treated as
    // "not available" and the legacy score is used instead; ties break on the
    // legacy score.
    @NonCPS
    void sortVulnerabilitiesByScore(List vulnerabilities) {
        script.echo "${vulnerabilities.size() > 0 ? 'WARNING: ' : ''}found a total of ${vulnerabilities.size()} vulnerabilities"
        vulnerabilities.sort { o1, o2 ->
            def cvss3score1 = o1.vulnerability.cvss3_score == 0 ? o1.vulnerability.score : o1.vulnerability.cvss3_score
            def cvss3score2 = o2.vulnerability.cvss3_score == 0 ? o2.vulnerability.score : o2.vulnerability.cvss3_score
            def comparisionResult = cvss3score1 <=> cvss3score2
            if (comparisionResult != 0) {
                -comparisionResult
            } else {
                def score1 = o1.vulnerability.score
                def score2 = o2.vulnerability.score
                -(score1 <=> score2)
            }
        }
    }

    /**
     * Resolves the configured project names to their backend metadata.
     * Returns an empty list when no project names are configured; fails the
     * build when the product has no projects at all.
     */
    List fetchProjectsMetaInfo() {
        def projectsMetaInfo = []
        if(config.whitesource.projectNames){
            def requestBody = [
                requestType: "getProductProjectVitals",
                productToken: config.whitesource.productToken
            ]
            def response = fetchWhitesourceResource(requestBody)

            if(response?.projectVitals) {
                projectsMetaInfo.addAll(findProjectsMeta(response.projectVitals))
            } else {
                script.error "[WhiteSource] Could not fetch any projects for product '${config.whitesource.productName}' from backend, response was ${response}"
            }
        }
        return projectsMetaInfo
    }

    // Matches each configured (trimmed) project name against the vitals list;
    // fails the build for any name that cannot be found.
    List findProjectsMeta(projectVitals) {
        def matchedProjects = []
        for (int i = 0; i < config.whitesource.projectNames?.size(); i++) {
            def requestedProjectName = config.whitesource.projectNames[i].trim()
            def matchedProjectInfo = null

            for (int j = 0; j < projectVitals.size(); j++) {
                def projectResponse = projectVitals[j]
                if (projectResponse.name == requestedProjectName) {
                    matchedProjectInfo = projectResponse
                    break
                }
            }

            if (matchedProjectInfo != null) {
                matchedProjects.add(matchedProjectInfo)
            } else {
                script.error "[WhiteSource] Could not fetch/find requested project '${requestedProjectName}' for product '${config.whitesource.productName}'"
            }
        }

        return matchedProjects
    }

    /**
     * Downloads the product risk report (binary) into {@code reportName},
     * bypassing any caches via explicit no-cache headers.
     */
    void fetchReportForProduct(reportName) {
        def headers = [[name: 'Cache-Control', value: 'no-cache, no-store, must-revalidate'], [name: 'Pragma', value: 'no-cache']]
        def requestContent = [
            requestType: "getProductRiskReport",
            productToken: config.whitesource.productToken
        ]

        //fetchFileFromWhiteSource(reportName, requestContent)
        httpWhitesource(requestContent, 'APPLICATION_OCTETSTREAM', headers, reportName)
    }

    // Returns the product's REJECTED_BY_POLICY_RESOURCE (license) alerts.
    def fetchProductLicenseAlerts() {
        def requestContent = [
            requestType: "getProductAlertsByType",
            alertType: "REJECTED_BY_POLICY_RESOURCE",
            productToken: config.whitesource.productToken
        ]
        def parsedResponse = fetchWhitesourceResource(requestContent)

        return parsedResponse
    }

    // Returns one project's REJECTED_BY_POLICY_RESOURCE (license) alerts.
    def fetchProjectLicenseAlerts(String projectToken) {
        def requestContent = [
            requestType: "getProjectAlertsByType",
            alertType: "REJECTED_BY_POLICY_RESOURCE",
            projectToken: projectToken
        ]
        def parsedResponse = fetchWhitesourceResource(requestContent)

        return parsedResponse
    }

    // POSTs the JSON body to the service URL; optionally sets custom headers,
    // writes the response to outputFile, and honors an HTTP_PROXY env variable.
    // NOTE(review): annotated @NonCPS but calls the CPS steps script.echo /
    // script.httpRequest — confirm this works in the target Jenkins setup.
    @NonCPS
    protected def httpWhitesource(requestBody, acceptType = 'APPLICATION_JSON', customHeaders = null, outputFile = null) {
        handleAdditionalRequestParameters(requestBody)
        def serializedBody = new JsonUtils().groovyObjectToPrettyJsonString(requestBody)
        def params = [
            url        : config.whitesource.serviceUrl,
            httpMode   : 'POST',
            acceptType : acceptType,
            contentType: 'APPLICATION_JSON',
            requestBody: serializedBody,
            quiet      : !config.verbose,
            timeout    : config.whitesource.timeout
        ]

        if(customHeaders) params["customHeaders"] = customHeaders

        if (outputFile) params["outputFile"] = outputFile

        if (script.env.HTTP_PROXY) params["httpProxy"] = script.env.HTTP_PROXY

        if(config.verbose)
            script.echo "Sending http request with parameters ${params}"

        def response = script.httpRequest(params)

        if(config.verbose)
            script.echo "Received response ${response}"

        return response
    }

    // Adds the user key for authentication when one is configured.
    @NonCPS
    protected void handleAdditionalRequestParameters(params) {
        if(config.whitesource.userKey)
            params["userKey"] = config.whitesource.userKey
    }
}

View File

@ -1,122 +0,0 @@
package com.sap.piper.jenkins
import com.cloudbees.groovy.cps.NonCPS
/**
 * Minimal client for a remote Jenkins instance, used by the integration tests:
 * waits for the instance to come up, triggers jobs and polls their results via
 * the JSON remote-access API.
 */
class JenkinsController implements Serializable {
    def script
    String jenkinsUrl
    // overall limit (seconds) for job-status polling in waitForJobStatus
    def timeout

    JenkinsController(script, String jenkinsUrl = "http://localhost:8080", timeout = 3600) {
        this.script = script
        this.jenkinsUrl = jenkinsUrl
        this.timeout = timeout
    }

    /**
     * Polls the instance until it reports mode 'NORMAL'; fails the build after
     * 120 seconds.
     *
     * Fix: the previous implementation had `return false` inside the polling
     * loop, so the very first status request that succeeded with a mode other
     * than 'NORMAL' (e.g. while Jenkins was still starting or quieting down)
     * aborted the wait immediately instead of retrying until the timeout.
     */
    def waitForJenkinsStarted() {
        def timeout = 120 // startup wait (seconds), independent of this.timeout
        def timePerLoop = 5

        for (int i = 0; i < timeout; i += timePerLoop) {
            script.sleep timePerLoop
            try {
                if (retrieveJenkinsStatus() == 'NORMAL') {
                    return true
                }
            } catch (Exception e) {
                script.echo "Could not retrieve status for Jenkins at ${jenkinsUrl}/api/json. Message: ${e.getMessage()}. Retrying..."
                e.printStackTrace()
            }
        }
        script.error("Timeout: Jenkins did not start within the expected time frame.")
    }

    // Reads the instance mode ('NORMAL' once fully up) from the JSON API.
    private retrieveJenkinsStatus() {
        def apiUrl = "${jenkinsUrl}/api/json"
        script.echo "Checking Jenkins Status"
        def response = getTextFromUrl(apiUrl)
        def result = script.readJSON text: response
        return result.mode
    }

    //Trigger scanning of the multi branch builds
    def buildJob(String jobName) {
        script.sh "curl -s -X POST ${jenkinsUrl}/job/${URLEncoder.encode(jobName, 'UTF-8')}/build"
    }

    /**
     * Waits for the given branch build to finish with status SUCCESS; always
     * prints the build's console log, and fails the build otherwise.
     */
    def waitForSuccess(String jobName, String branch) {
        if (this.waitForJobStatus(jobName, branch, 'SUCCESS')) {
            this.printConsoleText(jobName, branch)
            script.echo "Build was successful"
        } else {
            this.printConsoleText(jobName, branch)
            script.error("Build of ${jobName} ${branch} was not successfull")
        }
    }

    // URL of the last build of a multibranch job's branch.
    def getBuildUrl(String jobName, String branch) {
        return "${jenkinsUrl}/job/${URLEncoder.encode(jobName, 'UTF-8')}/job/${URLEncoder.encode(branch, 'UTF-8')}/lastBuild/"
    }

    /**
     * Polls the last build of jobName/branch until it finishes.
     * Returns true when the final result equals {@code status}, false when the
     * build finished with a different result (no point in waiting further);
     * request failures are retried; fails the build when {@code this.timeout}
     * seconds elapse while the build is still running.
     */
    def waitForJobStatus(String jobName, String branch, String status) {
        def buildUrl = getBuildUrl(jobName, branch)
        def timePerLoop = 10

        for (int i = 0; i < timeout; i += timePerLoop) {
            script.sleep timePerLoop
            try {
                script.echo "Checking Build Status of ${jobName} ${branch}"
                def buildInformation = retrieveBuildInformation(jobName, branch)

                if (buildInformation.building) {
                    script.echo "Build is still in progress"
                    continue
                }
                if (buildInformation.result == status) {
                    return true
                }
            } catch (Exception e) {
                script.echo "Could not retrieve status for ${buildUrl}. Message: ${e.getMessage()}. Retrying..."
                continue
            }
            // build finished with a result different from the requested status
            return false
        }
        script.error("Timeout: Build of job ${jobName}, branch ${branch} did not finish in the expected time frame.")
    }

    // Fetches the console log of the last build of jobName/branch.
    def getConsoleText(String jobName, String branch) {
        def consoleUrl = this.getBuildUrl(jobName, branch) + "/consoleText"
        return getTextFromUrl(consoleUrl)
    }

    // Echoes the build's console log, framed by marker lines.
    def printConsoleText(String jobName, String branch) {
        String consoleOutput = getConsoleText(jobName, branch)

        script.echo '***********************************************'
        script.echo '** Begin Output of Example Application Build **'
        script.echo '***********************************************'

        script.echo consoleOutput

        script.echo '*********************************************'
        script.echo '** End Output of Example Application Build **'
        script.echo '*********************************************'
    }

    // Returns the parsed JSON of the last build (fields used: building, result).
    def retrieveBuildInformation(String jobName, String branch) {
        def buildUrl = getBuildUrl(jobName, branch)
        def url = "${buildUrl}/api/json"
        script.echo "Checking Build Status of ${jobName} ${branch}"
        script.echo "${jenkinsUrl}/job/${URLEncoder.encode(jobName, 'UTF-8')}/job/${URLEncoder.encode(branch, 'UTF-8')}/"
        def response = getTextFromUrl(url)
        def result = script.readJSON text: response
        return result
    }

    // Plain HTTP GET; kept @NonCPS as it uses non-serializable java.net objects.
    @NonCPS
    private static String getTextFromUrl(url) {
        return new URL(url).getText()
    }
}

View File

@ -1,98 +0,0 @@
package com.sap.piper.tools
import com.sap.piper.VersionUtils
import com.sap.piper.EnvironmentUtils
import com.sap.piper.FileUtils
import hudson.AbortException
/**
 * Describes a tool shipped as an executable java archive (e.g. the MTA jar):
 * resolves the jar location from environment or configuration, builds the
 * java invocation and verifies existence and version.
 */
class JavaArchiveDescriptor implements Serializable {

    final name
    final environmentKey
    final stepConfigurationKey
    final version
    final versionOption
    final javaTool
    final javaOptions

    JavaArchiveDescriptor(name, environmentKey, stepConfigurationKey, version, versionOption, javaTool, javaOptions = '') {
        this.name = name
        this.environmentKey = environmentKey
        this.stepConfigurationKey = stepConfigurationKey
        this.version = version
        this.versionOption = versionOption
        this.javaTool = javaTool
        this.javaOptions = javaOptions
    }

    /**
     * Resolves the archive path: environment variable first, then configuration;
     * aborts with a configuration hint when neither is set. A value that is not
     * a jar path is treated as a directory (legacy behavior) and '/mta.jar' is
     * appended.
     */
    def getFile(script, configuration, log = true) {
        def javaArchiveFile
        def origin
        if (EnvironmentUtils.isEnvironmentVariable(script, environmentKey)) {
            javaArchiveFile = EnvironmentUtils.getEnvironmentVariable(script, environmentKey)
            origin = 'environment'
        } else if (configuration.containsKey(stepConfigurationKey)) {
            javaArchiveFile = configuration.get(stepConfigurationKey)
            origin = 'configuration'
        } else {
            throw new AbortException(getMessage())
        }
        if (log) script.echo "$name file '$javaArchiveFile' retrieved from ${origin}."
        if (!isJavaArchiveFile(javaArchiveFile)) {
            // Compatibility code: treat the value as a directory
            javaArchiveFile += '/mta.jar'
        }
        return javaArchiveFile
    }

    // True when the value looks like a jar path ('dir/file.jar', 'dir\file.jar'
    // or a bare 'file.jar'); the bare-jar pattern subsumes the dir-prefixed one.
    def isJavaArchiveFile(String javaArchiveFile) {
        if (javaArchiveFile ==~ /(.+[\/\\])(.+[.]jar)/) return true
        return javaArchiveFile ==~ /(.+[.]jar)/
    }

    /**
     * Builds the full command line: '<java> -jar [<javaOptions>] <archive>'.
     */
    def getCall(script, configuration, log = true) {
        def javaArchiveFile = getFile(script, configuration, log)
        if (log) script.echo "Using $name '$javaArchiveFile'."
        def javaExecutable = javaTool.getToolExecutable(script, configuration, false)
        def parts = ["$javaExecutable", '-jar']
        if (javaOptions) parts << javaOptions
        parts << javaArchiveFile
        return parts.join(' ')
    }

    // Full verification: file existence plus version check.
    def verify(script, configuration) {
        verifyFile(script, configuration)
        verifyVersion(script, configuration)
    }

    // Asserts that the resolved archive file exists.
    def verifyFile(script, configuration) {
        def javaArchiveFile = getFile(script, configuration, false)
        script.echo "Verifying $name '$javaArchiveFile'."
        FileUtils.validateFile(script, javaArchiveFile)
        script.echo "Verification success. $name '$javaArchiveFile' exists."
    }

    // Runs the archive with the version option and compares against 'version'.
    def verifyVersion(script, configuration) {
        def javaArchiveCall = getCall(script, configuration, false)
        VersionUtils.verifyVersion(script, name, javaArchiveCall, version, versionOption)
    }

    // Hint telling the user which environment variable / configuration key to set.
    def getMessage() {
        def hint = "Please, configure $name. $name can be set "
        if (environmentKey) hint += "using the environment variable '$environmentKey'"
        if (environmentKey && stepConfigurationKey) hint += ", or "
        if (stepConfigurationKey) hint += "using the configuration key '$stepConfigurationKey'."
        return hint
    }
}

View File

@ -1,135 +0,0 @@
package com.sap.piper.tools
import com.sap.piper.VersionUtils
import com.sap.piper.EnvironmentUtils
import com.sap.piper.FileUtils
import com.sap.piper.Version
import hudson.AbortException
/**
 * Describes an external command-line tool: resolves its home directory from
 * environment, configuration or PATH, and verifies its existence and version
 * (either one required version or a map of acceptable versions).
 */
class ToolDescriptor implements Serializable {

    final name
    final environmentKey
    final stepConfigurationKey
    final executablePath
    final executableName
    final singleVersion
    final multipleVersions
    final versionOption

    // Constructor for tools with exactly one supported version.
    ToolDescriptor(name, environmentKey, stepConfigurationKey, executablePath, executableName, String singleVersion, versionOption) {
        this.name = name
        this.environmentKey = environmentKey
        this.stepConfigurationKey = stepConfigurationKey
        this.executablePath = executablePath
        this.executableName = executableName
        this.singleVersion = singleVersion
        this.multipleVersions = [:]
        this.versionOption = versionOption
    }

    // Constructor for tools supporting multiple versions (map form).
    ToolDescriptor(name, environmentKey, stepConfigurationKey, executablePath, executableName, Map multipleVersions, versionOption) {
        this.name = name
        this.environmentKey = environmentKey
        this.stepConfigurationKey = stepConfigurationKey
        this.executablePath = executablePath
        this.executableName = executableName
        this.singleVersion = ''
        this.multipleVersions = multipleVersions
        this.versionOption = versionOption
    }

    /**
     * Resolves the tool home: environment variable first, then configuration,
     * then PATH (empty string means "on PATH"); aborts with a configuration
     * hint when none applies.
     */
    def getToolLocation(script, configuration, log = true) {
        def toolLocation
        if (EnvironmentUtils.isEnvironmentVariable(script, environmentKey)) {
            toolLocation = EnvironmentUtils.getEnvironmentVariable(script, environmentKey)
            if (log) script.echo "$name home '$toolLocation' retrieved from environment."
        }
        else if (configuration.containsKey(stepConfigurationKey)) {
            toolLocation = configuration.get(stepConfigurationKey)
            if (log) script.echo "$name home '$toolLocation' retrieved from configuration."
        } else if (isOnPath(script, configuration)){
            toolLocation = ''
            if (log) script.echo "$name is on PATH."
        } else {
            throw new AbortException(getMessage())
        }
        return toolLocation
    }

    // Full path to the executable, or just the executable name when on PATH.
    def getTool(script, configuration, log = true) {
        def toolLocation = getToolLocation(script, configuration, log)
        if (toolLocation) {
            return "$toolLocation$executablePath$executableName"
        } else {
            return executableName
        }
    }

    // Like getTool, but additionally echoes which executable is used.
    def getToolExecutable(script, configuration, log = true) {
        def executable = getTool(script, configuration, log)
        if (log) script.echo "Using $name '$executable'."
        return executable
    }

    // Full verification: home directory, executable file and version.
    def verify(script, configuration) {
        verifyToolLocation(script, configuration)
        verifyToolExecutable(script, configuration)
        verifyVersion(script, configuration)
    }

    // Asserts that the tool home exists and is not empty (skipped when on PATH).
    def verifyToolLocation(script, configuration) {
        def toolLocation = getToolLocation(script, configuration)
        if (toolLocation) {
            script.echo "Verifying $name location '$toolLocation'."
            FileUtils.validateDirectoryIsNotEmpty(script, toolLocation)
            script.echo "Verification success. $name location '$toolLocation' exists."
        }
    }

    // Asserts that the executable file exists (skipped when on PATH).
    def verifyToolExecutable(script, configuration) {
        def home = getToolLocation(script, configuration, false)
        def tool = getTool(script, configuration, false)
        if (home) {
            script.echo "Verifying $name '$tool'."
            FileUtils.validateFile(script, tool)
            script.echo "Verification success. $name '$tool' exists."
        }
    }

    // Checks the reported version against singleVersion or multipleVersions.
    def verifyVersion(script, configuration) {
        def executable = getToolExecutable(script, configuration, false)
        def versionDesc = VersionUtils.getVersionDesc(script, name, executable, versionOption)
        if (singleVersion) VersionUtils.verifyVersion(script, name, versionDesc, singleVersion)
        if (multipleVersions) VersionUtils.verifyVersion(script, name, versionDesc, multipleVersions)
    }

    // Hint telling the user how the tool home can be provided.
    def getMessage() {
        def configOptions = "Please, configure $name home. $name home can be set "
        if (environmentKey) configOptions += "using the environment variable '$environmentKey', or "
        if (stepConfigurationKey) configOptions += "using the configuration key '$stepConfigurationKey', or "
        configOptions += "on PATH."
        return configOptions
    }

    // True when 'which <executableName>' succeeds on the build node.
    def isOnPath(script, configuration) {
        def exitStatus
        try {
            exitStatus = script.sh returnStatus: true, script: """set +x
which $executableName"""
        } catch(AbortException e) {
            throw new AbortException("The verification of $name failed, while checking if it was on PATH. Reason: $e.message.")
        }
        return exitStatus == 0
    }
}

View File

@ -78,7 +78,7 @@ class ArtifactSetVersionTest extends BasePiperTest {
return closure()
})
shellRule.setReturnValue("date --universal +'%Y%m%d%H%M%S'", '20180101010203')
shellRule.setReturnValue("date --utc +'%Y%m%d%H%M%S'", '20180101010203')
shellRule.setReturnValue('git diff --quiet HEAD', 0)
helper.registerAllowedMethod('fileExists', [String.class], {true})

View File

@ -96,7 +96,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
stageName: 'acceptance',
])
// asserts
assertThat(loggingRule.log, containsString('[cloudFoundryDeploy] General parameters: deployTool=, deployType=standard, cfApiEndpoint=https://api.cf.eu10.hana.ondemand.com, cfOrg=testOrg, cfSpace=testSpace, cfCredentialsId=myCreds, deployUser=testUser'))
assertThat(loggingRule.log, containsString('[cloudFoundryDeploy] General parameters: deployTool=, deployType=standard, cfApiEndpoint=https://api.cf.eu10.hana.ondemand.com, cfOrg=testOrg, cfSpace=testSpace, cfCredentialsId=myCreds'))
}
@Test
@ -125,7 +125,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
stageName: 'acceptance'
])
// asserts
assertThat(loggingRule.log, containsString('[cloudFoundryDeploy] General parameters: deployTool=notAvailable, deployType=standard, cfApiEndpoint=https://api.cf.eu10.hana.ondemand.com, cfOrg=testOrg, cfSpace=testSpace, cfCredentialsId=myCreds, deployUser=testUser'))
assertThat(loggingRule.log, containsString('[cloudFoundryDeploy] General parameters: deployTool=notAvailable, deployType=standard, cfApiEndpoint=https://api.cf.eu10.hana.ondemand.com, cfOrg=testOrg, cfSpace=testSpace, cfCredentialsId=myCreds'))
}
@Test

View File

@ -53,7 +53,7 @@ public class CommonStepsTest extends BasePiperTest{
'piperPipeline',
'prepareDefaultValues',
'setupCommonPipelineEnvironment',
'toolValidate',
'buildSetResult'
]
List steps = getSteps().stream()
@ -104,7 +104,6 @@ public class CommonStepsTest extends BasePiperTest{
}
private static fieldRelatedWhitelist = [
'toolValidate', // step is intended to be configured by other steps
'durationMeasure', // only expects parameters via signature
'prepareDefaultValues', // special step (infrastructure)
'piperPipeline', // special step (infrastructure)
@ -114,7 +113,8 @@ public class CommonStepsTest extends BasePiperTest{
'pipelineExecute', // special step (infrastructure)
'commonPipelineEnvironment', // special step (infrastructure)
'handlePipelineStepErrors', // special step (infrastructure)
'piperStageWrapper' //intended to be called from within stages
'piperStageWrapper', //intended to be called from within stages
'buildSetResult'
]
@Test
@ -173,7 +173,8 @@ public class CommonStepsTest extends BasePiperTest{
def whitelist = [
'commonPipelineEnvironment',
'piperPipeline'
'piperPipeline',
'buildSetResult'
]
def stepsWithWrongStepName = []

View File

@ -0,0 +1,143 @@
#!groovy
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.RuleChain
import util.BasePiperTest
import util.JenkinsCredentialsRule
import util.JenkinsDockerExecuteRule
import util.JenkinsReadYamlRule
import util.JenkinsShellCallRule
import util.JenkinsStepRule
import util.Rules
import static org.hamcrest.CoreMatchers.containsString
import static org.hamcrest.CoreMatchers.is
import static org.hamcrest.Matchers.allOf
import static org.hamcrest.Matchers.hasItem
import static org.hamcrest.Matchers.not
import static org.junit.Assert.assertThat
class DetectExecuteScanTest extends BasePiperTest {
private JenkinsDockerExecuteRule dockerRule = new JenkinsDockerExecuteRule(this)
private JenkinsShellCallRule shellRule = new JenkinsShellCallRule(this)
private JenkinsStepRule stepRule = new JenkinsStepRule(this)
private String detectProperties = ''
@Rule
public RuleChain rules = Rules
.getCommonRules(this)
.around(new JenkinsReadYamlRule(this))
.around(shellRule)
.around(dockerRule)
.around(stepRule)
.around(new JenkinsCredentialsRule(this)
.withCredentials('testCredentials', 'testToken')
)
@Before
void init() {
detectProperties = ''
helper.registerAllowedMethod('synopsys_detect', [String.class], {s ->
detectProperties = s
})
}
@Test
void testDetectDefault() {
stepRule.step.detectExecuteScan([
apiTokenCredentialsId: 'testCredentials',
projectName: 'testProject',
serverUrl: 'https://test.blackducksoftware.com',
juStabUtils: utils,
script: nullScript
])
//ToDo: assert unstashing
assertThat(detectProperties, containsString("--detect.project.name='testProject'"))
assertThat(detectProperties, containsString("--detect.project.version.name='1'"))
assertThat(detectProperties, containsString("--blackduck.url=https://test.blackducksoftware.com"))
assertThat(detectProperties, containsString("--blackduck.api.token=testToken"))
assertThat(detectProperties, containsString("--detect.blackduck.signature.scanner.paths=."))
assertThat(detectProperties, containsString("--blackduck.signature.scanner.memory=4096"))
assertThat(detectProperties, containsString("--blackduck.timeout=6000"))
assertThat(detectProperties, containsString("--blackduck.trust.cert=true"))
assertThat(detectProperties, containsString("--detect.report.timeout=4800"))
}
@Test
void testDetectCustomPaths() {
stepRule.step.detectExecuteScan([
apiTokenCredentialsId: 'testCredentials',
projectName: 'testProject',
scanPaths: ['test1/', 'test2/'],
serverUrl: 'https://test.blackducksoftware.com',
juStabUtils: utils,
script: nullScript
])
assertThat(detectProperties, containsString("--detect.blackduck.signature.scanner.paths=test1/,test2/"))
}
@Test
void testDetectSourceScanOnly() {
stepRule.step.detectExecuteScan([
apiTokenCredentialsId: 'testCredentials',
projectName: 'testProject',
scanners: ['source'],
serverUrl: 'https://test.blackducksoftware.com',
juStabUtils: utils,
script: nullScript
])
assertThat(detectProperties, not(containsString("--detect.blackduck.signature.scanner.paths=.")))
assertThat(detectProperties, containsString("--detect.source.path=."))
}
@Test
void testDetectGolang() {
stepRule.step.detectExecuteScan([
buildTool: 'golang',
apiTokenCredentialsId: 'testCredentials',
projectName: 'testProject',
serverUrl: 'https://test.blackducksoftware.com',
juStabUtils: utils,
script: nullScript
])
assertThat(dockerRule.dockerParams.dockerImage, is('golang:1.12-stretch'))
assertThat(dockerRule.dockerParams.dockerWorkspace, is(''))
assertThat(dockerRule.dockerParams.stashContent, allOf(hasItem('buildDescriptor'),hasItem('checkmarx')))
assertThat(shellRule.shell, hasItem('curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh'))
assertThat(shellRule.shell, hasItem('ln --symbolic $(pwd) $GOPATH/src/hub'))
assertThat(shellRule.shell, hasItem('cd $GOPATH/src/hub && dep ensure'))
}
@Test
void testCustomScanProperties() {
    // Custom scanProperties must override matching defaults (signature scanner
    // memory, report timeout) while unrelated custom flags pass through unchanged.
    stepRule.step.detectExecuteScan([
        scanProperties: ['--blackduck.signature.scanner.memory=1024', '--myNewOne'],
        apiTokenCredentialsId: 'testCredentials',
        projectName: 'testProject',
        serverUrl: 'https://test.blackducksoftware.com',
        juStabUtils: utils,
        script: nullScript
    ])
    assertThat(detectProperties, containsString("--detect.project.name='testProject'"))
    assertThat(detectProperties, containsString("--detect.project.version.name='1'"))
    // custom value replaces the default memory setting
    assertThat(detectProperties, containsString("--blackduck.signature.scanner.memory=1024"))
    assertThat(detectProperties, not(containsString("--blackduck.signature.scanner.memory=4096")))
    // default report timeout is dropped when custom properties are supplied
    assertThat(detectProperties, not(containsString("--detect.report.timeout=4800")))
    assertThat(detectProperties, containsString("--myNewOne"))
}
}

View File

@ -240,7 +240,7 @@ class DockerExecuteOnKubernetesTest extends BasePiperTest {
],
containerName: 'mavenexecute',
containerPortMappings: [
'selenium/standalone-chrome': [[containerPort: 4444, hostPort: 4444]]
'selenium/standalone-chrome': [[containerPort: 4444]]
],
containerWorkspaces: [
'selenium/standalone-chrome': ''
@ -263,8 +263,7 @@ class DockerExecuteOnKubernetesTest extends BasePiperTest {
hasItem('maven:3.5-jdk-8-alpine'),
hasItem('selenium/standalone-chrome'),
))
// assertThat(portList, is(null))
assertThat(portList, hasItem([[name: 'selenium0', containerPort: 4444, hostPort: 4444]]))
assertThat(portList, hasItem([[name: 'selenium0', containerPort: 4444]]))
assertThat(containerCommands.size(), is(1))
assertThat(envList, hasItem(hasItem(allOf(hasEntry('name', 'customEnvKey'), hasEntry ('value','customEnvValue')))))
}

View File

@ -1,10 +1,16 @@
#!groovy
import com.sap.piper.analytics.InfluxData
import org.junit.Rule
import org.junit.Test
import util.BasePiperTest
import static org.junit.Assert.assertTrue
import static org.hamcrest.Matchers.hasKey
import static org.hamcrest.Matchers.is
import static org.hamcrest.Matchers.not
import static org.junit.Assert.assertThat
import org.junit.rules.RuleChain
import util.Rules
@ -27,8 +33,12 @@ class DurationMeasureTest extends BasePiperTest {
stepRule.step.durationMeasure(script: nullScript, measurementName: 'test') {
bodyExecuted = true
}
assertTrue(nullScript.commonPipelineEnvironment.getPipelineMeasurement('test') != null)
assertTrue(bodyExecuted)
// doesn't work
//assertThat(InfluxData.getInstance().getFields(), hasEntry('pipeline_data', hasEntry('test', is(anything()))))
assertThat(InfluxData.getInstance().getFields(), hasKey('pipeline_data'))
assertThat(InfluxData.getInstance().getFields().pipeline_data, hasKey('test'))
assertThat(InfluxData.getInstance().getFields().pipeline_data.test, is(not(null)))
assertThat(bodyExecuted, is(true))
assertJobStatusSuccess()
}
}

View File

@ -72,14 +72,6 @@ class FioriOnCloudPlatformPipelineTest extends BasePiperTest {
// needed since we have dockerExecute inside mtaBuild
JenkinsUtils.metaClass.static.isPluginActive = {def s -> false}
//
// Things we validate:
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*echo \\$JAVA_HOME.*', '/opt/sap/java')
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*echo \\$MTA_JAR_LOCATION.*', '/opt/sap')
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*echo \\$NEO_HOME.*', '/opt/sap/neo')
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, ".*bin/java -version.*", '1.8.0') // the java version
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, ".*bin/java -jar .*mta.jar", '1.36.0') // the mta version
//
// there is a check for the mta.yaml file and for the deployable test.mtar file
helper.registerAllowedMethod('fileExists', [String],{
@ -132,7 +124,7 @@ class FioriOnCloudPlatformPipelineTest extends BasePiperTest {
//
// the mta build call:
assertThat(shellRule.shell, hasItem(
allOf( containsString('java -jar /opt/sap/mta.jar'),
allOf( containsString('java -jar /opt/sap/mta/lib/mta.jar'), // default mtaJarLocation
containsString('--mtar test.mtar'),
containsString('--build-target=NEO'),
containsString('build'))))

View File

@ -1,4 +1,6 @@
#!groovy
import hudson.AbortException
import static org.hamcrest.Matchers.is
import static org.hamcrest.Matchers.not
import static org.hamcrest.Matchers.containsString
@ -81,4 +83,82 @@ class HandlePipelineStepErrorsTest extends BasePiperTest {
assertThat(loggingRule.log, containsString('[something:anything]'))
}
}
@Test
void testHandleErrorsIgnoreFailure() {
    // With failOnError: false the error thrown in the body must not escape
    // the step; the build result is only downgraded to UNSTABLE.
    boolean exceptionEscaped = false
    try {
        stepRule.step.handlePipelineStepErrors([
            stepName: 'test',
            stepParameters: [jenkinsUtilsStub: jenkinsUtils, script: nullScript],
            failOnError: false
        ]) {
            throw new AbortException('TestError')
        }
    } catch (ignored) {
        exceptionEscaped = true
    }

    assertThat(exceptionEscaped, is(false))
    assertThat(nullScript.currentBuild.result, is('UNSTABLE'))
}
@Test
// A step listed in the mandatorySteps configuration must fail even when
// failOnError is false: the exception is expected to escape the step.
void testHandleErrorsIgnoreFailureBlacklist() {
def errorOccured = false
//define blacklist in defaults
helper.registerAllowedMethod("readYaml", [Map], { Map m ->
return [steps: [handlePipelineStepErrors: [mandatorySteps: ['step1', 'test']]]]
})
try {
stepRule.step.handlePipelineStepErrors([
stepName: 'test',
stepParameters: [jenkinsUtilsStub: jenkinsUtils, script: nullScript],
failOnError: false
]) {
throw new AbortException('TestError')
}
} catch (err) {
errorOccured = true
}
// 'test' is in mandatorySteps, so the error must not be swallowed
assertThat(errorOccured, is(true))
}
@Test
void testHandleErrorsIgnoreFailureNoScript() {
    // failOnError: false must also swallow the error when no script object
    // is contained in the step parameters.
    boolean exceptionEscaped = false
    try {
        stepRule.step.handlePipelineStepErrors([
            stepName: 'test',
            stepParameters: [jenkinsUtilsStub: jenkinsUtils],
            failOnError: false
        ]) {
            throw new AbortException('TestError')
        }
    } catch (ignored) {
        exceptionEscaped = true
    }

    assertThat(exceptionEscaped, is(false))
}
@Test
// Verifies that a configured stepTimeouts entry is handed to the Jenkins
// 'timeout' step and that an interruption (FlowInterruptedException) with
// failOnError: false only downgrades the build to UNSTABLE.
void testHandleErrorsTimeout() {
def timeout = 0
// stub 'timeout' to capture the configured duration and simulate an abort
helper.registerAllowedMethod('timeout', [Map.class, Closure.class], {m, body ->
timeout = m.time
throw new org.jenkinsci.plugins.workflow.steps.FlowInterruptedException(hudson.model.Result.ABORTED, new jenkins.model.CauseOfInterruption.UserInterruption('Test'))
})
stepRule.step.handlePipelineStepErrors([
stepName: 'test',
stepParameters: [jenkinsUtilsStub: jenkinsUtils, script: nullScript],
failOnError: false,
stepTimeouts: [test: 10]
]) {
//do something
}
// the value configured for step 'test' reached the timeout step
assertThat(timeout, is(10))
assertThat(nullScript.currentBuild.result, is('UNSTABLE'))
}
}

View File

@ -1,5 +1,7 @@
#!groovy
import com.sap.piper.DefaultValueCache
import com.sap.piper.analytics.InfluxData
import org.junit.Before
import org.junit.Rule
import org.junit.Test
@ -147,9 +149,9 @@ class InfluxWriteDataTest extends BasePiperTest {
void testInfluxCustomDataFromCPE() {
nullScript.commonPipelineEnvironment.reset()
nullScript.commonPipelineEnvironment.setArtifactVersion('1.2.3')
nullScript.commonPipelineEnvironment.setInfluxCustomDataTagsEntry('tag1', 'testTag1')
nullScript.commonPipelineEnvironment.setInfluxCustomDataMapEntry('test_data', 'key1', 'keyValue1')
nullScript.commonPipelineEnvironment.setInfluxCustomDataMapTagsEntry('test_data', 'tag1', 'tagValue1')
InfluxData.addTag('jenkins_custom_data', 'tag1', 'testTag1')
InfluxData.addField('test_data', 'key1', 'keyValue1')
InfluxData.addTag('test_data', 'tag1', 'tagValue1')
stepRule.step.influxWriteData(
//juStabUtils: utils,
script: nullScript,

View File

@ -0,0 +1,143 @@
#!groovy
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.ExpectedException
import org.junit.rules.RuleChain
import util.*
import static org.hamcrest.Matchers.*
import static org.junit.Assert.assertThat
/**
 * Unit tests for the kanikoExecute pipeline step.
 *
 * Docker execution, shell calls, file reads/writes and the Jenkins 'file'
 * credentials binding are stubbed; the tests then inspect the generated
 * busybox shell commands and the docker parameters.
 */
class KanikoExecuteTest extends BasePiperTest {
    private JenkinsStepRule stepRule = new JenkinsStepRule(this)
    private JenkinsShellCallRule shellRule = new JenkinsShellCallRule(this)
    private JenkinsReadFileRule readFileRule = new JenkinsReadFileRule(this, 'test/resources/kaniko/')
    private JenkinsWriteFileRule writeFileRule = new JenkinsWriteFileRule(this)
    private JenkinsDockerExecuteRule dockerExecuteRule = new JenkinsDockerExecuteRule(this)

    @Rule
    public RuleChain rules = Rules
        .getCommonRules(this)
        .around(new JenkinsReadYamlRule(this))
        .around(shellRule)
        .around(readFileRule)
        .around(writeFileRule)
        .around(dockerExecuteRule)
        .around(stepRule)

    // captures the Map passed to the 'file' credentials binding
    def fileMap = [:]

    @Before
    void init() {
        binding.variables.env.WORKSPACE = '/path/to/current/workspace'

        helper.registerAllowedMethod('file', [Map], { m ->
            fileMap = m
            return m
        })
        // simulate the credentials binding: expose the bound variable while
        // the closure runs, clear it afterwards
        helper.registerAllowedMethod('withCredentials', [List, Closure], { l, c ->
            binding.setProperty(fileMap.variable, 'config.json')
            try {
                c()
            } finally {
                binding.setProperty(fileMap.variable, null)
            }
        })
        // deterministic temp-file prefix, see 'mv 1-config.json ...' assertions
        UUID.metaClass.static.randomUUID = { -> 1}
    }

    @Test
    void testDefaults() {
        stepRule.step.kanikoExecute(
            script: nullScript
        )
        // without credentials an empty docker config is written and no push happens
        assertThat(shellRule.shell, hasItem('#!/busybox/sh rm /kaniko/.docker/config.json'))
        assertThat(shellRule.shell, hasItem(allOf(
            startsWith('#!/busybox/sh'),
            containsString('mv 1-config.json /kaniko/.docker/config.json'),
            containsString('/kaniko/executor'),
            containsString('--dockerfile /path/to/current/workspace/Dockerfile'),
            containsString('--context /path/to/current/workspace'),
            containsString('--skip-tls-verify-pull'),
            containsString('--no-push')
        )))
        assertThat(writeFileRule.files.values()[0], is('{"auths":{}}'))
        assertThat(dockerExecuteRule.dockerParams, allOf(
            hasEntry('containerCommand', '/busybox/tail -f /dev/null'),
            hasEntry('containerShell', '/busybox/sh'),
            hasEntry('dockerImage', 'gcr.io/kaniko-project/executor:debug'),
            hasEntry('dockerOptions', "-u 0 --entrypoint=''")
        ))
    }

    @Test
    void testCustomDockerCredentials() {
        stepRule.step.kanikoExecute(
            script: nullScript,
            dockerConfigJsonCredentialsId: 'myDockerConfigJson'
        )
        // the provided credentials id is bound and its content is written as docker config
        assertThat(fileMap.credentialsId, is('myDockerConfigJson'))
        assertThat(writeFileRule.files.values()[0], allOf(
            containsString('docker.my.domain.com:4444'),
            containsString('"auth": "myAuth"'),
            containsString('"email": "my.user@domain.com"')
        ))
    }

    @Test
    void testCustomImage() {
        stepRule.step.kanikoExecute(
            script: nullScript,
            containerImageNameAndTag: 'my.docker.registry/path/myImageName:myTag'
        )
        // a configured image name/tag turns into a --destination option
        assertThat(shellRule.shell, hasItem(allOf(
            startsWith('#!/busybox/sh'),
            containsString('mv 1-config.json /kaniko/.docker/config.json'),
            containsString('/kaniko/executor'),
            containsString('--dockerfile /path/to/current/workspace/Dockerfile'),
            containsString('--context /path/to/current/workspace'),
            containsString('--skip-tls-verify-pull'),
            containsString('--destination my.docker.registry/path/myImageName:myTag')
        )))
    }

    @Test
    void testPreserveDestination() {
        stepRule.step.kanikoExecute(
            script: nullScript,
            containerBuildOptions: '--destination my.docker.registry/path/myImageName:myTag'
        )
        // a --destination given via containerBuildOptions is passed through unchanged
        assertThat(shellRule.shell, hasItem(allOf(
            startsWith('#!/busybox/sh'),
            containsString('mv 1-config.json /kaniko/.docker/config.json'),
            containsString('/kaniko/executor'),
            containsString('--dockerfile /path/to/current/workspace/Dockerfile'),
            containsString('--context /path/to/current/workspace'),
            containsString('--destination my.docker.registry/path/myImageName:myTag')
        )))
    }

    @Test
    void testCustomCertificates() {
        stepRule.step.kanikoExecute(
            script: nullScript,
            customTlsCertificateLinks: ['http://link.one', 'http://link.two']
        )
        // each configured link is appended to the container's CA bundle
        // NOTE: removed an illegal trailing comma after the last allOf argument
        // (Groovy does not support trailing commas in argument lists)
        assertThat(shellRule.shell, hasItem(allOf(
            startsWith('#!/busybox/sh'),
            containsString('rm /kaniko/.docker/config.json'),
            containsString('wget http://link.one -O - >> /kaniko/ssl/certs/ca-certificates.crt'),
            containsString('wget http://link.two -O - >> /kaniko/ssl/certs/ca-certificates.crt')
        )))
    }
}

View File

@ -13,19 +13,18 @@ import util.JenkinsLoggingRule
import util.JenkinsReadYamlRule
import util.JenkinsShellCallRule
import util.JenkinsStepRule
import util.JenkinsWriteFileRule
import util.Rules
public class MtaBuildTest extends BasePiperTest {
def toolMtaValidateCalled = false
def toolJavaValidateCalled = false
private ExpectedException thrown = new ExpectedException()
private JenkinsLoggingRule loggingRule = new JenkinsLoggingRule(this)
private JenkinsShellCallRule shellRule = new JenkinsShellCallRule(this)
private JenkinsDockerExecuteRule dockerExecuteRule = new JenkinsDockerExecuteRule(this)
private JenkinsStepRule stepRule = new JenkinsStepRule(this)
private JenkinsReadYamlRule readYamlRule = new JenkinsReadYamlRule(this).registerYaml('mta.yaml', defaultMtaYaml() )
private JenkinsWriteFileRule writeFileRule = new JenkinsWriteFileRule(this)
@Rule
public RuleChain ruleChain = Rules
@ -36,12 +35,15 @@ public class MtaBuildTest extends BasePiperTest {
.around(shellRule)
.around(dockerExecuteRule)
.around(stepRule)
.around(writeFileRule)
@Before
void init() {
helper.registerAllowedMethod('fileExists', [String], { s -> s == 'mta.yaml' })
helper.registerAllowedMethod('httpRequest', [String.class], { s -> new SettingsStub()})
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*\\$MTA_JAR_LOCATION.*', '')
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*\\$JAVA_HOME.*', '')
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*which java.*', 0)
@ -89,9 +91,6 @@ public class MtaBuildTest extends BasePiperTest {
stepRule.step.mtaBuild(script: nullScript, mtaJarLocation: '/mylocation/mta/mta.jar', buildTarget: 'NEO')
assert shellRule.shell.find { c -> c.contains('-jar /mylocation/mta/mta.jar --mtar')}
assert loggingRule.log.contains("SAP Multitarget Application Archive Builder file '/mylocation/mta/mta.jar' retrieved from configuration.")
assert loggingRule.log.contains("Using SAP Multitarget Application Archive Builder '/mylocation/mta/mta.jar'.")
}
@ -130,19 +129,6 @@ public class MtaBuildTest extends BasePiperTest {
}
@Test
void mtaJarLocationFromEnvironmentTest() {
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*\\$MTA_JAR_LOCATION.*', '/env/mta/mta.jar')
stepRule.step.mtaBuild(script: nullScript, buildTarget: 'NEO')
assert shellRule.shell.find { c -> c.contains("-jar /env/mta/mta.jar --mtar")}
assert loggingRule.log.contains("SAP Multitarget Application Archive Builder file '/env/mta/mta.jar' retrieved from environment.")
assert loggingRule.log.contains("Using SAP Multitarget Application Archive Builder '/env/mta/mta.jar'.")
}
@Test
void mtaJarLocationFromCustomStepConfigurationTest() {
@ -151,9 +137,7 @@ public class MtaBuildTest extends BasePiperTest {
stepRule.step.mtaBuild(script: nullScript,
buildTarget: 'NEO')
assert shellRule.shell.find(){ c -> c.contains("-jar /config/mta/mta.jar --mtar")}
assert loggingRule.log.contains("SAP Multitarget Application Archive Builder file '/config/mta/mta.jar' retrieved from configuration.")
assert loggingRule.log.contains("Using SAP Multitarget Application Archive Builder '/config/mta/mta.jar'.")
assert shellRule.shell.find(){ c -> c.contains('java -jar /config/mta/mta.jar --mtar')}
}
@ -163,9 +147,7 @@ public class MtaBuildTest extends BasePiperTest {
stepRule.step.mtaBuild(script: nullScript,
buildTarget: 'NEO')
assert shellRule.shell.find(){ c -> c.contains("-jar /opt/sap/mta/lib/mta.jar --mtar")}
assert loggingRule.log.contains("SAP Multitarget Application Archive Builder file '/opt/sap/mta/lib/mta.jar' retrieved from configuration.")
assert loggingRule.log.contains("Using SAP Multitarget Application Archive Builder '/opt/sap/mta/lib/mta.jar'.")
assert shellRule.shell.find(){ c -> c.contains('java -jar /opt/sap/mta/lib/mta.jar --mtar')}
}
@ -204,6 +186,46 @@ public class MtaBuildTest extends BasePiperTest {
assert 'something' == dockerExecuteRule.dockerParams.dockerOptions
}
@Test
void canConfigureMavenUserSettings() {
stepRule.step.mtaBuild(script: nullScript, projectSettingsFile: 'settings.xml')
assert shellRule.shell.find(){ c -> c.contains('cp settings.xml $HOME/.m2/settings.xml')}
}
@Test
void canConfigureMavenUserSettingsFromRemoteSource() {
stepRule.step.mtaBuild(script: nullScript, projectSettingsFile: 'https://some.host/my-settings.xml')
assert shellRule.shell.find(){ c -> c.contains('cp project-settings.xml $HOME/.m2/settings.xml')}
}
@Test
void canConfigureMavenGlobalSettings() {
stepRule.step.mtaBuild(script: nullScript, globalSettingsFile: 'settings.xml')
assert shellRule.shell.find(){ c -> c.contains('cp settings.xml $M2_HOME/conf/settings.xml')}
}
@Test
void canConfigureNpmRegistry() {
stepRule.step.mtaBuild(script: nullScript, defaultNpmRegistry: 'myNpmRegistry.com')
assert shellRule.shell.find(){ c -> c.contains('npm config set registry myNpmRegistry.com')}
}
@Test
void canConfigureMavenGlobalSettingsFromRemoteSource() {
stepRule.step.mtaBuild(script: nullScript, globalSettingsFile: 'https://some.host/my-settings.xml')
assert shellRule.shell.find(){ c -> c.contains('cp global-settings.xml $M2_HOME/conf/settings.xml')}
}
@Test
void buildTargetFromDefaultStepConfigurationTest() {
@ -297,4 +319,9 @@ public class MtaBuildTest extends BasePiperTest {
'''
}
class SettingsStub {
String getContent() {
return "<xml>sometext</xml>"
}
}
}

View File

@ -0,0 +1,256 @@
import com.sap.piper.JenkinsUtils
import com.sap.piper.Utils
import hudson.AbortException
import org.junit.Assert
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.ExpectedException
import org.junit.rules.RuleChain
import util.*
/**
 * Unit tests for the multicloudDeploy pipeline step.
 *
 * neoDeploy and cloudFoundryDeploy are replaced by JenkinsMockStepRule mocks;
 * the tests configure neoTargets/cfTargets (via the common pipeline
 * environment or step parameters) and assert which parameters the mocked
 * deploy steps received.
 */
class MulticloudDeployTest extends BasePiperTest {
private ExpectedException thrown = new ExpectedException().none()
private JenkinsStepRule stepRule = new JenkinsStepRule(this)
// mocks recording the invocations of the two delegated deploy steps
private JenkinsMockStepRule neoDeployRule = new JenkinsMockStepRule(this, 'neoDeploy')
private JenkinsMockStepRule cloudFoundryDeployRule = new JenkinsMockStepRule(this, 'cloudFoundryDeploy')
private JenkinsReadMavenPomRule readMavenPomRule = new JenkinsReadMavenPomRule(this, 'test/resources/deploy')
// deploy targets, populated in init()
private Map neo1 = [:]
private Map neo2 = [:]
private Map cloudFoundry1 = [:]
private Map cloudFoundry2 = [:]
@Rule
public RuleChain ruleChain = Rules
.getCommonRules(this)
.around(new JenkinsReadYamlRule(this))
.around(thrown)
.around(stepRule)
.around(neoDeployRule)
.around(cloudFoundryDeployRule)
.around(readMavenPomRule)
private Map neoDeployParameters = [:]
private Map cloudFoundryDeployParameters = [:]
@Before
void init() {
neo1 = [
host: 'test.deploy.host1.com',
account: 'trialuser1',
credentialsId: 'credentialsId1'
]
neo2 = [
host: 'test.deploy.host2.com',
account: 'trialuser2',
credentialsId: 'credentialsId2'
]
cloudFoundry1 = [
appName:'testAppName1',
manifest: 'test.yml',
org: 'testOrg1',
space: 'testSpace1',
credentialsId: 'cfCredentialsId1'
]
cloudFoundry2 = [
appName:'testAppName2',
manifest: 'test.yml',
org: 'testOrg2',
space: 'testSpace2',
credentialsId: 'cfCredentialsId2'
]
// default configuration: two neo targets and two CF targets plus
// cloudFoundryDeploy step defaults
nullScript.commonPipelineEnvironment.configuration = [
general: [
neoTargets: [
neo1, neo2
],
cfTargets: [
cloudFoundry1, cloudFoundry2
]
],
stages: [
acceptance: [
org: 'testOrg',
space: 'testSpace',
deployUser: 'testUser'
]
],
steps: [
cloudFoundryDeploy: [
deployTool: 'cf_native',
deployType: 'blue-green',
keepOldInstance: true,
cf_native: [
dockerImage: 's4sdk/docker-cf-cli',
dockerWorkspace: '/home/piper'
]
]
]
]
}
@Test
// without any neo or CF target the step must abort
void errorNoTargetsDefined() {
nullScript.commonPipelineEnvironment.configuration.general.neoTargets = []
nullScript.commonPipelineEnvironment.configuration.general.cfTargets = []
thrown.expect(Exception)
thrown.expectMessage('Deployment skipped because no targets defined!')
stepRule.step.multicloudDeploy(
script: nullScript,
stage: 'test'
)
}
@Test
// a neo target without a 'source' parameter must fail the mandatory check
void errorNoSourceForNeoDeploymentTest() {
nullScript.commonPipelineEnvironment.configuration.general.neoTargets = [neo1]
nullScript.commonPipelineEnvironment.configuration.general.cfTargets = []
thrown.expect(Exception)
thrown.expectMessage('ERROR - NO VALUE AVAILABLE FOR source')
stepRule.step.multicloudDeploy(
script: nullScript,
stage: 'test'
)
}
@Test
// single neo target: neoDeploy is called with warAction 'deploy'
void neoDeploymentTest() {
nullScript.commonPipelineEnvironment.configuration.general.neoTargets = [neo1]
nullScript.commonPipelineEnvironment.configuration.general.cfTargets = []
stepRule.step.multicloudDeploy(
script: nullScript,
stage: 'test',
source: 'file.mtar'
)
assert neoDeployRule.hasParameter('script', nullScript)
assert neoDeployRule.hasParameter('warAction', 'deploy')
assert neoDeployRule.hasParameter('source', 'file.mtar')
assert neoDeployRule.hasParameter('neo', neo1)
}
@Test
// enableZeroDowntimeDeployment switches the neo warAction to 'rolling-update';
// the target here is passed via step parameters, not configuration
void neoRollingUpdateTest() {
nullScript.commonPipelineEnvironment.configuration.general.neoTargets = []
nullScript.commonPipelineEnvironment.configuration.general.cfTargets = []
def neoParam = [
host: 'test.param.deploy.host.com',
account: 'trialparamNeoUser',
credentialsId: 'paramNeoCredentialsId'
]
stepRule.step.multicloudDeploy(
script: nullScript,
stage: 'test',
neoTargets: [neoParam],
source: 'file.mtar',
enableZeroDowntimeDeployment: true
)
assert neoDeployRule.hasParameter('script', nullScript)
assert neoDeployRule.hasParameter('warAction', 'rolling-update')
assert neoDeployRule.hasParameter('source', 'file.mtar')
assert neoDeployRule.hasParameter('neo', neoParam)
}
@Test
// CF target from step parameters: without zero-downtime the deployType is 'standard'
void cfDeploymentTest() {
nullScript.commonPipelineEnvironment.configuration.general.neoTargets = []
nullScript.commonPipelineEnvironment.configuration.general.cfTargets = []
def cloudFoundry = [
appName:'paramTestAppName',
manifest: 'test.yml',
org: 'paramTestOrg',
space: 'paramTestSpace',
credentialsId: 'paramCfCredentialsId'
]
stepRule.step.multicloudDeploy([
script: nullScript,
stage: 'acceptance',
cfTargets: [cloudFoundry]
])
assert cloudFoundryDeployRule.hasParameter('script', nullScript)
assert cloudFoundryDeployRule.hasParameter('deployType', 'standard')
assert cloudFoundryDeployRule.hasParameter('cloudFoundry', cloudFoundry)
assert cloudFoundryDeployRule.hasParameter('mtaPath', nullScript.commonPipelineEnvironment.mtarFilePath)
assert cloudFoundryDeployRule.hasParameter('deployTool', 'cf_native')
}
@Test
// enableZeroDowntimeDeployment switches the CF deployType to 'blue-green'
void cfBlueGreenDeploymentTest() {
nullScript.commonPipelineEnvironment.configuration.general.neoTargets = []
nullScript.commonPipelineEnvironment.configuration.general.cfTargets = [cloudFoundry1]
stepRule.step.multicloudDeploy([
script: nullScript,
stage: 'acceptance',
enableZeroDowntimeDeployment: true
])
assert cloudFoundryDeployRule.hasParameter('script', nullScript)
assert cloudFoundryDeployRule.hasParameter('deployType', 'blue-green')
assert cloudFoundryDeployRule.hasParameter('cloudFoundry', cloudFoundry1)
assert cloudFoundryDeployRule.hasParameter('mtaPath', nullScript.commonPipelineEnvironment.mtarFilePath)
assert cloudFoundryDeployRule.hasParameter('deployTool', 'cf_native')
}
@Test
// mixed scenario: both neo targets and both CF targets from the default
// configuration are deployed with zero-downtime semantics
void multicloudDeploymentTest() {
stepRule.step.multicloudDeploy([
script: nullScript,
stage: 'acceptance',
enableZeroDowntimeDeployment: true,
source: 'file.mtar'
])
assert neoDeployRule.hasParameter('script', nullScript)
assert neoDeployRule.hasParameter('warAction', 'rolling-update')
assert neoDeployRule.hasParameter('source', 'file.mtar')
assert neoDeployRule.hasParameter('neo', neo1)
assert neoDeployRule.hasParameter('script', nullScript)
assert neoDeployRule.hasParameter('warAction', 'rolling-update')
assert neoDeployRule.hasParameter('source', 'file.mtar')
assert neoDeployRule.hasParameter('neo', neo2)
assert cloudFoundryDeployRule.hasParameter('script', nullScript)
assert cloudFoundryDeployRule.hasParameter('deployType', 'blue-green')
assert cloudFoundryDeployRule.hasParameter('cloudFoundry', cloudFoundry1)
assert cloudFoundryDeployRule.hasParameter('mtaPath', nullScript.commonPipelineEnvironment.mtarFilePath)
assert cloudFoundryDeployRule.hasParameter('deployTool', 'cf_native')
assert cloudFoundryDeployRule.hasParameter('script', nullScript)
assert cloudFoundryDeployRule.hasParameter('deployType', 'blue-green')
assert cloudFoundryDeployRule.hasParameter('cloudFoundry', cloudFoundry2)
assert cloudFoundryDeployRule.hasParameter('mtaPath', nullScript.commonPipelineEnvironment.mtarFilePath)
assert cloudFoundryDeployRule.hasParameter('deployTool', 'cf_native')
}
}

View File

@ -6,16 +6,15 @@ import static org.hamcrest.Matchers.containsString
import static org.hamcrest.Matchers.not
import org.hamcrest.Matchers
import org.hamcrest.BaseMatcher
import org.hamcrest.Description
import org.jenkinsci.plugins.credentialsbinding.impl.CredentialNotFoundException
import org.junit.Assert
import org.junit.Before
import org.junit.BeforeClass
import org.junit.ClassRule
import org.junit.Rule
import org.junit.Test
import org.junit.rules.ExpectedException
import org.junit.rules.RuleChain
import org.junit.rules.TemporaryFolder
import util.BasePiperTest
import util.CommandLineMatcher
import util.JenkinsCredentialsRule
@ -27,27 +26,21 @@ import util.JenkinsShellCallRule
import util.JenkinsShellCallRule.Type
import util.JenkinsStepRule
import util.JenkinsWithEnvRule
import util.JenkinsFileExistsRule
import util.Rules
class NeoDeployTest extends BasePiperTest {
def toolJavaValidateCalled = false
@ClassRule
public static TemporaryFolder tmp = new TemporaryFolder()
private ExpectedException thrown = new ExpectedException().none()
private JenkinsLoggingRule loggingRule = new JenkinsLoggingRule(this)
private JenkinsShellCallRule shellRule = new JenkinsShellCallRule(this)
private JenkinsStepRule stepRule = new JenkinsStepRule(this)
private JenkinsLockRule lockRule = new JenkinsLockRule(this)
@Rule
public RuleChain ruleChain = Rules
.getCommonRules(this)
.around(new JenkinsReadYamlRule(this))
.around(new JenkinsPropertiesRule(this, propertiesFileName, configProperties))
.around(new JenkinsPropertiesRule(this, warPropertiesFileName, warProperties))
.around(thrown)
.around(loggingRule)
.around(shellRule)
@ -55,41 +48,27 @@ class NeoDeployTest extends BasePiperTest {
.withCredentials('myCredentialsId', 'anonymous', '********')
.withCredentials('CI_CREDENTIALS_ID', 'defaultUser', '********'))
.around(stepRule)
.around(lockRule)
.around(new JenkinsLockRule(this))
.around(new JenkinsWithEnvRule(this))
.around(new JenkinsFileExistsRule(this, ['warArchive.war', 'archive.mtar', 'war.properties']))
private static workspacePath
private static warArchiveName
private static propertiesFileName
private static archiveName
private static configProperties
private static warArchiveName = 'warArchive.war'
private static warPropertiesFileName = 'war.properties'
private static archiveName = 'archive.mtar'
private static warProperties
@BeforeClass
static void createTestFiles() {
workspacePath = "${tmp.getRoot()}"
warArchiveName = 'warArchive.war'
propertiesFileName = 'config.properties'
archiveName = 'archive.mtar'
configProperties = new Properties()
configProperties.put('account', 'trialuser123')
configProperties.put('host', 'test.deploy.host.com')
configProperties.put('application', 'testApp')
tmp.newFile(warArchiveName) << 'dummy war archive'
tmp.newFile(propertiesFileName) << 'dummy properties file'
tmp.newFile(archiveName) << 'dummy archive'
}
@Before
void init() {
warProperties = new Properties()
warProperties.put('account', 'trialuser123')
warProperties.put('host', 'test.deploy.host.com')
warProperties.put('application', 'testApp')
helper.registerAllowedMethod('dockerExecute', [Map, Closure], null)
helper.registerAllowedMethod('fileExists', [String], { s -> return new File(workspacePath, s).exists() })
helper.registerAllowedMethod('pwd', [], { return workspacePath })
helper.registerAllowedMethod('pwd', [], { return './' })
nullScript.commonPipelineEnvironment.configuration = [steps: [neoDeploy: [neo: [host: 'test.deploy.host.com', account: 'trialuser123']]]]
}
@ -97,14 +76,6 @@ class NeoDeployTest extends BasePiperTest {
@Test
void straightForwardTestConfigViaParameters() {
boolean notifyOldConfigFrameworkUsed = true
def utils = new Utils() {
void pushToSWA(Map parameters, Map config) {
notifyOldConfigFrameworkUsed = parameters.stepParam4
}
}
stepRule.step.neoDeploy(script: nullScript,
source: archiveName,
neo:[credentialsId: 'myCredentialsId'],
@ -119,8 +90,6 @@ class NeoDeployTest extends BasePiperTest {
.hasSingleQuotedOption('user', 'anonymous')
.hasSingleQuotedOption('password', '\\*\\*\\*\\*\\*\\*\\*\\*')
.hasSingleQuotedOption('source', '.*'))
assert !notifyOldConfigFrameworkUsed
}
@Test
@ -152,7 +121,9 @@ class NeoDeployTest extends BasePiperTest {
@Test
void archivePathFromCPETest() {
nullScript.commonPipelineEnvironment.setMtarFilePath('archive.mtar')
stepRule.step.neoDeploy(script: nullScript)
Assert.assertThat(shellRule.shell,
@ -162,7 +133,9 @@ class NeoDeployTest extends BasePiperTest {
@Test
void archivePathFromParamsHasHigherPrecedenceThanCPETest() {
nullScript.commonPipelineEnvironment.setMtarFilePath('archive2.mtar')
stepRule.step.neoDeploy(script: nullScript,
source: "archive.mtar")
@ -410,17 +383,13 @@ class NeoDeployTest extends BasePiperTest {
deployMode: 'warPropertiesFile',
warAction: 'deploy',
neo: [
propertiesFile: propertiesFileName,
application: 'testApp',
runtime: 'neo-javaee6-wp',
runtimeVersion: '2.125',
size: 'lite'
propertiesFile: warPropertiesFileName
]
)
Assert.assertThat(shellRule.shell,
new CommandLineMatcher().hasProlog("neo.sh deploy")
.hasArgument("config.properties")
.hasArgument('war.properties')
.hasSingleQuotedOption('user', 'defaultUser')
.hasSingleQuotedOption('password', '\\*\\*\\*\\*\\*\\*\\*\\*')
.hasSingleQuotedOption('source', '.*\\.war'))
@ -436,16 +405,12 @@ class NeoDeployTest extends BasePiperTest {
deployMode: 'warPropertiesFile',
warAction: 'rolling-update',
neo: [
propertiesFile: propertiesFileName,
application: 'testApp',
runtime: 'neo-javaee6-wp',
runtimeVersion: '2.125',
size: 'lite'
propertiesFile: warPropertiesFileName
])
Assert.assertThat(shellRule.shell,
new CommandLineMatcher().hasProlog("neo.sh rolling-update")
.hasArgument('config.properties')
.hasArgument('war.properties')
.hasSingleQuotedOption('user', 'defaultUser')
.hasSingleQuotedOption('password', '\\*\\*\\*\\*\\*\\*\\*\\*')
.hasSingleQuotedOption('source', '.*\\.war'))
@ -486,4 +451,70 @@ class NeoDeployTest extends BasePiperTest {
size: 'lite'
])
}
@Test
void dontSwallowExceptionWhenUnableToProvideLogsTest() {
    // When providing the neo logs fails after a failed deployment, the
    // original deployment error must still surface, with the log-retrieval
    // failure attached as a suppressed exception.
    thrown.expect(AbortException)
    thrown.expectMessage('Something went wrong during neo deployment')
    thrown.expect(new BaseMatcher() {
        def expectedException = AbortException
        def expectedText = 'Cannot provide logs.'

        boolean matches(def ex) {
            // exactly one suppressed AbortException with the expected message
            def suppressed = ex.getSuppressed()
            return (suppressed.size() == 1 &&
                suppressed[0] in expectedException &&
                suppressed[0].message == expectedText)
        }

        void describeTo(Description d) {
            d.appendText(" a suppressed ${expectedException} with message ${expectedText}.")
        }
    })

    loggingRule.expect('Unable to provide the logs.')

    helper.registerAllowedMethod('fileExists', [String],
        { f ->
            f == 'archive.mtar'
        }
    )
    // deployment itself fails ...
    helper.registerAllowedMethod('sh', [Map],
        { m ->
            if(m.script.toString().contains('neo.sh deploy-mta'))
                throw new AbortException('Something went wrong during neo deployment.')
        }
    )
    // ... and retrieving the logs fails as well
    helper.registerAllowedMethod("sh", [String],
        { cmd ->
            if (cmd == 'cat logs/neo/*')
                throw new AbortException('Cannot provide logs.')
        }
    )

    // NOTE: removed an illegal trailing comma after the last named argument
    // (Groovy does not support trailing commas in argument lists)
    stepRule.step.neoDeploy(script: nullScript,
        source: archiveName,
        neo:[credentialsId: 'myCredentialsId'],
        deployMode: 'mta',
        utils: utils
    )
}
@Test
void deployModeAsGStringTest() {
    // The step must also accept a GString (not only java.lang.String) for
    // the deployMode parameter; the interpolation below deliberately yields
    // a GString rather than a plain String.
    Map props = [deployMode: 'warPropertiesFile']

    stepRule.step.neoDeploy(
        script: nullScript,
        utils: utils,
        neo: [
            credentialsId: 'myCredentialsId',
            propertiesFile: warPropertiesFileName
        ],
        deployMode: "$props.deployMode",
        source: archiveName)
}
}

View File

@ -11,6 +11,7 @@ import util.JenkinsReadYamlRule
import util.JenkinsStepRule
class PipelineExecuteTest extends BasePiperTest {
private ExpectedException thrown = new ExpectedException().none()
private JenkinsStepRule stepRule = new JenkinsStepRule(this)
@ -27,9 +28,6 @@ class PipelineExecuteTest extends BasePiperTest {
@Before
void init() {
pipelinePath = null
checkoutParameters.clear()
load = null
helper.registerAllowedMethod('deleteDir', [], null)
helper.registerAllowedMethod('checkout', [Map], { m ->
@ -44,17 +42,19 @@ class PipelineExecuteTest extends BasePiperTest {
@Test
void straightForwardTest() {
stepRule.step.pipelineExecute(repoUrl: "https://test.com/myRepo.git")
assert load == "Jenkinsfile"
assert checkoutParameters.branch == 'master'
assert checkoutParameters.repoUrl == "https://test.com/myRepo.git"
assert checkoutParameters.credentialsId == ''
assert checkoutParameters.path == 'Jenkinsfile'
}
@Test
void parameterizeTest() {
stepRule.step.pipelineExecute(repoUrl: "https://test.com/anotherRepo.git",
branch: 'feature',
path: 'path/to/Jenkinsfile',
@ -65,11 +65,11 @@ class PipelineExecuteTest extends BasePiperTest {
assert checkoutParameters.repoUrl == "https://test.com/anotherRepo.git"
assert checkoutParameters.credentialsId == 'abcd1234'
assert checkoutParameters.path == 'path/to/Jenkinsfile'
}
@Test
void noRepoUrlTest() {
thrown.expect(Exception)
thrown.expectMessage("ERROR - NO VALUE AVAILABLE FOR repoUrl")

View File

@ -22,22 +22,6 @@ class PipelineStashFilesAfterBuildTest extends BasePiperTest {
@Test
void testStashAfterBuild() {
helper.registerAllowedMethod("fileExists", [String.class], {
searchTerm ->
return false
})
stepRule.step.pipelineStashFilesAfterBuild(
script: nullScript,
juStabUtils: utils
)
// asserts
assertFalse(loggingRule.log.contains('Stash content: checkmarx'))
assertThat(loggingRule.log, containsString('Stash content: classFiles'))
assertThat(loggingRule.log, containsString('Stash content: sonar'))
}
@Test
void testStashAfterBuildWithCheckmarx() {
helper.registerAllowedMethod("fileExists", [String.class], {
searchTerm ->
return true
@ -52,21 +36,4 @@ class PipelineStashFilesAfterBuildTest extends BasePiperTest {
assertThat(loggingRule.log, containsString('Stash content: classFiles'))
assertThat(loggingRule.log, containsString('Stash content: sonar'))
}
@Test
void testStashAfterBuildWithCheckmarxConfig() {
helper.registerAllowedMethod("fileExists", [String.class], {
searchTerm ->
return true
})
stepRule.step.pipelineStashFilesAfterBuild(
script: [commonPipelineEnvironment: [configuration: [steps: [executeCheckmarxScan: [checkmarxProject: 'TestProject']]]]],
juStabUtils: utils,
)
// asserts
assertThat(loggingRule.log, containsString('Stash content: checkmarx'))
assertThat(loggingRule.log, containsString('Stash content: classFiles'))
assertThat(loggingRule.log, containsString('Stash content: sonar'))
}
}

View File

@ -22,27 +22,7 @@ class PipelineStashFilesBeforeBuildTest extends BasePiperTest {
.around(stepRule)
@Test
void testStashBeforeBuildNoOpa() {
stepRule.step.pipelineStashFilesBeforeBuild(script: nullScript, juStabUtils: utils)
// asserts
assertEquals('mkdir -p gitmetadata', shellRule.shell[0])
assertEquals('cp -rf .git/* gitmetadata', shellRule.shell[1])
assertEquals('chmod -R u+w gitmetadata', shellRule.shell[2])
assertThat(loggingRule.log, containsString('Stash content: buildDescriptor'))
assertThat(loggingRule.log, containsString('Stash content: deployDescriptor'))
assertThat(loggingRule.log, containsString('Stash content: git'))
assertFalse(loggingRule.log.contains('Stash content: opa5'))
assertThat(loggingRule.log, containsString('Stash content: opensourceConfiguration'))
assertThat(loggingRule.log, containsString('Stash content: pipelineConfigAndTests'))
assertThat(loggingRule.log, containsString('Stash content: securityDescriptor'))
assertThat(loggingRule.log, containsString('Stash content: tests'))
}
@Test
void testStashBeforeBuildOpa() {
void testStashBeforeBuild() {
stepRule.step.pipelineStashFilesBeforeBuild(script: nullScript, juStabUtils: utils, runOpaTests: true)
@ -56,4 +36,21 @@ class PipelineStashFilesBeforeBuildTest extends BasePiperTest {
assertThat(loggingRule.log, containsString('Stash content: securityDescriptor'))
assertThat(loggingRule.log, containsString('Stash content: tests'))
}
@Test
void testStashBeforeBuildCustomConfig() {
stepRule.step.pipelineStashFilesBeforeBuild(script: nullScript, juStabUtils: utils, runOpaTests: true, stashIncludes: ['myStash': '**.myTest'])
// asserts
assertThat(loggingRule.log, containsString('Stash content: buildDescriptor'))
assertThat(loggingRule.log, containsString('Stash content: deployDescriptor'))
assertThat(loggingRule.log, containsString('Stash content: git'))
assertThat(loggingRule.log, containsString('Stash content: opa5'))
assertThat(loggingRule.log, containsString('Stash content: opensourceConfiguration'))
assertThat(loggingRule.log, containsString('Stash content: pipelineConfigAndTests'))
assertThat(loggingRule.log, containsString('Stash content: securityDescriptor'))
assertThat(loggingRule.log, containsString('Stash content: tests'))
assertThat(loggingRule.log, containsString('Stash content: myStash'))
}
}

View File

@ -0,0 +1,122 @@
#!groovy
package stages
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.ExpectedException
import org.junit.rules.RuleChain
import util.BasePiperTest
import util.JenkinsLoggingRule
import util.JenkinsReadYamlRule
import util.JenkinsStepRule
import util.Rules
import static org.hamcrest.Matchers.*
import static org.junit.Assert.assertThat
/**
 * Unit tests for the 'Init' pipeline stage (piperPipelineStageInit).
 * All downstream pipeline steps are mocked; each mock records its name in
 * 'stepsCalled' so the tests can assert which steps were (not) executed.
 */
class PiperPipelineStageInitTest extends BasePiperTest {
// Executes the step under test within this test's binding.
private JenkinsStepRule jsr = new JenkinsStepRule(this)
// Captures pipeline log output for assertions.
private JenkinsLoggingRule jlr = new JenkinsLoggingRule(this)
// Declares expected exceptions/messages per test.
private ExpectedException thrown = ExpectedException.none()
@Rule
public RuleChain rules = Rules
.getCommonRules(this)
.around(new JenkinsReadYamlRule(this))
.around(thrown)
.around(jlr)
.around(jsr)
// Names of the pipeline steps actually invoked during a test run.
private List stepsCalled = []
@Before
void init() {
// The stage under test reads its stage context from STAGE_NAME.
binding.variables.env.STAGE_NAME = 'Init'
binding.setVariable('scm', {})
helper.registerAllowedMethod('deleteDir', [], null)
// Pretend the workspace contains exactly one build descriptor (pom.xml),
// so 'maven' is the only buildTool that matches the project.
helper.registerAllowedMethod("findFiles", [Map.class], { map ->
switch (map.glob) {
case 'pom.xml':
return [new File('pom.xml')].toArray()
default:
return [].toArray()
}
})
// The stage wrapper must be entered with the correct stage name;
// the stage body is then executed directly.
helper.registerAllowedMethod('piperStageWrapper', [Map.class, Closure.class], {m, body ->
assertThat(m.stageName, is('Init'))
return body()
})
// checkout returns the minimal scm info the stage consumes afterwards.
helper.registerAllowedMethod('checkout', [Closure.class], {c ->
stepsCalled.add('checkout')
return [
GIT_COMMIT: 'abcdef12345',
GIT_URL: 'some.url'
]
})
// Record invocations of all further steps triggered by the Init stage.
helper.registerAllowedMethod('setupCommonPipelineEnvironment', [Map.class], {m -> stepsCalled.add('setupCommonPipelineEnvironment')})
helper.registerAllowedMethod('piperInitRunStageConfiguration', [Map.class], {m -> stepsCalled.add('piperInitRunStageConfiguration')})
helper.registerAllowedMethod('slackSendNotification', [Map.class], {m -> stepsCalled.add('slackSendNotification')})
helper.registerAllowedMethod('artifactSetVersion', [Map.class], {m -> stepsCalled.add('artifactSetVersion')})
helper.registerAllowedMethod('pipelineStashFilesBeforeBuild', [Map.class], {m -> stepsCalled.add('pipelineStashFilesBeforeBuild')})
}
// buildTool is mandatory: omitting it must fail with the standard
// 'no value available' error.
@Test
void testInitNoBuildTool() {
thrown.expectMessage('ERROR - NO VALUE AVAILABLE FOR buildTool')
jsr.step.piperPipelineStageInit(script: nullScript, juStabUtils: utils)
}
// Only pom.xml exists (see findFiles mock above), so buildTool 'npm'
// must be rejected as not fitting the project.
@Test
void testInitBuildToolDoesNotMatchProject() {
thrown.expect(hudson.AbortException)
thrown.expectMessage(containsString("buildTool configuration 'npm' does not fit to your project"))
jsr.step.piperPipelineStageInit(script: nullScript, juStabUtils: utils, buildTool: 'npm')
}
// Default run: all standard steps are executed, Slack notification is not.
@Test
void testInitDefault() {
jsr.step.piperPipelineStageInit(script: nullScript, juStabUtils: utils, buildTool: 'maven')
assertThat(stepsCalled, hasItems(
'checkout',
'setupCommonPipelineEnvironment',
'piperInitRunStageConfiguration',
'artifactSetVersion',
'pipelineStashFilesBeforeBuild'
))
assertThat(stepsCalled, not(hasItems('slackSendNotification')))
}
// On a non-productive branch the artifact version must not be set.
@Test
void testInitNotOnProductiveBranch() {
binding.variables.env.BRANCH_NAME = 'anyOtherBranch'
jsr.step.piperPipelineStageInit(script: nullScript, juStabUtils: utils, buildTool: 'maven')
assertThat(stepsCalled, hasItems(
'checkout',
'setupCommonPipelineEnvironment',
'piperInitRunStageConfiguration',
'pipelineStashFilesBeforeBuild'
))
assertThat(stepsCalled, not(hasItems('artifactSetVersion')))
}
// Enabling slackSendNotification via the stage configuration adds the
// Slack step to the executed sequence.
@Test
void testInitWithSlackNotification() {
nullScript.commonPipelineEnvironment.configuration = [runStep: [Init: [slackSendNotification: true]]]
jsr.step.piperPipelineStageInit(script: nullScript, juStabUtils: utils, buildTool: 'maven')
assertThat(stepsCalled, hasItems(
'checkout',
'setupCommonPipelineEnvironment',
'piperInitRunStageConfiguration',
'artifactSetVersion',
'slackSendNotification',
'pipelineStashFilesBeforeBuild'
))
}
}

View File

@ -0,0 +1,69 @@
#!groovy
package stages
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.ExpectedException
import org.junit.rules.RuleChain
import util.BasePiperTest
import util.JenkinsReadYamlRule
import util.JenkinsStepRule
import util.Rules
import static org.hamcrest.Matchers.*
import static org.junit.Assert.assertThat
/**
 * Unit tests for the 'Post' pipeline stage (piperPipelineStagePost).
 * The reporting/notification steps are mocked and their invocations are
 * recorded in 'stepsCalled' for assertion.
 */
class PiperPipelineStagePostTest extends BasePiperTest {
// Executes the step under test within this test's binding.
private JenkinsStepRule jsr = new JenkinsStepRule(this)
// Declares expected exceptions/messages per test.
private ExpectedException thrown = ExpectedException.none()
@Rule
public RuleChain rules = Rules
.getCommonRules(this)
.around(new JenkinsReadYamlRule(this))
.around(thrown)
.around(jsr)
// Names of the steps invoked by the stage during a test run.
private List stepsCalled = []
@Before
void init() {
// NOTE(review): the stage name used here is 'Release' although this
// suite tests the Post stage; the wrapper assertion below matches it,
// so the tests pass -- confirm this is intentional.
binding.variables.env.STAGE_NAME = 'Release'
helper.registerAllowedMethod('piperStageWrapper', [Map.class, Closure.class], {m, body ->
assertThat(m.stageName, is('Release'))
return body()
})
// Record invocations of the reporting/notification steps.
helper.registerAllowedMethod('influxWriteData', [Map.class], {m -> stepsCalled.add('influxWriteData')})
helper.registerAllowedMethod('slackSendNotification', [Map.class], {m -> stepsCalled.add('slackSendNotification')})
helper.registerAllowedMethod('mailSendNotification', [Map.class], {m -> stepsCalled.add('mailSendNotification')})
}
// Default: Influx reporting and mail notification run, Slack does not.
@Test
void testPostDefault() {
jsr.step.piperPipelineStagePost(script: nullScript, juStabUtils: utils)
assertThat(stepsCalled, hasItems('influxWriteData','mailSendNotification'))
assertThat(stepsCalled, not(hasItems('slackSendNotification')))
}
// Same expectation on a non-productive branch: still no Slack step.
@Test
void testPostNotOnProductiveBranch() {
binding.variables.env.BRANCH_NAME = 'anyOtherBranch'
jsr.step.piperPipelineStagePost(script: nullScript, juStabUtils: utils)
assertThat(stepsCalled, hasItems('influxWriteData','mailSendNotification'))
assertThat(stepsCalled, not(hasItems('slackSendNotification')))
}
// Enabling slackSendNotification for the 'Post Actions' stage adds the
// Slack step to the executed sequence.
@Test
void testPostWithSlackNotification() {
nullScript.commonPipelineEnvironment.configuration = [runStep: ['Post Actions': [slackSendNotification: true]]]
jsr.step.piperPipelineStagePost(script: nullScript, juStabUtils: utils)
assertThat(stepsCalled, hasItems('influxWriteData','mailSendNotification','slackSendNotification'))
}
}

View File

@ -0,0 +1,237 @@
import static org.hamcrest.Matchers.containsString
import static org.hamcrest.Matchers.hasItem
import static org.hamcrest.Matchers.is
import static org.hamcrest.Matchers.allOf
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.RuleChain
import org.junit.rules.ExpectedException
import static org.junit.Assert.assertThat
import util.BasePiperTest
import util.JenkinsDockerExecuteRule
import util.JenkinsShellCallRule
import util.JenkinsReadYamlRule
import util.JenkinsStepRule
import util.JenkinsLoggingRule
import util.Rules
/**
 * Unit tests for the sonarExecuteScan pipeline step.
 * SonarQube environment, credentials, stashes and shell calls are mocked;
 * the assertions inspect the generated 'sonar-scanner' command line and
 * the dockerExecute parameters.
 */
class SonarExecuteScanTest extends BasePiperTest {
// Declares expected exceptions/messages per test.
private ExpectedException thrown = ExpectedException.none()
private JenkinsReadYamlRule readYamlRule = new JenkinsReadYamlRule(this)
// Executes the step under test within this test's binding.
private JenkinsStepRule jsr = new JenkinsStepRule(this)
// Captures pipeline log output.
private JenkinsLoggingRule jlr = new JenkinsLoggingRule(this)
// Records all shell invocations for command-line assertions.
private JenkinsShellCallRule jscr = new JenkinsShellCallRule(this)
// Records dockerExecute parameters (e.g. the Docker image used).
private JenkinsDockerExecuteRule jedr = new JenkinsDockerExecuteRule(this)
@Rule
public RuleChain rules = Rules
.getCommonRules(this)
.around(readYamlRule)
.around(thrown)
.around(jedr)
.around(jscr)
.around(jlr)
.around(jsr)
// Captures the instance name passed to withSonarQubeEnv.
def sonarInstance
@Before
void init() throws Exception {
sonarInstance = null
// Record the SonarQube instance name and execute the body directly.
helper.registerAllowedMethod("withSonarQubeEnv", [String.class, Closure.class], { string, closure ->
sonarInstance = string
return closure()
})
helper.registerAllowedMethod("unstash", [String.class], { stashInput -> return []})
// Every queried file 'exists': the returned non-empty name is truthy.
helper.registerAllowedMethod("fileExists", [String.class], { file -> return file })
helper.registerAllowedMethod('string', [Map], { m -> m })
// Simulate credential binding: expose 'TOKEN_<credentialsId>' as the
// bound variable while the closure runs, then clear it again.
helper.registerAllowedMethod('withCredentials', [List, Closure], { l, c ->
try {
binding.setProperty(l[0].variable, 'TOKEN_'+l[0].credentialsId)
c()
} finally {
binding.setProperty(l[0].variable, null)
}
})
nullScript.commonPipelineEnvironment.setArtifactVersion('1.2.3-20180101')
}
// Defaults: instance 'SonarCloud', major part of the artifact version as
// project version, maven image for dockerExecute.
@Test
void testWithDefaults() throws Exception {
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils
)
// asserts
assertThat('Sonar instance is not set to the default value', sonarInstance, is('SonarCloud'))
assertThat('Sonar project version is not set to the default value', jscr.shell, hasItem(containsString('sonar-scanner -Dsonar.projectVersion=1')))
assertThat('Docker image is not set to the default value', jedr.dockerParams.dockerImage, is('maven:3.5-jdk-8'))
assertJobStatusSuccess()
}
// An explicitly configured projectVersion overrides the derived default.
@Test
void testWithCustomVersion() throws Exception {
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
projectVersion: '2'
)
// asserts
assertThat('Sonar project version is not set to the custom value', jscr.shell, hasItem(containsString('sonar-scanner -Dsonar.projectVersion=2')))
assertJobStatusSuccess()
}
// Custom options given as a plain string are appended to the scanner call.
@Test
void testWithCustomOptions() throws Exception {
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
options: '-Dsonar.host.url=localhost'
)
// asserts
assertThat('Sonar options are not set to the custom value', jscr.shell, hasItem(containsString('sonar-scanner -Dsonar.host.url=localhost')))
assertJobStatusSuccess()
}
// Custom options given as a list are rendered as '-D<option>' arguments.
@Test
void testWithCustomOptionsList() throws Exception {
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
options: ['sonar.host.url=localhost']
)
// asserts
assertThat('Sonar options are not set to the custom value', jscr.shell, hasItem(containsString('sonar-scanner -Dsonar.host.url=localhost')))
assertJobStatusSuccess()
}
// A custom instance name is handed to withSonarQubeEnv.
@Test
void testWithCustomInstance() throws Exception {
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
instance: 'MySonarInstance'
)
// asserts
assertThat('Sonar instance is not set to the custom value', sonarInstance.toString(), is('MySonarInstance'))
assertJobStatusSuccess()
}
// Pull-request context (CHANGE_ID etc.) enables the sonar.pullrequest.*
// parameters; the GitHub org comes from the common pipeline environment.
@Test
void testWithPRHandling() throws Exception {
binding.setVariable('env', [
'CHANGE_ID': '42',
'CHANGE_TARGET': 'master',
'BRANCH_NAME': 'feature/anything'
])
nullScript.commonPipelineEnvironment.setGithubOrg('testOrg')
//nullScript.commonPipelineEnvironment.setGithubRepo('testRepo')
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
//githubOrg: 'testOrg',
githubRepo: 'testRepo'
)
// asserts
assertThat(jscr.shell, hasItem(allOf(
containsString('-Dsonar.pullrequest.key=42'),
containsString('-Dsonar.pullrequest.base=master'),
containsString('-Dsonar.pullrequest.branch=feature/anything'),
containsString('-Dsonar.pullrequest.provider=github'),
containsString('-Dsonar.pullrequest.github.repository=testOrg/testRepo')
)))
assertJobStatusSuccess()
}
// githubRepo is mandatory in PR context; missing it must fail.
@Test
void testWithPRHandlingWithoutMandatory() throws Exception {
thrown.expect(Exception)
thrown.expectMessage('ERROR - NO VALUE AVAILABLE FOR githubRepo')
binding.setVariable('env', ['CHANGE_ID': '42'])
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
githubOrg: 'testOrg'
)
// asserts
assertJobStatusFailure()
}
// legacyPRHandling uses the deprecated preview mode with the GitHub
// plugin parameters (sonar.github.*) and the bound GitHub token.
@Test
void testWithLegacyPRHandling() throws Exception {
binding.setVariable('env', ['CHANGE_ID': '42'])
nullScript.commonPipelineEnvironment.setGithubOrg('testOrg')
//nullScript.commonPipelineEnvironment.setGithubRepo('testRepo')
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
legacyPRHandling: true,
githubTokenCredentialsId: 'githubId',
//githubOrg: 'testOrg',
githubRepo: 'testRepo'
)
// asserts
assertThat(jscr.shell, hasItem(allOf(
containsString('-Dsonar.analysis.mode=preview'),
containsString('-Dsonar.github.pullRequest=42'),
containsString('-Dsonar.github.oauth=TOKEN_githubId'),
containsString('-Dsonar.github.repository=testOrg/testRepo')
)))
assertJobStatusSuccess()
}
// In legacy PR mode the GitHub token credentials id is mandatory.
@Test
void testWithLegacyPRHandlingWithoutMandatory() throws Exception {
thrown.expect(Exception)
thrown.expectMessage('ERROR - NO VALUE AVAILABLE FOR githubTokenCredentialsId')
binding.setVariable('env', ['CHANGE_ID': '42'])
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
legacyPRHandling: true,
githubOrg: 'testOrg',
githubRepo: 'testRepo'
)
// asserts
assertJobStatusFailure()
}
// A configured sonarTokenCredentialsId is passed as sonar.login.
@Test
void testWithSonarAuth() throws Exception {
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
sonarTokenCredentialsId: 'githubId'
)
// asserts
assertThat(jscr.shell, hasItem(containsString('-Dsonar.login=TOKEN_githubId')))
assertJobStatusSuccess()
}
// A configured organization is passed as sonar.organization.
@Test
void testWithSonarCloudOrganization() throws Exception {
jsr.step.sonarExecuteScan(
script: nullScript,
juStabUtils: utils,
organization: 'TestOrg-github'
)
// asserts
assertThat(jscr.shell, hasItem(containsString('-Dsonar.organization=TestOrg-github')))
assertJobStatusSuccess()
}
}

View File

@ -1,270 +0,0 @@
import org.apache.commons.exec.*
import hudson.AbortException
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.ExpectedException
import org.junit.rules.RuleChain
import util.BasePiperTest
import util.JenkinsLoggingRule
import util.JenkinsReadYamlRule
import util.JenkinsStepRule
import util.Rules
/**
 * Unit tests for the toolValidate step, covering parameter validation,
 * version detection and compatibility checks for the supported tools:
 * java, mta (SAP Multitarget Application Archive Builder), neo
 * (SAP Cloud Platform Console Client) and cm (Change Management CLI).
 * The 'sh' step is mocked by the get*Version helpers below to simulate
 * the different tool version outputs.
 */
class ToolValidateTest extends BasePiperTest {
// Declares expected exceptions/messages per test.
private ExpectedException thrown = new ExpectedException().none()
// Captures pipeline log output for assertions.
private JenkinsLoggingRule loggingRule = new JenkinsLoggingRule(this)
// Executes the step under test within this test's binding.
private JenkinsStepRule stepRule = new JenkinsStepRule(this)
@Rule
public RuleChain ruleChain = Rules
.getCommonRules(this)
.around(new JenkinsReadYamlRule(this))
.around(thrown)
.around(loggingRule)
.around(stepRule)
// Dummy tool home directory used by all tests.
def home = 'home'
// 'home' is mandatory: null must be rejected.
@Test
void nullHomeTest() {
thrown.expect(IllegalArgumentException)
thrown.expectMessage("The parameter 'home' can not be null or empty.")
stepRule.step.toolValidate(tool: 'java')
}
// 'home' is mandatory: an empty string must be rejected.
@Test
void emptyHomeTest() {
thrown.expect(IllegalArgumentException)
thrown.expectMessage("The parameter 'home' can not be null or empty.")
stepRule.step.toolValidate(tool: 'java', home: '')
}
// 'tool' is mandatory: null must be rejected.
@Test
void nullToolTest() {
helper.registerAllowedMethod('sh', [Map], { Map m -> return 0 })
thrown.expect(IllegalArgumentException)
thrown.expectMessage("The parameter 'tool' can not be null or empty.")
stepRule.step.toolValidate(tool: null, home: home)
}
// 'tool' is mandatory: an empty string must be rejected.
@Test
void emptyToolTest() {
helper.registerAllowedMethod('sh', [Map], { Map m -> return 0 })
thrown.expect(IllegalArgumentException)
thrown.expectMessage("The parameter 'tool' can not be null or empty.")
stepRule.step.toolValidate(tool: '', home: home)
}
// An unknown tool name must abort with an explicit error.
@Test
void invalidToolTest() {
helper.registerAllowedMethod('sh', [Map], { Map m -> return 0 })
thrown.expect(AbortException)
thrown.expectMessage("The tool 'test' is not supported.")
stepRule.step.toolValidate(tool: 'test', home: home)
}
// If the version command fails (exit code 127), validation must abort.
@Test
void unableToValidateJavaTest() {
thrown.expect(AbortException)
thrown.expectMessage('The verification of Java failed.')
helper.registerAllowedMethod('sh', [Map], { Map m -> getNoVersion(m) })
stepRule.step.toolValidate(tool: 'java', home: home)
}
@Test
void unableToValidateMtaTest() {
thrown.expect(AbortException)
thrown.expectMessage('The verification of SAP Multitarget Application Archive Builder failed.')
helper.registerAllowedMethod('sh', [Map], { Map m -> getNoVersion(m) })
stepRule.step.toolValidate(tool: 'mta', home: home)
}
@Test
void unableToValidateNeoTest() {
thrown.expect(AbortException)
thrown.expectMessage('The verification of SAP Cloud Platform Console Client failed.')
helper.registerAllowedMethod('sh', [Map], { Map m -> getNoVersion(m) })
stepRule.step.toolValidate(tool: 'neo', home: home)
}
@Test
void unableToValidateCmTest() {
thrown.expect(AbortException)
thrown.expectMessage('The verification of Change Management Command Line Interface failed.')
helper.registerAllowedMethod('sh', [Map], { Map m -> getNoVersion(m) })
stepRule.step.toolValidate(tool: 'cm', home: home)
}
// A detected but incompatible version must abort with the found version.
@Test
void validateIncompatibleVersionJavaTest() {
thrown.expect(AbortException)
thrown.expectMessage('The installed version of Java is 1.7.0.')
helper.registerAllowedMethod('sh', [Map], { Map m -> getIncompatibleVersion(m) })
stepRule.step.toolValidate(tool: 'java', home: home)
}
@Test
void validateIncompatibleVersionMtaTest() {
thrown.expect(AbortException)
thrown.expectMessage('The installed version of SAP Multitarget Application Archive Builder is 1.0.5.')
helper.registerAllowedMethod('sh', [Map], { Map m -> getIncompatibleVersion(m) })
stepRule.step.toolValidate(tool: 'mta', home: home)
}
@Test
void validateCmIncompatibleVersionTest() {
thrown.expect(AbortException)
thrown.expectMessage('The installed version of Change Management Command Line Interface is 0.0.0.')
helper.registerAllowedMethod('sh', [Map], { Map m -> getIncompatibleVersion(m) })
// NOTE(review): setting the 'tool' binding variable looks redundant,
// since the tool is already passed as a step parameter -- confirm.
binding.setVariable('tool', 'cm')
stepRule.step.toolValidate(tool: 'cm', home: home)
}
// Happy path: a compatible version is detected and logged.
@Test
void validateJavaTest() {
helper.registerAllowedMethod('sh', [Map], { Map m -> getVersion(m) })
stepRule.step.toolValidate(tool: 'java', home: home)
assert loggingRule.log.contains('Verifying Java version 1.8.0 or compatible version.')
assert loggingRule.log.contains('Java version 1.8.0 is installed.')
}
@Test
void validateMtaTest() {
helper.registerAllowedMethod('sh', [Map], { Map m -> getVersion(m) })
stepRule.step.toolValidate(tool: 'mta', home: home)
assert loggingRule.log.contains('Verifying SAP Multitarget Application Archive Builder version 1.0.6 or compatible version.')
assert loggingRule.log.contains('SAP Multitarget Application Archive Builder version 1.0.6 is installed.')
}
@Test
void validateNeoTest() {
helper.registerAllowedMethod('sh', [Map], { Map m -> getVersion(m) })
stepRule.step.toolValidate(tool: 'neo', home: home)
}
@Test
void validateCmTest() {
helper.registerAllowedMethod('sh', [Map], { Map m -> getVersion(m) })
stepRule.step.toolValidate(tool: 'cm', home: home)
assert loggingRule.log.contains('Verifying Change Management Command Line Interface version 0.0.1 or compatible version.')
assert loggingRule.log.contains('Change Management Command Line Interface version 0.0.1 is installed.')
}
// Returns a dummy tool home path for shell calls that query the
// respective *_HOME/location environment variable; 0 otherwise.
private getToolHome(Map m) {
if(m.script.contains('JAVA_HOME')) {
return '/env/java'
} else if(m.script.contains('MTA_JAR_LOCATION')) {
return '/env/mta/mta.jar'
} else if(m.script.contains('NEO_HOME')) {
return '/env/neo'
} else if(m.script.contains('CM_CLI_HOME')) {
return '/env/cmclient'
} else {
return 0
}
}
// Simulates a missing tool: every version query fails with exit code 127
// (command not found); home lookups still succeed.
private getNoVersion(Map m) {
if(m.script.contains('java -version')) {
throw new AbortException('script returned exit code 127')
} else if(m.script.contains('mta.jar -v')) {
throw new AbortException('script returned exit code 127')
} else if(m.script.contains('neo.sh version')) {
throw new AbortException('script returned exit code 127')
} else if(m.script.contains('cmclient -v')) {
throw new AbortException('script returned exit code 127')
} else {
return getToolHome(m)
}
}
// Simulates compatible tool versions for all supported tools.
private getVersion(Map m) {
if(m.script.contains('java -version')) {
return '''openjdk version \"1.8.0_121\"
OpenJDK Runtime Environment (build 1.8.0_121-8u121-b13-1~bpo8+1-b13)
OpenJDK 64-Bit Server VM (build 25.121-b13, mixed mode)'''
} else if(m.script.contains('mta.jar -v')) {
return '1.0.6'
} else if(m.script.contains('neo.sh version')) {
return '''SAP Cloud Platform Console Client
SDK version : 3.39.10
Runtime : neo-java-web'''
} else if(m.script.contains('cmclient -v')) {
return '0.0.1-beta-2 : fc9729964a6acf5c1cad9c6f9cd6469727625a8e'
} else {
return getToolHome(m)
}
}
// Simulates outdated/incompatible tool versions for all supported tools.
private getIncompatibleVersion(Map m) {
if(m.script.contains('java -version')) {
return '''openjdk version \"1.7.0_121\"
OpenJDK Runtime Environment (build 1.7.0_121-8u121-b13-1~bpo8+1-b13)
OpenJDK 64-Bit Server VM (build 25.121-b13, mixed mode)'''
} else if(m.script.contains('mta.jar -v')) {
return '1.0.5'
} else if(m.script.contains('neo.sh version')) {
return '''SAP Cloud Platform Console Client
SDK version : 1.126.51
Runtime : neo-java-web'''
} else if(m.script.contains('cmclient -v')) {
return '0.0.0-beta-1 : fc9729964a6acf5c1cad9c6f9cd6469727625a8e'
} else {
return getToolHome(m)
}
}
}

View File

@ -139,7 +139,7 @@ public class TransportRequestCreateTest extends BasePiperTest {
stepRule.step.transportRequestCreate(script: nullScript, changeDocumentId: '001', developmentSystemId: '001', cmUtils: cm)
assert nullScript.commonPipelineEnvironment.getTransportRequestId() == '001'
assert nullScript.commonPipelineEnvironment.getValue('transportRequestId') == '001'
assert result == [
docker: [
image: 'ppiper/cm-client',
@ -191,7 +191,7 @@ public class TransportRequestCreateTest extends BasePiperTest {
changeManagement: [type: 'CTS'],
cmUtils: cm)
assert nullScript.commonPipelineEnvironment.getTransportRequestId() == '001'
assert nullScript.commonPipelineEnvironment.getValue('transportRequestId') == '001'
assert result == [
docker: [
image: 'ppiper/cm-client',
@ -254,7 +254,7 @@ public class TransportRequestCreateTest extends BasePiperTest {
cmUtils: cm,
verbose: true)
assert nullScript.commonPipelineEnvironment.getTransportRequestId() == '001'
assert nullScript.commonPipelineEnvironment.getValue('transportRequestId') == '001'
assert result == [
docker: [
image: 'rfc',

File diff suppressed because it is too large Load Diff

Some files were not shown because too many files have changed in this diff Show More