Mirror of https://github.com/SAP/jenkins-library.git (synced 2024-12-12 10:55:20 +02:00)

Commit fb22711c2a: Merge remote-tracking branch 'github/master' into HEAD
@@ -31,10 +31,10 @@ jobs:
       - ./cc-test-reporter upload-coverage
     - name: Consumer Tests
       if: repo = "SAP/jenkins-library" && ( (type != pull_request && branch =~ /^master$|^it\/.*$/) || (type == pull_request && head_repo = "SAP/jenkins-library" && head_branch =~ /^it\/.*$/) )
-      script: cd consumer-test && chmod +x integrationTestController.sh && ./integrationTestController.sh
+      script: cd consumer-test && groovy consumerTestController.groovy

     - stage: Docs
-      name: Build & Deploy
+      name: Create Documentation
       install: docker pull squidfunk/mkdocs-material:3.0.4
       before_script: documentation/bin/createDocu.sh
       script: docker run --rm -it -v ${TRAVIS_BUILD_DIR}/documentation:/docs squidfunk/mkdocs-material:3.0.4 build --clean --strict
consumer-test/TestRunnerThread.groovy (new file, 135 lines)
@@ -0,0 +1,135 @@
@Grab('org.yaml:snakeyaml:1.17')

import org.yaml.snakeyaml.Yaml

class TestRunnerThread extends Thread {

    static def workspacesRootDir
    static def libraryVersionUnderTest
    static def repositoryUnderTest

    Process currentProcess
    final StringBuilder stdOut = new StringBuilder()
    final StringBuilder stdErr = new StringBuilder()
    int lastPrintedStdOutLine = -1
    public def returnCode = -1
    public def lastCommand
    def area
    def testCase
    def uniqueName
    def testCaseRootDir
    def testCaseWorkspace
    def testCaseConfig

    TestRunnerThread(File testCaseFile) {
        // Regex pattern expects a folder structure such as '/rootDir/areaDir/testCase.extension'
        def testCaseMatches = (testCaseFile.toString() =~
            /^[\w\-]+\/([\w\-]+)\/([\w\-]+)\..*$/)
        this.area = testCaseMatches[0][1]
        this.testCase = testCaseMatches[0][2]
        if (!area || !testCase) {
            throw new RuntimeException("Expecting file structure '/rootDir/areaDir/testCase.yml' " +
                "but got '${testCaseFile}'.")
        }
        this.uniqueName = "${area}|${testCase}"
        this.testCaseRootDir = new File("${workspacesRootDir}/${area}/${testCase}")
        this.testCaseWorkspace = "${testCaseRootDir}/workspace"
        this.testCaseConfig = new Yaml().load(testCaseFile.text)
    }

    void run() {
        println "[INFO] Test case '${uniqueName}' launched."

        if (testCaseRootDir.exists() || !testCaseRootDir.mkdirs()) {
            throw new RuntimeException("Creation of dir '${testCaseRootDir}' failed.")
        }
        executeShell("git clone -b ${testCase} ${testCaseConfig.referenceAppRepoUrl} " +
            "${testCaseWorkspace}")
        addJenkinsYmlToWorkspace()
        setLibraryVersionInJenkinsfile()

        // Commit the changed version because artifactSetVersion expects the git repo not to be dirty
        executeShell(["git", "-C", "${testCaseWorkspace}", "commit", "--all",
            '--author="piper-testing-bot <piper-testing-bot@example.com>"',
            '--message="Set piper lib version for test"'])

        executeShell("docker run -v /var/run/docker.sock:/var/run/docker.sock " +
            "-v ${System.getenv('PWD')}/${testCaseWorkspace}:/workspace -v /tmp " +
            "-e CASC_JENKINS_CONFIG=/workspace/jenkins.yml -e CX_INFRA_IT_CF_USERNAME " +
            "-e CX_INFRA_IT_CF_PASSWORD -e BRANCH_NAME=${testCase} ppiper/jenkinsfile-runner")

        println "*****[INFO] Test case '${uniqueName}' finished successfully.*****"
        printOutput()
    }

    // Configure path to library-repository under test in Jenkins config
    private void addJenkinsYmlToWorkspace() {
        def sourceFile = 'jenkins.yml'
        def sourceText = new File(sourceFile).text.replaceAll(
            '__REPO_SLUG__', repositoryUnderTest)
        def target = new File("${testCaseWorkspace}/${sourceFile}")
        target.write(sourceText)
    }

    // Force usage of library version under test by setting it in the Jenkinsfile,
    // which is then the first definition and thus has the highest precedence.
    private void setLibraryVersionInJenkinsfile() {
        def jenkinsfile = new File("${testCaseWorkspace}/Jenkinsfile")
        def manipulatedText =
            "@Library(\"piper-library-os@${libraryVersionUnderTest}\") _\n" +
            jenkinsfile.text
        jenkinsfile.write(manipulatedText)
    }

    private void executeShell(command) {
        lastCommand = command
        def startOfCommandString = "Shell command: '${command}'\n"
        stdOut << startOfCommandString
        stdErr << startOfCommandString

        currentProcess = command.execute()
        currentProcess.waitForProcessOutput(stdOut, stdErr)

        returnCode = currentProcess.exitValue()

        currentProcess = null

        if (returnCode > 0) {
            throw new ReturnCodeNotZeroException("Test case: [${uniqueName}]; " +
                "shell command '${command}' exited with return code '${returnCode}'")
        }
    }
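    // Note on the untyped 'command' parameter above: executeShell is called with both a
    // String and a List<String>; Groovy provides .execute() on both, and the List form
    // avoids shell tokenization issues for arguments containing spaces.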
    void printOutput() {
        println "\n[INFO] stdout output from test case ${uniqueName}:"
        stdOut.eachLine { line, i ->
            println "${i} [${uniqueName}] ${line}"
            lastPrintedStdOutLine = i
        }

        println "\n[INFO] stderr output from test case ${uniqueName}:"
        stdErr.eachLine { line, i ->
            println "${i} [${uniqueName}] ${line}"
        }
    }

    public void printRunningStdOut() {
        stdOut.eachLine { line, i ->
            if (i > lastPrintedStdOutLine) {
                println "${i} [${uniqueName}] ${line}"
                lastPrintedStdOutLine = i
            }
        }
    }

    @Override
    public String toString() {
        return uniqueName
    }
}

class ReturnCodeNotZeroException extends Exception {
    ReturnCodeNotZeroException(message) {
        super(message)
    }
}
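A minimal sketch of how this class is driven, mirroring consumerTestController.groovy below; the test case path is illustrative:

```groovy
TestRunnerThread.workspacesRootDir = 'workspaces'
TestRunnerThread.libraryVersionUnderTest = 'git log --format=%H -n 1'.execute().text.trim()
TestRunnerThread.repositoryUnderTest = 'SAP/jenkins-library'

def t = new TestRunnerThread(new File('testCases/someArea/someTest.yml')) // illustrative path
t.start()
t.join()
assert t.returnCode == 0
```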
consumer-test/consumerTestController.groovy (new file, 232 lines)
@@ -0,0 +1,232 @@
import groovy.io.FileType

import static groovy.json.JsonOutput.toJson

COMMIT_HASH = null
RUNNING_LOCALLY = false
AUXILIARY_SLEEP_MS = 10000
START_TIME_MS = System.currentTimeMillis()
WORKSPACES_ROOT = 'workspaces'
TEST_CASES_DIR = 'testCases'
LIBRARY_VERSION_UNDER_TEST = "git log --format=%H -n 1".execute().text.trim()

EXCLUDED_FROM_CONSUMER_TESTING_REGEXES = [
    /^documentation\/.*$/,
    /^\.travis\.yml$/,
    /^test\/.*$/
]


newEmptyDir(WORKSPACES_ROOT)
TestRunnerThread.workspacesRootDir = WORKSPACES_ROOT
TestRunnerThread.libraryVersionUnderTest = LIBRARY_VERSION_UNDER_TEST
TestRunnerThread.repositoryUnderTest = System.getenv('TRAVIS_REPO_SLUG') ?: 'SAP/jenkins-library'

def testCaseThreads
def cli = new CliBuilder(
    usage: 'groovy consumerTestController.groovy [<options>]',
    header: 'Options:',
    footer: 'If no options are set, all tests are run centrally, i.e. on travisCI.')

cli.with {
    h longOpt: 'help', 'Print this help text and exit.'
    l longOpt: 'run-locally', 'Run consumer tests locally in Docker, i.e. skip reporting of GitHub status.'
    s longOpt: 'single-test', args: 1, argName: 'filePath', 'Run single test.'
}
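// Typical invocations (paths are illustrative):
//   groovy consumerTestController.groovy                           runs all tests, with GitHub status reporting
//   groovy consumerTestController.groovy -l -s testCases/someArea/someTest.yml
//                                                                  runs one test locally, no status reporting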

def options = cli.parse(args)

if (options.h) {
    cli.usage()
    return
}

if (options.l) {
    RUNNING_LOCALLY = true
}

if (!RUNNING_LOCALLY) {
    /*
    In case the build is performed for a pull request, TRAVIS_COMMIT is a merge
    commit between the base branch and the PR branch HEAD. That commit is actually built.
    But for notifying about a build status we need the commit which is currently
    the HEAD of the PR branch.

    In case the build is performed for a simple branch (not associated with a PR),
    there is no merge commit between any base branch and the HEAD of a PR branch.
    The commit which we need for notifying about a build status is in this case simply
    TRAVIS_COMMIT itself.
    */
    COMMIT_HASH = System.getenv('TRAVIS_PULL_REQUEST_SHA') ?: System.getenv('TRAVIS_COMMIT')

    if (changeDoesNotNeedConsumerTesting()) {
        println 'No consumer tests necessary.'
        notifyGithub("success", "No consumer tests necessary.")
        return
    } else {
        notifyGithub("pending", "Consumer tests are in progress.")
    }
}

if (!System.getenv('CX_INFRA_IT_CF_USERNAME') || !System.getenv('CX_INFRA_IT_CF_PASSWORD')) {
    exitPrematurely('Environment variables CX_INFRA_IT_CF_USERNAME and CX_INFRA_IT_CF_PASSWORD need to be set.')
}

if (options.s) {
    def file = new File(options.s)
    if (!file.exists()) {
        exitPrematurely("Test case configuration file '${file}' does not exist. " +
            "Please provide path to a configuration file of structure '/rootDir/areaDir/testCase.yml'.")
    }
    testCaseThreads = [new TestRunnerThread(file)]
} else {
    testCaseThreads = listTestCaseThreads()
}

testCaseThreads.each { it ->
    it.start()
}

// The thread below prints to the console while the test cases are running.
// Otherwise the job would be canceled after 10 minutes without output.
def done = false
Thread.start {
    def outputWasPrintedPrematurely = false
    def singleTestCase = (testCaseThreads.size() == 1)
    if (singleTestCase) {
        AUXILIARY_SLEEP_MS = 1000 // for a single test case we print the running output every second
    }
    for (; ;) {
        if (singleTestCase) {
            testCaseThreads[0].printRunningStdOut()
        } else {
            println "[INFO] Consumer tests are still running."
        }

        // Build is killed at 50 min; print log to console at minute 45
        int MINUTES_SINCE_START = (System.currentTimeMillis() - START_TIME_MS) / (1000 * 60)
        if (!singleTestCase && MINUTES_SINCE_START > 44 && !outputWasPrintedPrematurely) {
            testCaseThreads.each { thread ->
                thread.printOutput()
            }
            outputWasPrintedPrematurely = true
        }

        sleep(AUXILIARY_SLEEP_MS)
        if (done) {
            break
        }
    }
}

testCaseThreads.each { it ->
    it.join()
}
done = true

def failedThreads = testCaseThreads.findAll { thread ->
    thread.returnCode != 0
}

def status
def statusMessage
if (failedThreads.size() == 0) {
    status = "success"
    statusMessage = "All consumer tests finished successfully. Congratulations!"
} else {
    failedThreads.each { failedThread ->
        println "[ERROR] ${failedThread.uniqueName}: Process execution of command: '${failedThread.lastCommand}' failed. " +
            "Return code: ${failedThread.returnCode}."
        failedThread.printOutput()
    }
    status = "failure"
    statusMessage = "The following consumer test(s) failed: ${failedThreads}"
}

if (!RUNNING_LOCALLY) {
    notifyGithub(status, statusMessage)
}

println statusMessage

if (status == "failure") {
    System.exit(1)
}


def listTestCaseThreads() {
    // Each dir that includes a yml file is a test case
    def threads = []
    new File(TEST_CASES_DIR).traverse(type: FileType.FILES, nameFilter: ~/^.+\.yml$/) { file ->
        threads << new TestRunnerThread(file)
    }
    return threads
}

def notifyGithub(state, description) {
    println "[INFO] Notifying about state '${state}' for commit '${COMMIT_HASH}'."

    URL url = new URL("https://api.github.com/repos/SAP/jenkins-library/statuses/${COMMIT_HASH}")
    HttpURLConnection con = (HttpURLConnection) url.openConnection()
    con.setRequestMethod('POST')
    con.setRequestProperty("Content-Type", "application/json; utf-8");
    con.setRequestProperty('User-Agent', 'groovy-script')
    con.setRequestProperty('Authorization', "token ${System.getenv('INTEGRATION_TEST_VOTING_TOKEN')}")

    def postBody = [
        state      : state,
        target_url : System.getenv('TRAVIS_BUILD_WEB_URL'),
        description: description,
        context    : "integration-tests"
    ]

    con.setDoOutput(true)
    con.getOutputStream().withStream { os ->
        os.write(toJson(postBody).getBytes("UTF-8"))
    }

    int responseCode = con.getResponseCode()
    if (responseCode != HttpURLConnection.HTTP_CREATED) {
        exitPrematurely("[ERROR] Posting status to github failed. Expected response code " +
            "'${HttpURLConnection.HTTP_CREATED}', but got '${responseCode}'. " +
            "Response message: '${con.getResponseMessage()}'",
            34) // Error code taken from curl: CURLE_HTTP_POST_ERROR
    }
}

def changeDoesNotNeedConsumerTesting() {
    if (System.getenv('TRAVIS_BRANCH') == 'master') {
        return false
    }

    def excludesRegex = '(' + EXCLUDED_FROM_CONSUMER_TESTING_REGEXES.join('|') + ')'

    "git remote add sap https://github.com/SAP/jenkins-library.git".execute().waitFor()
    "git fetch sap".execute().waitFor()
    def diff = "git diff --name-only sap/master ${LIBRARY_VERSION_UNDER_TEST}".execute().text.trim()

    for (def line : diff.readLines()) {
        if (!(line ==~ excludesRegex)) {
            return false
        }
    }

    return true
}
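// E.g. a non-master change touching only documentation/**, .travis.yml, or test/** skips
// consumer testing; any other changed path (or a master build) triggers the full run.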

static def newEmptyDir(String dirName) {
    def dir = new File(dirName)
    if (dir.exists()) {
        if (!dir.deleteDir()) {
            exitPrematurely("Deletion of dir '${dirName}' failed.")
        }
    }
    if (!dir.mkdirs()) {
        exitPrematurely("Creation of dir '${dirName}' failed.")
    }
}

static def exitPrematurely(String message, int returnCode = 1) {
    println message
    System.exit(returnCode)
}
@@ -1,139 +0,0 @@
#!/bin/bash

function fail() {
  local message="$1"
  local returnCode=${2:-1}
  echo "[ERROR] ${message}" >&2
  exit "${returnCode}"
}

function notify() {

  local state=${1}
  local description=${2}
  local hash=${3}

  echo "[INFO] Notifying about state \"${state}\" for commit \"${hash}\"."

  curl -X POST \
    --fail \
    --silent \
    --output /dev/null \
    --data "{\"state\": \"${state}\", \"target_url\": \"${TRAVIS_BUILD_WEB_URL}\", \"description\": \"${description}\", \"context\": \"integration-tests\"}" \
    --user "${INTEGRATION_TEST_VOTING_USER}:${INTEGRATION_TEST_VOTING_TOKEN}" \
    "https://api.github.com/repos/SAP/jenkins-library/statuses/${hash}" || fail "Cannot send notification. curl return code: $?"
}

function cleanup() {
  [[ -z "${notificationThreadPid}" ]] || kill -PIPE "${notificationThreadPid}" &>/dev/null
}

trap cleanup EXIT

#
# In case the build is performed for a pull request TRAVIS_COMMIT is a merge
# commit between the base branch and the PR branch HEAD. That commit is actually built.
# But for notifying about a build status we need the commit which is currently
# the HEAD of the PR branch.
#
# In case the build is performed for a simple branch (not associated with a PR)
# there is no merge commit between any base branch and HEAD of a PR branch.
# The commit which we need for notifying about a build status is in this case simply
# TRAVIS_COMMIT itself.
#
COMMIT_HASH_FOR_STATUS_NOTIFICATIONS="${TRAVIS_PULL_REQUEST_SHA}"
[[ -z "${COMMIT_HASH_FOR_STATUS_NOTIFICATIONS}" ]] && COMMIT_HASH_FOR_STATUS_NOTIFICATIONS="${TRAVIS_COMMIT}"

notify "pending" "Integration tests in progress." "${COMMIT_HASH_FOR_STATUS_NOTIFICATIONS}"

WORKSPACES_ROOT=workspaces
[[ -e "${WORKSPACES_ROOT}" ]] && rm -rf ${WORKSPACES_ROOT}

TEST_CASES=$(find testCases -name '*.yml')

# This auxiliary thread is needed in order to produce some output while the
# tests are running. Otherwise the job will be canceled after 10 minutes without
# output.
while true; do sleep 10; echo "[INFO] Integration tests still running."; done &
notificationThreadPid=$!

declare -a processes
i=0
for f in ${TEST_CASES}
do
  testCase=$(basename "${f%.*}")
  area=$(dirname "${f#*/}")
  echo "[INFO] Running test case \"${testCase}\" in area \"${area}\"."
  TEST_CASE_ROOT="${WORKSPACES_ROOT}/${area}/${testCase}"
  [[ -e "${TEST_CASE_ROOT}" ]] && rm -rf "${TEST_CASE_ROOT}"
  mkdir -p "${TEST_CASE_ROOT}" || fail "Cannot create test case root directory for test case \"${testCase}\"." 1
  source ./runTest.sh "${testCase}" "${TEST_CASE_ROOT}" &> "${TEST_CASE_ROOT}/log.txt" &
  pid=$!
  processes[$i]="${area}/${testCase}:${pid}"
  echo "[INFO] Test case \"${testCase}\" in area \"${area}\" launched. (PID: \"${pid}\")."
  let i=i+1
done

[[ "${i}" == 0 ]] && fail "No tests have been executed." 1

#
# wait for the test cases and cat the log
for p in "${processes[@]}"
do
  area=$(dirname "${p%:*}")
  testCase=$(basename "${p%:*}")
  processId="${p#*:}"
  echo "[INFO] Waiting for test case \"${testCase}\" in area \"${area}\" (PID: \"${processId}\")."
  wait "${processId}"
  echo "[INFO] Test case \"${testCase}\" in area \"${area}\" finished (PID: \"${processId}\")."
done

kill -PIPE "${notificationThreadPid}" &>/dev/null && notificationThreadPid=""

#
# provide the logs
for p in "${processes[@]}"
do
  area=$(dirname "${p%:*}")
  testCase=$(basename "${p%:*}")
  TEST_CASE_ROOT="${WORKSPACES_ROOT}/${area}/${testCase}"
  echo "[INFO] === START === Logs for test case \"${testCase}\" ===."
  cat "${TEST_CASE_ROOT}/log.txt"
  echo "[INFO] === END === Logs for test case \"${testCase}\" ===."
done

#
# list test case status
echo "[INFO] Build status:"
failure="false"
for p in "${processes[@]}"
do
  status="UNDEFINED"
  area=$(dirname "${p%:*}")
  testCase=$(basename "${p%:*}")
  TEST_CASE_ROOT="${WORKSPACES_ROOT}/${area}/${testCase}"
  if [[ -f "${TEST_CASE_ROOT}/SUCCESS" ]]
  then
    status="SUCCESS"
  else
    status="FAILURE"
    failure="true"
  fi
  printf "[INFO] %-30s: %s\n" "${testCase}" "${status}"
done

STATUS_DESCRIPTION="The integration tests failed."
STATUS_STATE="failure"

if [[ "${failure}" == "false" ]]
then
  STATUS_DESCRIPTION="The integration tests succeeded."
  STATUS_STATE="success"
fi

notify "${STATUS_STATE}" "${STATUS_DESCRIPTION}" "${COMMIT_HASH_FOR_STATUS_NOTIFICATIONS}"

[[ "${failure}" != "false" ]] && fail "Integration tests failed." 1

echo "[INFO] Integration tests succeeded."
exit 0
@@ -1,30 +0,0 @@
#!/usr/bin/env bash

TEST_CASE=$1
TEST_CASE_ROOT=$2
TEST_CASE_WORKSPACE="${TEST_CASE_ROOT}/workspace"

LIBRARY_VERSION_UNDER_TEST=$(git log --format="%H" -n 1)
REPOSITORY_UNDER_TEST=${TRAVIS_REPO_SLUG:-SAP/jenkins-library}

git clone -b "${TEST_CASE}" https://github.com/sap/cloud-s4-sdk-book "${TEST_CASE_WORKSPACE}"
cp -f jenkins.yml "${TEST_CASE_WORKSPACE}"
cd "${TEST_CASE_WORKSPACE}" || exit 1

# Configure path to library-repository under test in Jenkins config
sed -i -e "s:__REPO_SLUG__:${REPOSITORY_UNDER_TEST}:g" jenkins.yml

# Force usage of library version under test by setting it in the Jenkinsfile which is then the first definition and thus has the highest precedence
echo "@Library(\"piper-library-os@$LIBRARY_VERSION_UNDER_TEST\") _" | cat - Jenkinsfile > temp && mv temp Jenkinsfile

# Commit the changed version because artifactSetVersion expects the git repo not to be dirty
git commit --all --author="piper-testing-bot <piper-testing-bot@example.com>" --message="Set piper lib version for test"

docker run -v /var/run/docker.sock:/var/run/docker.sock -v "${PWD}":/workspace -v /tmp -e CASC_JENKINS_CONFIG=/workspace/jenkins.yml \
  -e CX_INFRA_IT_CF_USERNAME -e CX_INFRA_IT_CF_PASSWORD -e BRANCH_NAME="${TEST_CASE}" ppiper/jenkinsfile-runner

RC=$?

cd - &> /dev/null || { echo "[ERROR] change directory back into integration test root folder failed."; exit 1; }

[[ "${RC}" == 0 ]] && touch "${TEST_CASE_ROOT}/SUCCESS"
@@ -1,2 +1,2 @@
-# Empty for the moment.
-# Might contain test configuration in the future.
+# Test case configuration
+referenceAppRepoUrl: "https://github.com/sap/cloud-s4-sdk-book"

@@ -1,2 +1,2 @@
-# Empty for the moment.
-# Might contain test configuration in the future.
+# Test case configuration
+referenceAppRepoUrl: "https://github.com/sap/cloud-s4-sdk-book"
@@ -20,12 +20,32 @@ class TemplateHelper {
    parameters.keySet().toSorted().each {

        def props = parameters.get(it)
        t += "| `${it}` | ${props.mandatory ?: props.required ? 'yes' : 'no'} | ${(props.defaultValue ? '`' + props.defaultValue + '`' : '') } | ${props.value ?: ''} |\n"

        def defaultValue = isComplexDefault(props.defaultValue) ? renderComplexDefaultValue(props.defaultValue) : "`${props.defaultValue}`"

        t += "| `${it}` | ${props.mandatory ?: props.required ? 'yes' : 'no'} | ${defaultValue} | ${props.value ?: ''} |\n"
    }

    t
}

private static boolean isComplexDefault(def _default) {
    if(! (_default in Collection)) return false
    if(_default.size() == 0) return false
    for(def entry in _default) {
        if(! (entry in Map)) return false
        if(! entry.dependentParameterKey) return false
        if(! entry.key) return false
    }
    return true
}

private static renderComplexDefaultValue(def _default) {
    _default
        .collect { "${it.dependentParameterKey}=`${it.key ?: '<empty>'}`:`${it.value ?: '<empty>'}`" }
        .join('<br />')
}
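// Illustration (assumed input shape): [[dependentParameterKey:'buildTool', key:'maven', value:'pom.xml']]
// renders as buildTool=`maven`:`pom.xml`, with multiple entries joined by '<br />'.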

static createParameterDescriptionSection(Map parameters) {
    def t = ''
    parameters.keySet().toSorted().each {

@@ -68,7 +88,7 @@ class Helper {
    static getConfigHelper(classLoader, roots, script) {

        def compilerConfig = new CompilerConfiguration()
        compilerConfig.setClasspathList( roots )
        compilerConfig.setClasspathList( roots )

        new GroovyClassLoader(classLoader, compilerConfig, true)
            .parseClass(new File(projectRoot, 'src/com/sap/piper/ConfigurationHelper.groovy'))
@@ -190,105 +210,114 @@ class Helper {
            f.eachLine {
                line ->

                if(docuEnd) {
                    docuEnd = false
                    if(line ==~ /.*dependingOn.*/) {
                        def dependentConfigKey = (line =~ /.*dependingOn\('(.*)'\).mixin\('(.*)'/)[0][1]
                        def configKey = (line =~ /.*dependingOn\('(.*)'\).mixin\('(.*)'/)[0][2]
                        if(! step.dependentConfig[configKey]) {
                            step.dependentConfig[configKey] = []
                        }
                        step.dependentConfig[configKey] << dependentConfigKey
                    }

                    if(isHeader(line)) {
                        def _docu = []
                        docuLines.each { _docu << it }
                        _docu = Helper.trim(_docu)
                        step.description = _docu.join('\n')
                    } else {
                if(docuEnd) {
                    docuEnd = false

                        def param = retrieveParameterName(line)
                    if(isHeader(line)) {
                        def _docu = []
                        docuLines.each { _docu << it }
                        _docu = Helper.trim(_docu)
                        step.description = _docu.join('\n')
                    } else {

                        if(!param) {
                            throw new RuntimeException('Cannot retrieve parameter for a comment')
                        def param = retrieveParameterName(line)

                        if(!param) {
                            throw new RuntimeException('Cannot retrieve parameter for a comment')
                        }

                        def _docu = [], _value = [], _mandatory = [], _parentObject = []
                        docuLines.each { _docu << it }
                        valueLines.each { _value << it }
                        mandatoryLines.each { _mandatory << it }
                        parentObjectLines.each { _parentObject << it }
                        _parentObject << param
                        param = _parentObject*.trim().join('/').trim()

                        if(step.parameters[param].docu || step.parameters[param].value)
                            System.err << "[WARNING] There is already some documentation for parameter '${param}. Is this parameter documented twice?'\n"

                        step.parameters[param].docu = _docu*.trim().join(' ').trim()
                        step.parameters[param].value = _value*.trim().join(' ').trim()
                        step.parameters[param].mandatory = _mandatory*.trim().join(' ').trim()
                    }
                    docuLines.clear()
                    valueLines.clear()
                    mandatoryLines.clear()
                    parentObjectLines.clear()
                }

                if( line.trim() ==~ /^\/\*\*.*/ ) {
                    docu = true
                }

                if(docu) {
                    def _line = line
                    _line = _line.replaceAll('^\\s*', '') // leading white spaces
                    if(_line.startsWith('/**')) _line = _line.replaceAll('^\\/\\*\\*', '') // start comment
                    if(_line.startsWith('*/') || _line.trim().endsWith('*/')) _line = _line.replaceAll('^\\*/', '').replaceAll('\\*/\\s*$', '') // end comment
                    if(_line.startsWith('*')) _line = _line.replaceAll('^\\*', '') // continue comment
                    if(_line.startsWith(' ')) _line = _line.replaceAll('^\\s', '')
                    if(_line ==~ /.*@possibleValues.*/) {
                        mandatory = false // should be something like reset attributes
                        value = true
                        parentObject = false
                    }
                    // some remark for mandatory e.g. some parameters are only mandatory under certain conditions
                    if(_line ==~ /.*@mandatory.*/) {
                        value = false // should be something like reset attributes ...
                        mandatory = true
                        parentObject = false
                    }
                    // grouping config properties within a parent object for easier readability
                    if(_line ==~ /.*@parentConfigKey.*/) {
                        value = false // should be something like reset attributes ...
                        mandatory = false
                        parentObject = true
                    }

                    def _docu = [], _value = [], _mandatory = [], _parentObject = []
                    docuLines.each { _docu << it }
                    valueLines.each { _value << it }
                    mandatoryLines.each { _mandatory << it }
                    parentObjectLines.each { _parentObject << it }
                    _parentObject << param
                    param = _parentObject*.trim().join('/').trim()
                    if(value) {
                        if(_line) {
                            _line = (_line =~ /.*@possibleValues\s*?(.*)/)[0][1]
                            valueLines << _line
                        }
                    }

                    if(step.parameters[param].docu || step.parameters[param].value)
                        System.err << "[WARNING] There is already some documentation for parameter '${param}. Is this parameter documented twice?'\n"
                    if(mandatory) {
                        if(_line) {
                            _line = (_line =~ /.*@mandatory\s*?(.*)/)[0][1]
                            mandatoryLines << _line
                        }
                    }

                    step.parameters[param].docu = _docu*.trim().join(' ').trim()
                    step.parameters[param].value = _value*.trim().join(' ').trim()
                    step.parameters[param].mandatory = _mandatory*.trim().join(' ').trim()
                    if(parentObject) {
                        if(_line) {
                            _line = (_line =~ /.*@parentConfigKey\s*?(.*)/)[0][1]
                            parentObjectLines << _line
                        }
                    }

                    if(!value && !mandatory && !parentObject) {
                        docuLines << _line
                    }
                }
                docuLines.clear()
                valueLines.clear()
                mandatoryLines.clear()
                parentObjectLines.clear()
            }

            if( line.trim() ==~ /^\/\*\*.*/ ) {
                docu = true
            }

            if(docu) {
                def _line = line
                _line = _line.replaceAll('^\\s*', '') // leading white spaces
                if(_line.startsWith('/**')) _line = _line.replaceAll('^\\/\\*\\*', '') // start comment
                if(_line.startsWith('*/') || _line.trim().endsWith('*/')) _line = _line.replaceAll('^\\*/', '').replaceAll('\\*/\\s*$', '') // end comment
                if(_line.startsWith('*')) _line = _line.replaceAll('^\\*', '') // continue comment
                if(_line.startsWith(' ')) _line = _line.replaceAll('^\\s', '')
                if(_line ==~ /.*@possibleValues.*/) {
                    mandatory = false // should be something like reset attributes
                    value = true
                    parentObject = false
                }
                // some remark for mandatory e.g. some parameters are only mandatory under certain conditions
                if(_line ==~ /.*@mandatory.*/) {
                    value = false // should be something like reset attributes ...
                    mandatory = true
                    parentObject = false
                }
                // grouping config properties within a parent object for easier readability
                if(_line ==~ /.*@parentConfigKey.*/) {
                    value = false // should be something like reset attributes ...
                    if(docu && line.trim() ==~ /^.*\*\//) {
                        docu = false
                        value = false
                        mandatory = false
                    parentObject = true
                        parentObject = false
                        docuEnd = true
                    }

                if(value) {
                    if(_line) {
                        _line = (_line =~ /.*@possibleValues\s*?(.*)/)[0][1]
                        valueLines << _line
                    }
                }

                if(mandatory) {
                    if(_line) {
                        _line = (_line =~ /.*@mandatory\s*?(.*)/)[0][1]
                        mandatoryLines << _line
                    }
                }

                if(parentObject) {
                    if(_line) {
                        _line = (_line =~ /.*@parentConfigKey\s*?(.*)/)[0][1]
                        parentObjectLines << _line
                    }
                }

                if(!value && !mandatory && !parentObject) {
                    docuLines << _line
                }
            }

            if(docu && line.trim() ==~ /^.*\*\//) {
                docu = false
                value = false
                mandatory = false
                parentObject = false
                docuEnd = true
            }
        }
    }
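// For orientation, a comment block this scanner is meant to parse looks roughly like the
// following (illustrative; tag handling per the @possibleValues/@mandatory/@parentConfigKey
// branches above):
//   /**
//    * Short description of the parameter.
//    * @possibleValues `value1`, `value2`
//    * @mandatory only under certain conditions
//    * @parentConfigKey someParentKey
//    */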

@@ -376,7 +405,7 @@ class Helper {
    roots = [
        new File(Helper.projectRoot, "vars").getAbsolutePath(),
        new File(Helper.projectRoot, "src").getAbsolutePath()
    ]
    ]

    stepsDir = null
    stepsDocuDir = null
@@ -405,7 +434,7 @@ if(args.length >= 3 && args[2].contains('.yml')) {

if(args.length >= 3)
    steps = (args as List).drop(argsDrop) // the first two entries are stepsDir and docuDir
                                          // the other parts are considered as step names
                                          // the other parts are considered as step names


// assign parameters
@@ -529,7 +558,7 @@ def fetchMandatoryFrom(def step, def parameterName, def steps) {
}

def fetchPossibleValuesFrom(def step, def parameterName, def steps) {
    return steps[step]?.parameters[parameterName]?.value ?: ''
    return steps[step]?.parameters[parameterName]?.value ?: ''
}

def handleStep(stepName, prepareDefaultValuesStep, gse, customDefaults) {
@@ -549,8 +578,8 @@ def handleStep(stepName, prepareDefaultValuesStep, gse, customDefaults) {
    prepareDefaultValuesStepParams.customDefaults = customDefaults

    def defaultConfig = Helper.getConfigHelper(getClass().getClassLoader(),
        roots,
        Helper.getDummyScript(prepareDefaultValuesStep, stepName, prepareDefaultValuesStepParams)).use()
        roots,
        Helper.getDummyScript(prepareDefaultValuesStep, stepName, prepareDefaultValuesStepParams)).use()

    def params = [] as Set
@@ -576,32 +605,34 @@ def handleStep(stepName, prepareDefaultValuesStep, gse, customDefaults) {
    def compatibleParams = [] as Set
    if(parentObjectMappings) {
        params.each {
            if (parentObjectMappings[it])
                compatibleParams.add(parentObjectMappings[it] + '/' + it)
            else
                compatibleParams.add(it)
            if (parentObjectMappings[it])
                compatibleParams.add(parentObjectMappings[it] + '/' + it)
            else
                compatibleParams.add(it)
        }
        if (compatibleParams)
            params = compatibleParams
    }

    def step = [parameters:[:]]
    // 'dependentConfig' is only present here for internal reasons and that entry is removed at
    // end of method.
    def step = [parameters:[:], dependentConfig: [:]]

    //
    // START special handling for 'script' parameter
    // ... would be better if there is no special handling required ...

    step.parameters['script'] = [
        docu: 'The common script environment of the Jenkinsfile running. ' +
            'Typically the reference to the script calling the pipeline ' +
            'step is provided with the this parameter, as in `script: this`. ' +
            'This allows the function to access the ' +
            'commonPipelineEnvironment for retrieving, for example, configuration parameters.',
        required: true,
        docu: 'The common script environment of the Jenkinsfile running. ' +
            'Typically the reference to the script calling the pipeline ' +
            'step is provided with the this parameter, as in `script: this`. ' +
            'This allows the function to access the ' +
            'commonPipelineEnvironment for retrieving, for example, configuration parameters.',
        required: true,

        GENERAL_CONFIG: false,
        STEP_CONFIG: false
    ]
        GENERAL_CONFIG: false,
        STEP_CONFIG: false
    ]

    // END special handling for 'script' parameter
@@ -612,9 +643,9 @@ def handleStep(stepName, prepareDefaultValuesStep, gse, customDefaults) {
    def defaultValue = Helper.getValue(defaultConfig, it.split('/'))

    def parameterProperties = [
        defaultValue: defaultValue,
        required: requiredParameters.contains((it as String)) && defaultValue == null
    ]
        defaultValue: defaultValue,
        required: requiredParameters.contains((it as String)) && defaultValue == null
    ]

    step.parameters.put(it, parameterProperties)

@@ -628,5 +659,34 @@ def handleStep(stepName, prepareDefaultValuesStep, gse, customDefaults) {

    Helper.scanDocu(theStep, step)

    step.parameters.each { k, v ->
        if(step.dependentConfig.get(k)) {

            def dependentParameterKey = step.dependentConfig.get(k)[0]
            def dependentValues = step.parameters.get(dependentParameterKey)?.value

            if (dependentValues) {
                def the_defaults = []
                dependentValues
                    .replaceAll('[\'"` ]', '')
                    .split(',').each {possibleValue ->
                        if (!(possibleValue instanceof Boolean) && defaultConfig.get(possibleValue)) {
                            the_defaults <<
                                [
                                    dependentParameterKey: dependentParameterKey,
                                    key: possibleValue,
                                    value: Helper.getValue(defaultConfig.get(possibleValue), k.split('/'))
                                ]
                        }
                    }
                v.defaultValue = the_defaults
            }
        }
    }

    //
    // 'dependentConfig' is only present for internal purposes and must not be used outside.
    step.remove('dependentConfig')

    step
}
documentation/docs/steps/detectExecuteScan.md (new file, 20 lines)
@@ -0,0 +1,20 @@
# ${docGenStepName}

## ${docGenDescription}

## Prerequisites

You need to store the API token for the Detect service as a _'Secret text'_ credential in your Jenkins system.

!!! note "minimum plugin requirement"
    This step requires [synopsys-detect-plugin](https://github.com/jenkinsci/synopsys-detect-plugin) with at least version `2.0.0`.

## Example

```groovy
detectExecuteScan script: this, scanProperties: ['--logging.level.com.synopsys.integration=TRACE']
```
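For illustration, a call restricting Detect to a source scan of selected paths; the parameter names (`scanners`, `scanPaths`, `projectName`, `serverUrl`) appear in this step's tests below, while the concrete values are examples:

```groovy
detectExecuteScan script: this,
    scanners: ['source'],
    scanPaths: ['src/', 'lib/'],
    projectName: 'myProject',
    serverUrl: 'https://my.blackduck.server'
```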

## ${docGenParameters}

## ${docGenConfiguration}
documentation/docs/steps/kanikoExecute.md (new file, 27 lines)
@@ -0,0 +1,27 @@
# ${docGenStepName}

## ${docGenDescription}

## Prerequisites

When pushing to a container registry, you need to maintain the respective credentials in your Jenkins credentials store:

Kaniko expects a Docker `config.json` file containing the credential information for registries.
You can create it as explained in the Docker Success Center article [How to generate a new auth in the config.json file](https://success.docker.com/article/generate-new-auth-in-config-json-file).

Please copy this file and upload it to your Jenkins, for example<br />
via _Jenkins_ -> _Credentials_ -> _System_ -> _Global credentials (unrestricted)_ -> _Add Credentials_ ->

* Kind: _Secret file_
* File: upload your `config.json` file
* ID: specify an ID which you then use for the configuration of `dockerConfigJsonCredentialsId` (see below)

## Example

```groovy
kanikoExecute script:this
```
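For illustration, a fuller call that pushes the built image to a registry; `dockerConfigJsonCredentialsId` and `containerImageNameAndTag` appear in this step's tests below, while the concrete values are examples:

```groovy
kanikoExecute script: this,
    dockerConfigJsonCredentialsId: 'myDockerConfigJson',
    containerImageNameAndTag: 'my.docker.registry/path/myImageName:myTag'
```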

## ${docGenParameters}

## ${docGenConfiguration}

documentation/docs/steps/piperPipelineStagePost.md (new file, 7 lines)
@@ -0,0 +1,7 @@
# ${docGenStepName}

## ${docGenDescription}

## ${docGenParameters}

## ${docGenConfiguration}

@@ -4,7 +4,9 @@

## Prerequisites

-* Installed and configured [Jenkins Slack plugin](https://github.com/jenkinsci/slack-plugin).
+* Installed and configured [Slack JenkinsCI integration](https://my.slack.com/services/new/jenkins-ci)
+* *secret text* Jenkins credentials with the Slack token
+* Installed and configured [Jenkins Slack plugin](https://github.com/jenkinsci/slack-plugin#install-instructions-for-slack).
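A minimal illustrative call of the step documented here; all further settings come from the pipeline configuration:

```groovy
slackSendNotification script: this
```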

## ${docGenParameters}

@@ -177,6 +177,27 @@ steps:
    stashContent:
      - 'tests'
    testReportFilePath: 'cst-report.json'
  detectExecuteScan:
    detect:
      projectVersion: '1'
    scanners:
      - signature
    scanPaths:
      - '.'
    scanProperties:
      - '--blackduck.signature.scanner.memory=4096'
      - '--blackduck.timeout=6000'
      - '--blackduck.trust.cert=true'
      - '--detect.policy.check.fail.on.severities=BLOCKER,CRITICAL,MAJOR'
      - '--detect.report.timeout=4800'
      - '--logging.level.com.synopsys.integration=DEBUG'
    stashContent:
      - 'buildDescriptor'
      - 'checkmarx'
    # buildTool specific settings
    golang:
      dockerImage: 'golang:1.12-stretch'
      dockerWorkspace: ''
  dockerExecute:
    dockerPullImage: true
    sidecarPullImage: true
@@ -239,6 +260,15 @@ steps:
    healthEndpoint: ''
  influxWriteData:
    influxServer: ''
  kanikoExecute:
    containerBuildOptions: '--skip-tls-verify-pull'
    containerCommand: '/busybox/tail -f /dev/null'
    containerPreparationCommand: 'rm /kaniko/.docker/config.json'
    containerShell: '/busybox/sh'
    customTlsCertificateLinks: []
    dockerfile: Dockerfile
    dockerImage: 'gcr.io/kaniko-project/executor:debug'
    dockerOptions: "-u 0 --entrypoint=''"
  karmaExecuteTests:
    containerPortMappings:
      'node:8-stretch':

@@ -341,6 +371,7 @@ steps:
    stashContent:
      - 'buildDescriptor'
      - 'opensourceConfiguration'
      - 'checkmarx'
    additionalInstallCommand: >-
      curl --fail https://raw.githubusercontent.com/golang/dep/master/install.sh | sh
      && mkdir -p \$GOPATH/src/${config.whitesource.projectName.substring(0, config.whitesource.projectName.lastIndexOf('/'))}
@@ -184,7 +184,7 @@ class ConfigurationHelper implements Serializable {

    ConfigurationHelper withPropertyInValues(String key, Set values){
        withMandatoryProperty(key)
-       def value = config[key]
+       def value = config[key] instanceof GString ? config[key].toString() : config[key]
        if(! (value in values) ) {
            throw new IllegalArgumentException("Invalid ${key} = '${value}'. Valid '${key}' values are: ${values}.")
        }
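For context, a small standalone sketch (not part of this diff) of the Groovy behavior the `toString()` conversion works around:

```groovy
def name = 'value1'
def gstring = "$name"                        // groovy.lang.GString, not java.lang.String
Set values = ['value1', 'value2', 'value3']  // a Set<String>

assert gstring == 'value1'          // Groovy's == coerces GString and String
assert !(gstring in values)         // but 'in' delegates to Set.contains, which does not
assert gstring.toString() in values // hence the conversion before the membership check
```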
@@ -9,7 +9,7 @@ class WhitesourceConfigurationHelper implements Serializable {
    def parsingClosure = { fileReadPath -> return script.readProperties (file: fileReadPath) }
    def serializationClosure = { configuration -> serializeUAConfig(configuration) }
    def inputFile = config.whitesource.configFilePath.replaceFirst('\\./', '')
-   def suffix = utils.generateSha1(config.whitesource.configFilePath)
+   def suffix = utils.generateSha1("${path}${inputFile}")
    def targetFile = "${inputFile}.${suffix}"
    if(config.whitesource.productName.startsWith('DIST - ')) {
        mapping += [
test/groovy/DetectExecuteScanTest.groovy (new file, 143 lines)
@@ -0,0 +1,143 @@
#!groovy
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.RuleChain
import util.BasePiperTest
import util.JenkinsCredentialsRule
import util.JenkinsDockerExecuteRule
import util.JenkinsReadYamlRule
import util.JenkinsShellCallRule
import util.JenkinsStepRule
import util.Rules

import static org.hamcrest.CoreMatchers.containsString
import static org.hamcrest.CoreMatchers.is
import static org.hamcrest.Matchers.allOf
import static org.hamcrest.Matchers.hasItem
import static org.hamcrest.Matchers.not
import static org.junit.Assert.assertThat

class DetectExecuteScanTest extends BasePiperTest {

    private JenkinsDockerExecuteRule dockerRule = new JenkinsDockerExecuteRule(this)
    private JenkinsShellCallRule shellRule = new JenkinsShellCallRule(this)
    private JenkinsStepRule stepRule = new JenkinsStepRule(this)

    private String detectProperties = ''

    @Rule
    public RuleChain rules = Rules
        .getCommonRules(this)
        .around(new JenkinsReadYamlRule(this))
        .around(shellRule)
        .around(dockerRule)
        .around(stepRule)
        .around(new JenkinsCredentialsRule(this)
            .withCredentials('testCredentials', 'testToken')
        )

    @Before
    void init() {

        detectProperties = ''
        helper.registerAllowedMethod('synopsys_detect', [String.class], {s ->
            detectProperties = s
        })
    }

    @Test
    void testDetectDefault() {
        stepRule.step.detectExecuteScan([
            apiTokenCredentialsId: 'testCredentials',
            projectName: 'testProject',
            serverUrl: 'https://test.blackducksoftware.com',
            juStabUtils: utils,
            script: nullScript
        ])

        //ToDo: assert unstashing

        assertThat(detectProperties, containsString("--detect.project.name='testProject'"))
        assertThat(detectProperties, containsString("--detect.project.version.name='1'"))
        assertThat(detectProperties, containsString("--blackduck.url=https://test.blackducksoftware.com"))
        assertThat(detectProperties, containsString("--blackduck.api.token=testToken"))
        assertThat(detectProperties, containsString("--detect.blackduck.signature.scanner.paths=."))
        assertThat(detectProperties, containsString("--blackduck.signature.scanner.memory=4096"))
        assertThat(detectProperties, containsString("--blackduck.timeout=6000"))
        assertThat(detectProperties, containsString("--blackduck.trust.cert=true"))
        assertThat(detectProperties, containsString("--detect.report.timeout=4800"))
    }

    @Test
    void testDetectCustomPaths() {
        stepRule.step.detectExecuteScan([
            apiTokenCredentialsId: 'testCredentials',
            projectName: 'testProject',
            scanPaths: ['test1/', 'test2/'],
            serverUrl: 'https://test.blackducksoftware.com',
            juStabUtils: utils,
            script: nullScript
        ])

        assertThat(detectProperties, containsString("--detect.blackduck.signature.scanner.paths=test1/,test2/"))
    }

    @Test
    void testDetectSourceScanOnly() {
        stepRule.step.detectExecuteScan([
            apiTokenCredentialsId: 'testCredentials',
            projectName: 'testProject',
            scanners: ['source'],
            serverUrl: 'https://test.blackducksoftware.com',
            juStabUtils: utils,
            script: nullScript
        ])

        assertThat(detectProperties, not(containsString("--detect.blackduck.signature.scanner.paths=.")))
        assertThat(detectProperties, containsString("--detect.source.path=."))
    }

    @Test
    void testDetectGolang() {
        stepRule.step.detectExecuteScan([
            buildTool: 'golang',
            apiTokenCredentialsId: 'testCredentials',
            projectName: 'testProject',
            serverUrl: 'https://test.blackducksoftware.com',
            juStabUtils: utils,
            script: nullScript
        ])

        assertThat(dockerRule.dockerParams.dockerImage, is('golang:1.12-stretch'))
        assertThat(dockerRule.dockerParams.dockerWorkspace, is(''))
        assertThat(dockerRule.dockerParams.stashContent, allOf(hasItem('buildDescriptor'),hasItem('checkmarx')))

        assertThat(shellRule.shell, hasItem('curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh'))
        assertThat(shellRule.shell, hasItem('ln --symbolic $(pwd) $GOPATH/src/hub'))
        assertThat(shellRule.shell, hasItem('cd $GOPATH/src/hub && dep ensure'))
    }

    @Test
    void testCustomScanProperties() {
        def detectProps = [
            '--blackduck.signature.scanner.memory=1024'
        ]
        stepRule.step.detectExecuteScan([
            //scanProperties: detectProps,
            scanProperties: ['--blackduck.signature.scanner.memory=1024', '--myNewOne'],
            apiTokenCredentialsId: 'testCredentials',
            projectName: 'testProject',
            serverUrl: 'https://test.blackducksoftware.com',
            juStabUtils: utils,
            script: nullScript
        ])

        assertThat(detectProperties, containsString("--detect.project.name='testProject'"))
        assertThat(detectProperties, containsString("--detect.project.version.name='1'"))
        assertThat(detectProperties, containsString("--blackduck.signature.scanner.memory=1024"))
        assertThat(detectProperties, not(containsString("--blackduck.signature.scanner.memory=4096")))
        assertThat(detectProperties, not(containsString("--detect.report.timeout=4800")))
        assertThat(detectProperties, containsString("--myNewOne"))
    }
}
test/groovy/KanikoExecuteTest.groovy (new file, 143 lines)
@@ -0,0 +1,143 @@
#!groovy
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.ExpectedException
import org.junit.rules.RuleChain
import util.*

import static org.hamcrest.Matchers.*
import static org.junit.Assert.assertThat

class KanikoExecuteTest extends BasePiperTest {
    private JenkinsStepRule stepRule = new JenkinsStepRule(this)
    private JenkinsShellCallRule shellRule = new JenkinsShellCallRule(this)
    private JenkinsReadFileRule readFileRule = new JenkinsReadFileRule(this, 'test/resources/kaniko/')
    private JenkinsWriteFileRule writeFileRule = new JenkinsWriteFileRule(this)
    private JenkinsDockerExecuteRule dockerExecuteRule = new JenkinsDockerExecuteRule(this)

    @Rule
    public RuleChain rules = Rules
        .getCommonRules(this)
        .around(new JenkinsReadYamlRule(this))
        .around(shellRule)
        .around(readFileRule)
        .around(writeFileRule)
        .around(dockerExecuteRule)
        .around(stepRule)

    def fileMap = [:]

    @Before
    void init() {
        binding.variables.env.WORKSPACE = '/path/to/current/workspace'

        helper.registerAllowedMethod('file', [Map], { m ->
            fileMap = m
            return m
        })

        helper.registerAllowedMethod('withCredentials', [List, Closure], { l, c ->
            binding.setProperty(fileMap.variable, 'config.json')
            try {
                c()
            } finally {
                binding.setProperty(fileMap.variable, null)
            }
        })

        UUID.metaClass.static.randomUUID = { -> 1}
    }

    @Test
    void testDefaults() {
        stepRule.step.kanikoExecute(
            script: nullScript
        )
        assertThat(shellRule.shell, hasItem('#!/busybox/sh rm /kaniko/.docker/config.json'))
        assertThat(shellRule.shell, hasItem(allOf(
            startsWith('#!/busybox/sh'),
            containsString('mv 1-config.json /kaniko/.docker/config.json'),
            containsString('/kaniko/executor'),
            containsString('--dockerfile /path/to/current/workspace/Dockerfile'),
            containsString('--context /path/to/current/workspace'),
            containsString('--skip-tls-verify-pull'),
            containsString('--no-push')
        )))

        assertThat(writeFileRule.files.values()[0], is('{"auths":{}}'))

        assertThat(dockerExecuteRule.dockerParams, allOf(
            hasEntry('containerCommand', '/busybox/tail -f /dev/null'),
            hasEntry('containerShell', '/busybox/sh'),
            hasEntry('dockerImage', 'gcr.io/kaniko-project/executor:debug'),
            hasEntry('dockerOptions', "-u 0 --entrypoint=''")

        ))
    }

    @Test
    void testCustomDockerCredentials() {
        stepRule.step.kanikoExecute(
            script: nullScript,
            dockerConfigJsonCredentialsId: 'myDockerConfigJson'
        )

        assertThat(fileMap.credentialsId, is('myDockerConfigJson'))
        assertThat(writeFileRule.files.values()[0], allOf(
            containsString('docker.my.domain.com:4444'),
            containsString('"auth": "myAuth"'),
            containsString('"email": "my.user@domain.com"')
        ))
    }

    @Test
    void testCustomImage() {
        stepRule.step.kanikoExecute(
            script: nullScript,
            containerImageNameAndTag: 'my.docker.registry/path/myImageName:myTag'
        )

        assertThat(shellRule.shell, hasItem(allOf(
            startsWith('#!/busybox/sh'),
            containsString('mv 1-config.json /kaniko/.docker/config.json'),
            containsString('/kaniko/executor'),
            containsString('--dockerfile /path/to/current/workspace/Dockerfile'),
            containsString('--context /path/to/current/workspace'),
            containsString('--skip-tls-verify-pull'),
            containsString('--destination my.docker.registry/path/myImageName:myTag')
        )))
    }

    @Test
    void testPreserveDestination() {
        stepRule.step.kanikoExecute(
            script: nullScript,
            containerBuildOptions: '--destination my.docker.registry/path/myImageName:myTag'
        )

        assertThat(shellRule.shell, hasItem(allOf(
            startsWith('#!/busybox/sh'),
            containsString('mv 1-config.json /kaniko/.docker/config.json'),
            containsString('/kaniko/executor'),
            containsString('--dockerfile /path/to/current/workspace/Dockerfile'),
            containsString('--context /path/to/current/workspace'),
            containsString('--destination my.docker.registry/path/myImageName:myTag')
        )))
    }

    @Test
    void testCustomCertificates() {
        stepRule.step.kanikoExecute(
            script: nullScript,
            customTlsCertificateLinks: ['http://link.one', 'http://link.two']
        )

        assertThat(shellRule.shell, hasItem(allOf(
            startsWith('#!/busybox/sh'),
            containsString('rm /kaniko/.docker/config.json'),
            containsString('wget http://link.one -O - >> /kaniko/ssl/certs/ca-certificates.crt'),
            containsString('wget http://link.two -O - >> /kaniko/ssl/certs/ca-certificates.crt')
        )))
    }
}

@@ -210,6 +210,14 @@ public class MtaBuildTest extends BasePiperTest {
        assert shellRule.shell.find(){ c -> c.contains('cp settings.xml $M2_HOME/conf/settings.xml')}
    }

    @Test
    void canConfigureNpmRegistry() {

        stepRule.step.mtaBuild(script: nullScript, defaultNpmRegistry: 'myNpmRegistry.com')

        assert shellRule.shell.find(){ c -> c.contains('npm config set registry myNpmRegistry.com')}
    }

    @Test
    void canConfigureMavenGlobalSettingsFromRemoteSource() {

@@ -507,4 +507,17 @@ class NeoDeployTest extends BasePiperTest {
            utils: utils,
        )
    }

    @Test
    void deployModeAsGStringTest() {

        Map deployProps = [deployMode: 'warPropertiesFile']

        stepRule.step.neoDeploy(script: nullScript,
                                utils: utils,
                                neo: [credentialsId: 'myCredentialsId',
                                      propertiesFile: warPropertiesFileName],
                                deployMode: "$deployProps.deployMode",
                                source: archiveName)
    }
}
@ -449,18 +449,18 @@ class WhitesourceExecuteScanTest extends BasePiperTest {

        assertThat(dockerExecuteRule.dockerParams, hasEntry('dockerImage', 'golang:1.12-stretch'))
        assertThat(dockerExecuteRule.dockerParams, hasEntry('dockerWorkspace', '/home/dep'))
        assertThat(dockerExecuteRule.dockerParams, hasEntry('stashContent', ['buildDescriptor', 'opensourceConfiguration', 'modified whitesource config d3aa80454919391024374ba46b4df082d15ab9a3']))
        assertThat(dockerExecuteRule.dockerParams, hasEntry('stashContent', ['buildDescriptor', 'opensourceConfiguration', 'checkmarx', 'modified whitesource config 7d1c90ed46c66061fc8ea45dd96e209bf767f038']))

        assertThat(shellRule.shell, Matchers.hasItems(
        assertThat(shellRule.shell, Matchers.hasItems(
            is('curl --location --output wss-unified-agent.jar https://github.com/whitesource/unified-agent-distribution/raw/master/standAlone/wss-unified-agent.jar'),
            is('./bin/java -jar wss-unified-agent.jar -c \'./myProject/wss-unified-agent.config.d3aa80454919391024374ba46b4df082d15ab9a3\' -apiKey \'testOrgToken\' -userKey \'token-0815\' -product \'testProductName\'')
            is('./bin/java -jar wss-unified-agent.jar -c \'./myProject/wss-unified-agent.config.7d1c90ed46c66061fc8ea45dd96e209bf767f038\' -apiKey \'testOrgToken\' -userKey \'token-0815\' -product \'testProductName\'')
        ))

        assertThat(writeFileRule.files['./myProject/wss-unified-agent.config.d3aa80454919391024374ba46b4df082d15ab9a3'], containsString('apiKey=testOrgToken'))
        assertThat(writeFileRule.files['./myProject/wss-unified-agent.config.d3aa80454919391024374ba46b4df082d15ab9a3'], containsString('productName=testProductName'))
        assertThat(writeFileRule.files['./myProject/wss-unified-agent.config.d3aa80454919391024374ba46b4df082d15ab9a3'], containsString('userKey=token-0815'))
        assertThat(writeFileRule.files['./myProject/wss-unified-agent.config.d3aa80454919391024374ba46b4df082d15ab9a3'], containsString('productVersion=1'))
        assertThat(writeFileRule.files['./myProject/wss-unified-agent.config.d3aa80454919391024374ba46b4df082d15ab9a3'], containsString('projectName=github.wdf.sap.corp/test/golang.myProject'))
        assertThat(writeFileRule.files['./myProject/wss-unified-agent.config.7d1c90ed46c66061fc8ea45dd96e209bf767f038'], containsString('apiKey=testOrgToken'))
        assertThat(writeFileRule.files['./myProject/wss-unified-agent.config.7d1c90ed46c66061fc8ea45dd96e209bf767f038'], containsString('productName=testProductName'))
        assertThat(writeFileRule.files['./myProject/wss-unified-agent.config.7d1c90ed46c66061fc8ea45dd96e209bf767f038'], containsString('userKey=token-0815'))
        assertThat(writeFileRule.files['./myProject/wss-unified-agent.config.7d1c90ed46c66061fc8ea45dd96e209bf767f038'], containsString('productVersion=1'))
        assertThat(writeFileRule.files['./myProject/wss-unified-agent.config.7d1c90ed46c66061fc8ea45dd96e209bf767f038'], containsString('projectName=github.wdf.sap.corp/test/golang.myProject'))
    }

    @Test
@ -506,7 +506,7 @@ class WhitesourceExecuteScanTest extends BasePiperTest {

        assertThat(dockerExecuteRule.dockerParams, hasEntry('dockerImage', 'golang:1.12-stretch'))
        assertThat(dockerExecuteRule.dockerParams, hasEntry('dockerWorkspace', '/home/dep'))
        assertThat(dockerExecuteRule.dockerParams, hasEntry('stashContent', ['buildDescriptor', 'opensourceConfiguration', 'modified whitesource config d3aa80454919391024374ba46b4df082d15ab9a3']))
        assertThat(dockerExecuteRule.dockerParams, hasEntry('stashContent', ['buildDescriptor', 'opensourceConfiguration', 'checkmarx', 'modified whitesource config d3aa80454919391024374ba46b4df082d15ab9a3']))

        assertThat(shellRule.shell, Matchers.hasItems(
            is('curl --location --output wss-unified-agent.jar https://github.com/whitesource/unified-agent-distribution/raw/master/standAlone/wss-unified-agent.jar'),
@ -420,4 +420,34 @@ class ConfigurationHelperTest {
            .use()
    }

    @Test
    public void testWithPropertyInValuesString() {
        Map config = ['key1':'value1']
        Set possibleValues = ['value1', 'value2', 'value3']

        ConfigurationHelper.newInstance(mockScript, config).collectValidationFailures()
            .withPropertyInValues('key1', possibleValues)
            .use()
    }

    @Test
    public void testWithPropertyInValuesGString() {
        String value = 'value1'
        Map config = ['key1':"$value"]
        Set possibleValues = ['value1', 'value2', 'value3']

        ConfigurationHelper.newInstance(mockScript, config).collectValidationFailures()
            .withPropertyInValues('key1', possibleValues)
            .use()
    }

    @Test
    public void testWithPropertyInValuesInt() {
        Map config = ['key1':3]
        Set possibleValues = [1, 2, 3]

        ConfigurationHelper.newInstance(mockScript, config).collectValidationFailures()
            .withPropertyInValues('key1', possibleValues)
            .use()
    }
}
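The diff only exercises the passing paths of withPropertyInValues. A hypothetical negative case, assuming that failures gathered via collectValidationFailures() surface as an exception once use() is called (names mirror the tests above; shouldFail is standard Groovy):

    import static groovy.test.GroovyAssert.shouldFail

    @Test
    public void testWithPropertyInValuesNotContained() {
        Map config = ['key1':'value4']
        Set possibleValues = ['value1', 'value2', 'value3']

        // expected to fail: 'value4' is not among the possible values
        shouldFail {
            ConfigurationHelper.newInstance(mockScript, config).collectValidationFailures()
                .withPropertyInValues('key1', possibleValues)
                .use()
        }
    }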
@ -56,7 +56,7 @@ class WhitesourceConfigurationHelperTest extends BasePiperTest {
    void testExtendConfigurationFileUnifiedAgentConfigDeeper() {
        helper.registerAllowedMethod('readProperties', [Map], { m -> if (!m.file.contains('testModule')) return new Properties() else return null })
        WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'none', whitesource: [configFilePath: './config', serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'DIST - name1', productToken: '1234', userKey: '0000']], "./testModule/")
        assertThat(jwfr.files['./testModule/config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
        assertThat(jwfr.files['./testModule/config.13954509c7675edfce373138f51c68464d1abcac'],
            allOf(
                not(containsString("log.level=debug")),
                containsString("apiKey=abcd"),
@ -209,6 +209,66 @@ steps: {}

    }

    @Test
    void testConditionConfigKeys() {
        helper.registerAllowedMethod('libraryResource', [String.class], {s ->
            if(s == 'testDefault.yml') {
                return '''
stages:
  testStage1:
    stepConditions:
      firstStep:
        configKeys:
          - myKey1_1
          - myKey1_2
  testStage2:
    stepConditions:
      secondStep:
        configKeys:
          - myKey2_1
  testStage3:
    stepConditions:
      thirdStep:
        configKeys:
          - myKey3_1
'''
            } else {
                return '''
general: {}
steps: {}
'''
            }
        })

        nullScript.commonPipelineEnvironment.configuration = [
            general: [myKey1_1: 'myVal1_1'],
            stages: [:],
            steps: [thirdStep: [myKey3_1: 'myVal3_1']]
        ]

        jsr.step.piperInitRunStageConfiguration(
            script: nullScript,
            juStabUtils: utils,
            stageConfigResource: 'testDefault.yml'
        )

        assertThat(nullScript.commonPipelineEnvironment.configuration.runStage.keySet(),
            allOf(
                containsInAnyOrder(
                    'testStage1',
                    'testStage3'
                ),
                hasSize(2)
            )
        )

        assertThat(nullScript.commonPipelineEnvironment.configuration.runStep.testStage1.firstStep, is(true))
        assertThat(nullScript.commonPipelineEnvironment.configuration.runStep.testStage2?.secondStep, is(false))
        assertThat(nullScript.commonPipelineEnvironment.configuration.runStep.testStage3.thirdStep, is(true))

    }


    @Test
    void testConditionFilePattern() {
        helper.registerAllowedMethod('libraryResource', [String.class], {s ->
@ -391,4 +451,27 @@ steps: {}
        assertThat(nullScript.commonPipelineEnvironment.configuration.runStage.Acceptance, is(true))

    }

    @Test
    void testGetConfigValue() {

        def config = [
            invalidKey: 'invalidValue',
            stringKey: 'stringValue',
            listKey: [
                'listValue1',
                'listValue2'
            ],
            nested: [
                key: 'nestedValue'
            ]
        ]

        assertThat(jsr.step.piperInitRunStageConfiguration.getConfigValue(config, 'stringKey'), is('stringValue'))
        assertThat(jsr.step.piperInitRunStageConfiguration.getConfigValue(config, 'listKey'), is(['listValue1','listValue2']))
        assertThat(jsr.step.piperInitRunStageConfiguration.getConfigValue(config, 'nested/key'), is('nestedValue'))
        assertThat(jsr.step.piperInitRunStageConfiguration.getConfigValue(config, 'invalidKey/key'), is(nullValue()))
    }
}
@ -160,6 +160,9 @@ class PiperPipelineTest extends BasePiperTest {
        helper.registerAllowedMethod('piperPipelineStageRelease', [Map.class], {m ->
            stepsCalled.add('piperPipelineStageRelease')
        })
        helper.registerAllowedMethod('piperPipelineStagePost', [Map.class], {m ->
            stepsCalled.add('piperPipelineStagePost')
        })

        nullScript.prepareDefaultValues(script: nullScript)

@ -231,7 +234,8 @@ class PiperPipelineTest extends BasePiperTest {
            'piperPipelineStageCompliance',
            'input',
            'piperPipelineStagePromote',
            'piperPipelineStageRelease'
            'piperPipelineStageRelease',
            'piperPipelineStagePost'
        ))
    }
}
8
test/resources/kaniko/config.json
Normal file
@ -0,0 +1,8 @@
{
    "auths": {
        "docker.my.domain.com:4444": {
            "auth": "myAuth",
            "email": "my.user@domain.com"
        }
    }
}
154
vars/detectExecuteScan.groovy
Normal file
@ -0,0 +1,154 @@
import com.sap.piper.GenerateDocumentation
import com.sap.piper.Utils
import com.sap.piper.ConfigurationHelper

import groovy.transform.Field

import static com.sap.piper.Prerequisites.checkScript

@Field String STEP_NAME = getClass().getName()
@Field Set GENERAL_CONFIG_KEYS = [
    'detect',
    /**
     * Jenkins 'Secret text' credentials ID containing the API token used to authenticate with the Synopsys Detect (formerly BlackDuck) Server.
     * @parentConfigKey detect
     */
    'apiTokenCredentialsId',
    /**
     * Defines the tool which is used for building the artifact.<br />
     * Currently, it is possible to select two behaviors of the step:
     * <br />
     * 1. Golang-specific behavior (`buildTool: golang`). The assumption here is that the project uses the dependency management tool _dep_<br />
     * 2. Custom behavior for all other values of `buildTool`
     *
     * @possibleValues `golang`, any other build tool
     */
    'buildTool',
    /**
     * Name of the Synopsys Detect (formerly BlackDuck) project.
     * @parentConfigKey detect
     */
    'projectName',
    /**
     * Version of the Synopsys Detect (formerly BlackDuck) project.
     * @parentConfigKey detect
     */
    'projectVersion',
    /**
     * List of paths which should be scanned by the Synopsys Detect (formerly BlackDuck) scan.
     * @parentConfigKey detect
     */
    'scanPaths',
    /**
     * Properties passed to the Synopsys Detect (formerly BlackDuck) scan. You can find details in the [Synopsys Detect documentation](https://synopsys.atlassian.net/wiki/spaces/INTDOCS/pages/622846/Using+Synopsys+Detect+Properties)
     * @parentConfigKey detect
     */
    'scanProperties',
    /**
     * List of scanners to be used for the Synopsys Detect (formerly BlackDuck) scan.
     * @possibleValues `['signature']`
     * @parentConfigKey detect
     */
    'scanners',
    /**
     * Server URL of the Synopsys Detect (formerly BlackDuck) Server.
     * @parentConfigKey detect
     */
    'serverUrl'
]
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus([
    /** @see dockerExecute */
    'dockerImage',
    /** @see dockerExecute */
    'dockerWorkspace',
    /** If specific stashes should be considered for the scan, their names need to be passed via the parameter `stashContent`. */
    'stashContent'
])
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS

@Field Map CONFIG_KEY_COMPATIBILITY = [
    detect: [
        apiTokenCredentialsId: 'apiTokenCredentialsId',
        projectName: 'projectName',
        projectVersion: 'projectVersion',
        scanners: 'scanners',
        scanPaths: 'scanPaths',
        scanProperties: 'scanProperties',
        serverUrl: 'serverUrl'
    ]
]

/**
 * This step executes [Synopsys Detect](https://synopsys.atlassian.net/wiki/spaces/INTDOCS/pages/62423113/Synopsys+Detect) scans.
 */
@GenerateDocumentation
void call(Map parameters = [:]) {
    handlePipelineStepErrors (stepName: STEP_NAME, stepParameters: parameters) {
        def script = checkScript(this, parameters) ?: this
        def utils = parameters.juStabUtils ?: new Utils()
        // load default & individual configuration
        Map config = ConfigurationHelper.newInstance(this)
            .loadStepDefaults()
            .mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS, CONFIG_KEY_COMPATIBILITY)
            .mixinStepConfig(script.commonPipelineEnvironment, STEP_CONFIG_KEYS, CONFIG_KEY_COMPATIBILITY)
            .mixinStageConfig(script.commonPipelineEnvironment, parameters.stageName?:env.STAGE_NAME, STEP_CONFIG_KEYS, CONFIG_KEY_COMPATIBILITY)
            .mixin(parameters, PARAMETER_KEYS, CONFIG_KEY_COMPATIBILITY)
            .dependingOn('buildTool').mixin('dockerImage')
            .dependingOn('buildTool').mixin('dockerWorkspace')
            .withMandatoryProperty('detect/apiTokenCredentialsId')
            .withMandatoryProperty('detect/projectName')
            .withMandatoryProperty('detect/projectVersion')
            .use()

        config.stashContent = utils.unstashAll(config.stashContent)

        script.commonPipelineEnvironment.setInfluxStepData('detect', false)

        utils.pushToSWA([
            step: STEP_NAME,
            stepParamKey1: 'buildTool',
            stepParam1: config.buildTool ?: 'default'
        ], config)

        //prepare Hub Detect execution using package manager
        switch (config.buildTool) {
            case 'golang':
                dockerExecute(script: script, dockerImage: config.dockerImage, dockerWorkspace: config.dockerWorkspace, stashContent: config.stashContent) {
                    sh 'curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh'
                    sh 'ln --symbolic $(pwd) $GOPATH/src/hub'
                    sh 'cd $GOPATH/src/hub && dep ensure'
                }
                break
            default:
                //no additional tasks are performed
                echo "[${STEP_NAME}] No preparation steps performed for scan. Please make sure to properly set configuration for `detect.scanProperties`"
        }

        withCredentials ([string(
            credentialsId: config.detect.apiTokenCredentialsId,
            variable: 'detectApiToken'
        )]) {
            def authentication = "--blackduck.api.token=${detectApiToken}"
            config.detect.scanProperties += [
                "--detect.project.name='${config.detect.projectName}'",
                "--detect.project.version.name='${config.detect.projectVersion}'",
                "--detect.code.location.name='${config.detect.projectName}/${config.detect.projectVersion}'",
                "--blackduck.url=${config.detect.serverUrl}",
            ]

            if ('signature' in config.detect.scanners) {
                config.detect.scanProperties.add("--detect.blackduck.signature.scanner.paths=${config.detect.scanPaths.join(',')}")
            }

            if ('source' in config.detect.scanners) {
                config.detect.scanProperties.add("--detect.source.path=${config.detect.scanPaths[0]}")
            }

            def detectProperties = config.detect.scanProperties.join(' ') + " ${authentication}"

            echo "[${STEP_NAME}] Running with following Detect configuration: ${detectProperties}"
            synopsys_detect detectProperties
            script.commonPipelineEnvironment.setInfluxStepData('detect', true)
        }
    }
}
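A sketch of how the new step might be called from a Jenkinsfile; the parameter names are the config keys declared above, and all values are illustrative assumptions:

    detectExecuteScan(
        script: this,
        buildTool: 'golang',
        detect: [
            apiTokenCredentialsId: 'myDetectTokenId',      // 'Secret text' credentials, illustrative id
            projectName: 'myProject',                      // illustrative
            projectVersion: '1.0',                         // illustrative
            serverUrl: 'https://my.blackduck.example.com', // illustrative
            scanners: ['signature'],
            scanPaths: ['.']
        ]
    )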
@ -51,7 +51,7 @@ import org.jenkinsci.plugins.workflow.steps.FlowInterruptedException
@GenerateDocumentation
void call(Map parameters = [:], body) {
    // load default & individual configuration
    def cpe = parameters.stepParameters?.script?.commonPipelineEnvironment ?: commonPipelineEnvironment
    def cpe = parameters.stepParameters?.script?.commonPipelineEnvironment ?: null
    Map config = ConfigurationHelper.newInstance(this)
        .loadStepDefaults()
        .mixinGeneralConfig(cpe, GENERAL_CONFIG_KEYS)
@ -81,12 +81,25 @@ void call(Map parameters = [:], body) {
        if (config.failOnError || config.stepName in config.mandatorySteps) {
            throw ex
        }

        if (config.stepParameters?.script) {
            config.stepParameters?.script.currentBuild.result = 'UNSTABLE'
        } else {
            currentBuild.result = 'UNSTABLE'
        }

        echo "[${STEP_NAME}] Error in step ${config.stepName} - Build result set to 'UNSTABLE'"

        List unstableSteps = cpe?.getValue('unstableSteps') ?: []
        if(!unstableSteps) {
            unstableSteps = []
        }

        // add information about unstable steps to the pipeline environment
        // this helps to bring this information to users in a consolidated manner inside a pipeline
        unstableSteps.add(config.stepName)
        cpe?.setValue('unstableSteps', unstableSteps)

    } catch (Throwable error) {
        if (config.echoDetails)
            message += formatErrorMessage(config, error)
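Because unstable steps are now collected in the pipeline environment, a pipeline could summarize them at the end. A sketch using only the getValue accessor seen above (the reporting itself is an assumption, not part of this change):

    // read back the list collected by handlePipelineStepErrors
    List unstable = script.commonPipelineEnvironment.getValue('unstableSteps') ?: []
    if (unstable) {
        echo "Unstable steps: ${unstable.join(', ')}"
    }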
127
vars/kanikoExecute.groovy
Normal file
@ -0,0 +1,127 @@
import groovy.text.GStringTemplateEngine

import static com.sap.piper.Prerequisites.checkScript

import com.sap.piper.GenerateDocumentation
import com.sap.piper.ConfigurationHelper
import com.sap.piper.Utils

import groovy.transform.Field

@Field def STEP_NAME = getClass().getName()

@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = [
    /**
     * Defines the build options for the [kaniko](https://github.com/GoogleContainerTools/kaniko) build.
     */
    'containerBuildOptions',
    /** @see dockerExecute */
    'containerCommand',
    /** Defines the full name of the Docker image to be created including registry, image name and tag like `my.docker.registry/path/myImageName:myTag`. */
    'containerImageNameAndTag',
    /** @see dockerExecute */
    'containerShell',
    /**
     * Defines the command to prepare the Kaniko container.
     * By default the contained credentials are removed in order to allow anonymous access to container registries.
     */
    'containerPreparationCommand',
    /**
     * List containing download links of custom TLS certificates. This is required to ensure trusted connections to registries with custom certificates.
     */
    'customTlsCertificateLinks',
    /**
     * Defines the location of the Dockerfile relative to the Jenkins workspace.
     */
    'dockerfile',
    /**
     * Defines the id of the file credentials in your Jenkins credentials store which contain the file `.docker/config.json`.
     * You can find more details about the Docker credentials in the [Docker documentation](https://docs.docker.com/engine/reference/commandline/login/).
     */
    'dockerConfigJsonCredentialsId',
    /** @see dockerExecute */
    'dockerEnvVars',
    /** @see dockerExecute */
    'dockerOptions',
    /** @see dockerExecute */
    'dockerImage'
]
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS

/**
 * Executes a [Kaniko](https://github.com/GoogleContainerTools/kaniko) build for creating a Docker container.
 */
@GenerateDocumentation
void call(Map parameters = [:]) {
    handlePipelineStepErrors(stepName: STEP_NAME, stepParameters: parameters) {

        final script = checkScript(this, parameters) ?: this

        // load default & individual configuration
        Map config = ConfigurationHelper.newInstance(this)
            .loadStepDefaults()
            .mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS)
            .mixinStepConfig(script.commonPipelineEnvironment, STEP_CONFIG_KEYS)
            .mixinStageConfig(script.commonPipelineEnvironment, parameters.stageName?:env.STAGE_NAME, STEP_CONFIG_KEYS)
            .mixin(parameters, PARAMETER_KEYS)
            .use()

        new Utils().pushToSWA([
            step: STEP_NAME
        ], config)

        def buildOptions = new GStringTemplateEngine().createTemplate(config.containerBuildOptions).make([config: config, env: env]).toString()

        if (!buildOptions.contains('--destination')) {
            if (config.containerImageNameAndTag) {
                buildOptions += " --destination ${config.containerImageNameAndTag}"
            } else {
                buildOptions += " --no-push"
            }
        }

        dockerExecute(
            script: script,
            containerCommand: config.containerCommand,
            containerShell: config.containerShell,
            dockerEnvVars: config.dockerEnvVars,
            dockerImage: config.dockerImage,
            dockerOptions: config.dockerOptions
        ) {
            // prepare kaniko container for running with proper Docker config.json and custom certificates
            // custom certificates will be downloaded and appended to ca-certificates.crt file used in container
            sh """#!${config.containerShell}
${config.containerPreparationCommand}
${getCertificateUpdate(config.customTlsCertificateLinks)}
"""

            def uuid = UUID.randomUUID().toString()
            if (config.dockerConfigJsonCredentialsId) {
                // write proper config.json with credentials
                withCredentials([file(credentialsId: config.dockerConfigJsonCredentialsId, variable: 'dockerConfigJson')]) {
                    writeFile file: "${uuid}-config.json", text: readFile(dockerConfigJson)
                }
            } else {
                // empty config.json to allow anonymous authentication
                writeFile file: "${uuid}-config.json", text: '{"auths":{}}'
            }

            // execute Kaniko
            sh """#!${config.containerShell}
mv ${uuid}-config.json /kaniko/.docker/config.json
/kaniko/executor --dockerfile ${env.WORKSPACE}/${config.dockerfile} --context ${env.WORKSPACE} ${buildOptions}"""
        }
    }
}

private String getCertificateUpdate(List certLinks) {
    String certUpdate = ''

    if (!certLinks) return certUpdate

    certLinks.each {link ->
        certUpdate += "wget ${link} -O - >> /kaniko/ssl/certs/ca-certificates.crt\n"
    }
    return certUpdate
}
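A sketch of a typical invocation of the new step; the parameter names are the STEP_CONFIG_KEYS declared above, and the values are illustrative assumptions:

    kanikoExecute(
        script: this,
        dockerfile: 'Dockerfile',                                              // location relative to the workspace
        containerImageNameAndTag: 'my.docker.registry/path/myImageName:myTag',
        dockerConfigJsonCredentialsId: 'myDockerConfigJson',                   // illustrative credentials id
        customTlsCertificateLinks: []                                          // add links for registries with custom CAs
    )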
@ -35,7 +35,9 @@ import static com.sap.piper.Utils.downloadSettingsFromUrl
]
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS.plus([
    /** @see dockerExecute */
    'dockerOptions'
    'dockerOptions',
    /** URL of the npm registry that should be used for installing npm dependencies. */
    'defaultNpmRegistry'
])

/**
@ -81,6 +83,11 @@ void call(Map parameters = [:]) {
        sh "cp ${globalSettingsFile} \$M2_HOME/conf/settings.xml"
    }

    String defaultNpmRegistry = configuration.defaultNpmRegistry?.trim()
    if (defaultNpmRegistry) {
        sh "npm config set registry $defaultNpmRegistry"
    }

    def mtaYamlName = "mta.yaml"
    def applicationName = configuration.applicationName

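Since 'defaultNpmRegistry' is part of PARAMETER_KEYS, it can also be passed directly to the step. An illustrative call (the step name mtaBuild is inferred from the mta.yaml handling in this hunk, and the registry URL is only an example):

    mtaBuild(
        script: this,
        defaultNpmRegistry: 'https://registry.npmjs.org/'  // any reachable npm registry URL
    )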
@ -60,20 +60,32 @@ void call(Map parameters = [:]) {
        stage.getValue().stepConditions.each {step ->
            def stepActive = false
            step.getValue().each {condition ->
                Map stepConfig = script.commonPipelineEnvironment.getStepConfiguration(step.getKey(), currentStage)
                switch(condition.getKey()) {
                    case 'config':
                        if (condition.getValue() instanceof Map) {
                            condition.getValue().each {configCondition ->
                                if (script.commonPipelineEnvironment.getStepConfiguration(step.getKey(), currentStage)?.get(configCondition.getKey()) in configCondition.getValue()) {
                                if (getConfigValue(stepConfig, configCondition.getKey()) in configCondition.getValue()) {
                                    stepActive = true
                                }
                            }
                        } else if (script.commonPipelineEnvironment.getStepConfiguration(step.getKey(), currentStage)?.get(condition.getValue())) {
                        } else if (getConfigValue(stepConfig, condition.getValue())) {
                            stepActive = true
                        }
                        break
                    case 'configKeys':
                        if (condition.getValue() instanceof List) {
                            condition.getValue().each {configKey ->
                                if (getConfigValue(stepConfig, configKey)) {
                                    stepActive = true
                                }
                            }
                        } else if (getConfigValue(stepConfig, condition.getValue())) {
                            stepActive = true
                        }
                        break
                    case 'filePatternFromConfig':
                        def conditionValue = script.commonPipelineEnvironment.getStepConfiguration(step.getKey(), currentStage)?.get(condition.getValue())
                        def conditionValue = getConfigValue(stepConfig, condition.getValue())
                        if (conditionValue && findFiles(glob: conditionValue)) {
                            stepActive = true
                        }
@ -98,3 +110,16 @@ void call(Map parameters = [:]) {
    echo "[${STEP_NAME}] Debug - Run Step Configuration: ${script.commonPipelineEnvironment.configuration.runStep}"
}
}

private def getConfigValue(Map stepConfig, def configKey) {
    if (stepConfig == null) return null

    List configPath = configKey instanceof String ? configKey.tokenize('/') : configKey

    def configValue = stepConfig[configPath.head()]

    if (configPath.size() == 1) return configValue
    if (configValue in Map) return getConfigValue(configValue, configPath.tail())

    return null
}
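The helper resolves nested keys via '/'-separated paths, mirroring the tests earlier in this diff:

    // 'nested/key' walks one map level down:
    assert getConfigValue([nested: [key: 'nestedValue']], 'nested/key') == 'nestedValue'
    // a non-map intermediate value yields null instead of throwing:
    assert getConfigValue([invalidKey: 'invalidValue'], 'invalidKey/key') == null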
@ -87,8 +87,7 @@ void call(parameters) {
            failure {buildSetResult(currentBuild, 'FAILURE')}
            unstable {buildSetResult(currentBuild, 'UNSTABLE')}
            cleanup {
                influxWriteData script: parameters.script, wrapInNode: true
                mailSendNotification script: parameters.script, wrapInNode: true
                piperPipelineStagePost script: parameters.script
            }
        }
    }
45
vars/piperPipelineStagePost.groovy
Normal file
@ -0,0 +1,45 @@
import com.sap.piper.ConfigurationHelper
import com.sap.piper.GenerateDocumentation
import com.sap.piper.Utils
import groovy.transform.Field

import static com.sap.piper.Prerequisites.checkScript

@Field String STEP_NAME = getClass().getName()

@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS

/**
 * In this stage reporting actions like mail notification or telemetry reporting are executed.
 *
 * This stage contains the following steps:
 * - [influxWriteData](./influxWriteData.md)
 * - [mailSendNotification](./mailSendNotification.md)
 *
 * !!! note
 *     This stage is meant to be used in a [post](https://jenkins.io/doc/book/pipeline/syntax/#post) section of a pipeline.
 */
@GenerateDocumentation
void call(Map parameters = [:]) {
    def script = checkScript(this, parameters) ?: this
    def utils = parameters.juStabUtils ?: new Utils()
    def stageName = parameters.stageName?:env.STAGE_NAME
    // ease handling extension
    stageName = stageName.replace('Declarative: ', '')
    Map config = ConfigurationHelper.newInstance(this)
        .loadStepDefaults()
        .mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS)
        .mixinStageConfig(script.commonPipelineEnvironment, stageName, STEP_CONFIG_KEYS)
        .mixin(parameters, PARAMETER_KEYS)
        .use()

    piperStageWrapper (script: script, stageName: stageName, stageLocking: false) {
        // telemetry reporting
        utils.pushToSWA([step: STEP_NAME], config)

        influxWriteData script: script
        mailSendNotification script: script
    }
}