mirror of
https://github.com/SAP/jenkins-library.git
synced 2025-03-03 15:02:35 +02:00
Merge branch 'master' into master
This commit is contained in:
commit
94345379ee
3
.github/CONTRIBUTING.md
vendored
3
.github/CONTRIBUTING.md
vendored
@ -24,6 +24,9 @@ Generally speaking, you should fork this repository, make changes in your own fo
|
||||
|
||||
All pipeline library coding _must_ come with automated unit tests.
|
||||
|
||||
Besides that, we have an integration test suite, which is not triggered during normal pull request builds. However, integration tests are mandatory before a change can be merged. It is the duty of a team member of the SAP/jenkins-library project to execute these tests.
|
||||
To trigger the integration test suite, the `HEAD` commit of the branch associated with the pull request must be pushed under the branch pattern `it/.*` (recommended naming convention: `it/<Number of the pull request>`). As a result, the status `integration-tests` is updated in the pull request.
|
||||
|
||||
### Documentation
|
||||
|
||||
The contract of functionality exposed by a library functionality needs to be documented, so it can be properly used.
|
||||
|
48
.travis.yml
48
.travis.yml
@ -1,6 +1,7 @@
|
||||
branches:
|
||||
only:
|
||||
- master
|
||||
- /^it\/.*$/
|
||||
language: groovy
|
||||
sudo: required
|
||||
services:
|
||||
@ -15,6 +16,7 @@ cache:
|
||||
# Travis Lifecycle: https://docs.travis-ci.com/user/job-lifecycle#the-job-lifecycle
|
||||
# Travis Stages: https://docs.travis-ci.com/user/build-stages/
|
||||
# Travis Conditions: https://docs.travis-ci.com/user/conditional-builds-stages-jobs
|
||||
# Travis GHPages deploy provider: https://docs.travis-ci.com/user/deployment/pages/
|
||||
jobs:
|
||||
include:
|
||||
- stage: Tests
|
||||
@ -27,34 +29,22 @@ jobs:
|
||||
after_script:
|
||||
- JACOCO_SOURCE_PATH="src vars test" ./cc-test-reporter format-coverage target/site/jacoco/jacoco.xml --input-type jacoco
|
||||
- ./cc-test-reporter upload-coverage
|
||||
- name: Docs Build
|
||||
if: type = pull_request
|
||||
install: docker pull squidfunk/mkdocs-material:3.0.4
|
||||
script:
|
||||
- |
|
||||
cp -r documentation/docs documentation/docs-tmp
|
||||
documentation/bin/createDocu.sh vars documentation/docs-tmp/steps
|
||||
docker run --rm -it -v ${TRAVIS_BUILD_DIR}:/docs -w /docs/documentation squidfunk/mkdocs-material:3.0.4 build --clean --verbose --strict
|
||||
- name: Consumer Tests for s4sdk pipeline (CloudFoundry)
|
||||
script: cd consumer-test/s4sdk/CloudFoundry && chmod +x runTests.sh && ./runTests.sh
|
||||
- name: Consumer Tests for s4sdk pipeline (Neo Environment)
|
||||
script: cd consumer-test/s4sdk/NeoEnvironment && chmod +x runTests.sh && ./runTests.sh
|
||||
- name: Consumer Tests
|
||||
if: repo = "SAP/jenkins-library" && ( (type != pull_request && branch =~ /^master$|^it\/.*$/) || (type == pull_request && head_repo = "SAP/jenkins-library" && head_branch =~ /^it\/.*$/) )
|
||||
script: cd consumer-test && chmod +x integrationTestController.sh && ./integrationTestController.sh
|
||||
|
||||
- stage: Docs
|
||||
name: Deploy
|
||||
if: repo = "SAP/jenkins-library" AND branch = master AND NOT type = pull_request
|
||||
install:
|
||||
- docker pull squidfunk/mkdocs-material:3.0.4
|
||||
- |
|
||||
echo "Found change on master: Deployment of documentation"
|
||||
PRIVATE_KEY="cfg/id_rsa"
|
||||
openssl aes-256-cbc -K $encrypted_12c8071d2874_key -iv $encrypted_12c8071d2874_iv -in cfg/id_rsa.enc -out "${PRIVATE_KEY}" -d
|
||||
chmod a+x gh-pages-deploy.sh
|
||||
cp -r documentation/docs documentation/docs-tmp
|
||||
documentation/bin/createDocu.sh vars documentation/docs-tmp/steps
|
||||
script: docker run --rm -it --entrypoint "./gh-pages-deploy.sh" -e "TRAVIS_REPO_SLUG=${TRAVIS_REPO_SLUG}" -v ${TRAVIS_BUILD_DIR}:/docs -w /docs squidfunk/mkdocs-material:3.0.4
|
||||
# TODO: make use of GHPages deploy provider: https://docs.travis-ci.com/user/deployment/pages/
|
||||
|
||||
#notifications:
|
||||
# slack:
|
||||
# secure: UYzfd4QYLtAX39r8LzV1dYp7cKMhYRRjI/xswMEkR+RgdMWxVPPH3kcsNLwkdNGSPn1b8Aidz8YLss9JolrepWjwI283dK8EUthZAOw03+PmL5X/3nOJ7aGv0sxwYqF5ypltBrerTf6jtPUTcQdtao+0O8bgnzShc6nWWE4MLXonjOm1pZLRUo81un+0bzm8C2ABIeHC6xuZCRycXP5u1mW1nDLK3900uY1rxIDTSZKEzA0IzLQhE9uROvI1r48fW8cKJQQjMMO5PPorq+0eDl2YTE8rQr9ldvuRE7A/ubsOQR0N5F8iAv1JTZXuXGt62fw6eKDQ1h94suEk7X+baV0EwlfhsHXcI1MxRFwxNSr9k1WaVFfA4TrM8XYBAcW3JGRA51ZK3q4EcjpuxpupaA7kZDtH53W7ePzH2TIp6yknln1q+yfcsP7cGv38sSKpKwOyMgAPRElkZzcoo31kw/PLzKPXYJEovRqx/0lWzczbFSscsroNaGCavC02++bUnyUXW2W+PG4gDSBFVZjtrvTPKnZ6DpHXV97x6xC/CzyhFj/Nf+ao/J9IIfocnc4vXJojwS550KIvM7xCDJwa/+29dajj2l6dQqrcOe3UT3O5UGU9I0KkGEDMfkLOD71eRy58qiYz3y953e52DvvzWQJbvfuk8ubMO+Fmn4GyRz8=
|
||||
name: Build & Deploy
|
||||
install: docker pull squidfunk/mkdocs-material:3.0.4
|
||||
before_script: documentation/bin/createDocu.sh
|
||||
script: docker run --rm -it -v ${TRAVIS_BUILD_DIR}/documentation:/docs squidfunk/mkdocs-material:3.0.4 build --clean --strict
|
||||
deploy:
|
||||
on:
|
||||
branch: master
|
||||
provider: pages
|
||||
skip_cleanup: true
|
||||
github_token: ${GITHUB_TOKEN}
|
||||
name: 'Travis CI Publisher'
|
||||
local_dir: 'documentation/docs-gen'
|
||||
keep_history: true
|
||||
#verbose: true
|
||||
|
19
Jenkinsfile
vendored
19
Jenkinsfile
vendored
@ -1,19 +0,0 @@
|
||||
node {
|
||||
try {
|
||||
lock(resource: "sap-jenkins-library/10", inversePrecedence: true) {
|
||||
milestone 10
|
||||
deleteDir()
|
||||
stage ('Checkout'){
|
||||
checkout scm
|
||||
}
|
||||
stage ('Test') {
|
||||
sh "mvn clean test --batch-mode"
|
||||
}
|
||||
}
|
||||
} catch (Throwable err) {
|
||||
echo "Error occured: ${err}"
|
||||
currentBuild.result = 'FAILURE'
|
||||
mail subject: '[Build failed] SAP/jenkins-library', body: 'Fix the build.', to: 'marcus.holl@sap.com,oliver.nocon@sap.com'
|
||||
throw err
|
||||
}
|
||||
}
|
@ -14,15 +14,15 @@ Project "Piper" consists of two parts:
|
||||
|
||||
* [A shared library][piper-library] containing steps and utilities that are
|
||||
required by Jenkins pipelines.
|
||||
* A set of [Jenkins pipelines][piper-pipelines] using the piper library to
|
||||
implement best practice processes.
|
||||
* A set of [Docker images][devops-docker-images] used in the piper library to implement best practices.
|
||||
|
||||
Please follow [this link to our extended library documentation][piper-library-pages].
|
||||
|
||||
## What you get
|
||||
|
||||
The shared library contains all the necessary steps to run our best practice
|
||||
[Jenkins pipelines][piper-pipelines].
|
||||
[Jenkins pipelines][piper-library-pages] described in the Scenarios section or
|
||||
to run a [pipeline as step][piper-library-scenario].
|
||||
|
||||
The best practice pipelines are based on the general concepts of [Jenkins 2.0
|
||||
Pipelines as Code][jenkins-doc-pipelines]. With that you have the power of the
|
||||
@ -116,7 +116,8 @@ otherwise in the [LICENSE file][piper-library-license]
|
||||
|
||||
[github]: https://github.com
|
||||
[piper-library]: https://github.com/SAP/jenkins-library
|
||||
[piper-pipelines]: https://github.com/SAP/jenkins-pipelines
|
||||
[devops-docker-images]: https://github.com/SAP/devops-docker-images
|
||||
[piper-library-scenario]: https://sap.github.io/jenkins-library/scenarios/ui5-sap-cp/Readme/
|
||||
[piper-library-pages]: https://sap.github.io/jenkins-library
|
||||
[piper-library-pages-plugins]: https://sap.github.io/jenkins-library/jenkins/requiredPlugins
|
||||
[piper-library-issues]: https://github.com/SAP/jenkins-library/issues
|
||||
|
139
consumer-test/integrationTestController.sh
Executable file
139
consumer-test/integrationTestController.sh
Executable file
@ -0,0 +1,139 @@
|
||||
#!/bin/bash
|
||||
|
||||
function fail() {
|
||||
local message="$1"
|
||||
local returnCode=${2:-1}
|
||||
echo "[ERROR] ${message}" >&2
|
||||
exit "${returnCode}"
|
||||
}
|
||||
|
||||
function notify() {
|
||||
|
||||
local state=${1}
|
||||
local description=${2}
|
||||
local hash=${3}
|
||||
|
||||
echo "[INFO] Notifying about state \"${state}\" for commit \"${hash}\"."
|
||||
|
||||
curl -X POST \
|
||||
--fail \
|
||||
--silent \
|
||||
--output /dev/null \
|
||||
--data "{\"state\": \"${state}\", \"target_url\": \"${TRAVIS_BUILD_WEB_URL}\", \"description\": \"${description}\", \"context\": \"integration-tests\"}" \
|
||||
--user "${INTEGRATION_TEST_VOTING_USER}:${INTEGRATION_TEST_VOTING_TOKEN}" \
|
||||
"https://api.github.com/repos/SAP/jenkins-library/statuses/${hash}" || fail "Cannot send notification. curl return code: $?"
|
||||
}
|
||||
|
||||
function cleanup() {
|
||||
[[ -z "${notificationThreadPid}" ]] || kill -PIPE "${notificationThreadPid}" &>/dev/null
|
||||
}
|
||||
|
||||
trap cleanup EXIT
|
||||
|
||||
#
|
||||
# In case the build is performed for a pull request TRAVIS_COMMIT is a merge
|
||||
# commit between the base branch and the PR branch HEAD. That commit is actually built.
|
||||
# But for notifying about a build status we need the commit which is currently
|
||||
# the HEAD of the PR branch.
|
||||
#
|
||||
# In case the build is performed for a simple branch (not associated with a PR)
|
||||
# In this case there is no merge commit between any base branch and HEAD of a PR branch.
|
||||
# The commit which we need for notifying about a build status is in this case simply
|
||||
# TRAVIS_COMMIT itself.
|
||||
#
|
||||
COMMIT_HASH_FOR_STATUS_NOTIFICATIONS="${TRAVIS_PULL_REQUEST_SHA}"
|
||||
[[ -z "${COMMIT_HASH_FOR_STATUS_NOTIFICATIONS}" ]] && COMMIT_HASH_FOR_STATUS_NOTIFICATIONS="${TRAVIS_COMMIT}"
|
||||
|
||||
notify "pending" "Integration tests in progress." "${COMMIT_HASH_FOR_STATUS_NOTIFICATIONS}"
|
||||
|
||||
WORKSPACES_ROOT=workspaces
|
||||
[[ -e "${WORKSPACES_ROOT}" ]] && rm -rf ${WORKSPACES_ROOT}
|
||||
|
||||
TEST_CASES=$(find testCases -name '*.yml')
|
||||
|
||||
# This auxiliary thread is needed in order to produce some output while the
|
||||
# test are running. Otherwise the job will be canceled after 10 minutes without
|
||||
# output.
|
||||
while true; do sleep 10; echo "[INFO] Integration tests still running."; done &
|
||||
notificationThreadPid=$!
|
||||
|
||||
declare -a processes
|
||||
i=0
|
||||
for f in ${TEST_CASES}
|
||||
do
|
||||
testCase=$(basename "${f%.*}")
|
||||
area=$(dirname "${f#*/}")
|
||||
echo "[INFO] Running test case \"${testCase}\" in area \"${area}\"."
|
||||
TEST_CASE_ROOT="${WORKSPACES_ROOT}/${area}/${testCase}"
|
||||
[[ -e "${TEST_CASE_ROOT}" ]] && rm -rf "${TEST_CASE_ROOT}"
|
||||
mkdir -p "${TEST_CASE_ROOT}" || fail "Cannot create test case root directory for test case \"${testCase}\"." 1
|
||||
source ./runTest.sh "${testCase}" "${TEST_CASE_ROOT}" &> "${TEST_CASE_ROOT}/log.txt" &
|
||||
pid=$!
|
||||
processes[$i]="${area}/${testCase}:${pid}"
|
||||
echo "[INFO] Test case \"${testCase}\" in area \"${area}\" launched. (PID: \"${pid}\")."
|
||||
let i=i+1
|
||||
done
|
||||
|
||||
[[ "${i}" == 0 ]] && fail "No tests has been executed." 1
|
||||
|
||||
#
|
||||
# wait for the test cases and cat the log
|
||||
for p in "${processes[@]}"
|
||||
do
|
||||
area=$(dirname "${p%:*}")
|
||||
testCase=$(basename "${p%:*}")
|
||||
processId="${p#*:}"
|
||||
echo "[INFO] Waiting for test case \"${testCase}\" in area \"${area}\" (PID: \"${processId}\")."
|
||||
wait "${processId}"
|
||||
echo "[INFO] Test case \"${testCase}\" in area \"${area}\" finished (PID: \"${processId}\")."
|
||||
done
|
||||
|
||||
kill -PIPE "${notificationThreadPid}" &>/dev/null && notificationThreadPid=""
|
||||
|
||||
#
|
||||
# provide the logs
|
||||
for p in "${processes[@]}"
|
||||
do
|
||||
area=$(dirname "${p%:*}")
|
||||
testCase=$(basename "${p%:*}")
|
||||
TEST_CASE_ROOT="${WORKSPACES_ROOT}/${area}/${testCase}"
|
||||
echo "[INFO] === START === Logs for test case \"${testCase}\" ===."
|
||||
cat "${TEST_CASE_ROOT}/log.txt"
|
||||
echo "[INFO] === END === Logs for test case \"${testCase}\" ===."
|
||||
done
|
||||
|
||||
#
|
||||
# list test case status
|
||||
echo "[INFO] Build status:"
|
||||
failure="false"
|
||||
for p in "${processes[@]}"
|
||||
do
|
||||
status="UNDEFINED"
|
||||
area=$(dirname "${p%:*}")
|
||||
testCase=$(basename "${p%:*}")
|
||||
TEST_CASE_ROOT="${WORKSPACES_ROOT}/${area}/${testCase}"
|
||||
if [[ -f "${TEST_CASE_ROOT}/SUCCESS" ]]
|
||||
then
|
||||
status="SUCCESS"
|
||||
else
|
||||
status="FAILURE"
|
||||
failure="true"
|
||||
fi
|
||||
printf "[INFO] %-30s: %s\n" "${testCase}" "${status}"
|
||||
done
|
||||
|
||||
STATUS_DESCRIPTION="The integration tests failed."
|
||||
STATUS_STATE="failure"
|
||||
|
||||
if [[ "${failure}" == "false" ]]
|
||||
then
|
||||
STATUS_DESCRIPTION="The integration tests succeeded."
|
||||
STATUS_STATE="success"
|
||||
fi
|
||||
|
||||
notify "${STATUS_STATE}" "${STATUS_DESCRIPTION}" "${COMMIT_HASH_FOR_STATUS_NOTIFICATIONS}"
|
||||
|
||||
[[ "${failure}" != "false" ]] && fail "Integration tests failed." 1
|
||||
|
||||
echo "[INFO] Integration tests succeeded."
|
||||
exit 0
|
@ -1,14 +1,15 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
EXAMPLE_PROJECT_BRANCH=$1
|
||||
TEST_CASE=$1
|
||||
TEST_CASE_ROOT=$2
|
||||
TEST_CASE_WORKSPACE="${TEST_CASE_ROOT}/workspace"
|
||||
|
||||
LIBRARY_VERSION_UNDER_TEST=$(git log --format="%H" -n 1)
|
||||
REPOSITORY_UNDER_TEST=${TRAVIS_REPO_SLUG:-SAP/jenkins-library}
|
||||
|
||||
rm -rf workspace
|
||||
git clone -b "${EXAMPLE_PROJECT_BRANCH}" https://github.com/sap/cloud-s4-sdk-book workspace
|
||||
cp -f ../jenkins.yml workspace
|
||||
cd workspace || exit 1
|
||||
git clone -b "${TEST_CASE}" https://github.com/sap/cloud-s4-sdk-book "${TEST_CASE_WORKSPACE}"
|
||||
cp -f jenkins.yml "${TEST_CASE_WORKSPACE}"
|
||||
cd "${TEST_CASE_WORKSPACE}" || exit 1
|
||||
|
||||
# Configure path to library-repository under test in Jenkins config
|
||||
sed -i -e "s:__REPO_SLUG__:${REPOSITORY_UNDER_TEST}:g" jenkins.yml
|
||||
@ -18,3 +19,12 @@ echo "@Library(\"piper-library-os@$LIBRARY_VERSION_UNDER_TEST\") _" | cat - Jenk
|
||||
|
||||
# Commit the changed version because artifactSetVersion expects the git repo not to be dirty
|
||||
git commit --all --author="piper-testing-bot <piper-testing-bot@example.com>" --message="Set piper lib version for test"
|
||||
|
||||
docker run -v /var/run/docker.sock:/var/run/docker.sock -v "${PWD}":/workspace -v /tmp -e CASC_JENKINS_CONFIG=/workspace/jenkins.yml \
|
||||
-e CX_INFRA_IT_CF_USERNAME -e CX_INFRA_IT_CF_PASSWORD -e BRANCH_NAME="${TEST_CASE}" ppiper/jenkinsfile-runner
|
||||
|
||||
RC=$?
|
||||
|
||||
cd - &> /dev/null || { echo "[ERROR] change directory back into integration test root folder failed."; exit 1; }
|
||||
|
||||
[[ "${RC}" == 0 ]] && touch "${TEST_CASE_ROOT}/SUCCESS"
|
@ -1,6 +0,0 @@
|
||||
#!/bin/bash -e
|
||||
|
||||
source ../prepareTests.sh consumer-test
|
||||
|
||||
docker run -v /var/run/docker.sock:/var/run/docker.sock -v "${PWD}":/workspace -v /tmp -e CASC_JENKINS_CONFIG=/workspace/jenkins.yml \
|
||||
-e CX_INFRA_IT_CF_USERNAME -e CX_INFRA_IT_CF_PASSWORD -e BRANCH_NAME=consumer-test ppiper/jenkinsfile-runner
|
@ -1,6 +0,0 @@
|
||||
#!/bin/bash -e
|
||||
|
||||
source ../prepareTests.sh consumer-test-neo
|
||||
|
||||
docker run -v /var/run/docker.sock:/var/run/docker.sock -v "${PWD}":/workspace -v /tmp -e CASC_JENKINS_CONFIG=/workspace/jenkins.yml \
|
||||
-e CX_INFRA_IT_CF_USERNAME -e CX_INFRA_IT_CF_PASSWORD -e BRANCH_NAME=consumer-test-neo ppiper/jenkinsfile-runner
|
2
consumer-test/testCases/s4sdk/consumer-test-neo.yml
Normal file
2
consumer-test/testCases/s4sdk/consumer-test-neo.yml
Normal file
@ -0,0 +1,2 @@
|
||||
# Empty for the moment.
|
||||
# Might contain test configuration in the future.
|
2
consumer-test/testCases/s4sdk/consumer-test.yml
Normal file
2
consumer-test/testCases/s4sdk/consumer-test.yml
Normal file
@ -0,0 +1,2 @@
|
||||
# Empty for the moment.
|
||||
# Might contain test configuration in the future.
|
@ -1,41 +1,15 @@
|
||||
import groovy.io.FileType;
|
||||
import groovy.io.FileType
|
||||
import org.yaml.snakeyaml.Yaml
|
||||
import org.codehaus.groovy.control.CompilerConfiguration
|
||||
import com.sap.piper.GenerateDocumentation
|
||||
import com.sap.piper.DefaultValueCache
|
||||
import java.util.regex.Matcher
|
||||
import groovy.text.StreamingTemplateEngine
|
||||
|
||||
//
|
||||
// Collects helper functions for rendering the docu
|
||||
// Collects helper functions for rendering the documentation
|
||||
//
|
||||
class TemplateHelper {
|
||||
|
||||
static replaceParagraph(def textIn, int level, name, replacement) {
|
||||
|
||||
boolean insideParagraph = false
|
||||
def textOut = ''
|
||||
|
||||
textIn.eachLine {
|
||||
|
||||
line ->
|
||||
|
||||
if(insideParagraph && line ==~ "^#{1,${level}} .*\$") {
|
||||
insideParagraph = false
|
||||
}
|
||||
|
||||
if(! insideParagraph) {
|
||||
textOut += "${line}\n"
|
||||
}
|
||||
|
||||
if(line ==~ "^#{${level}} ${name}.*\$") {
|
||||
insideParagraph = true
|
||||
textOut += "${replacement}\n\n"
|
||||
}
|
||||
}
|
||||
|
||||
textOut
|
||||
}
|
||||
|
||||
static createParametersTable(Map parameters) {
|
||||
|
||||
def t = ''
|
||||
@ -61,18 +35,22 @@ class TemplateHelper {
|
||||
t.trim()
|
||||
}
|
||||
|
||||
static createParametersSection(Map parameters) {
|
||||
createParametersTable(parameters) + '\n' + createParameterDescriptionSection(parameters)
|
||||
}
|
||||
|
||||
static createStepConfigurationSection(Map parameters) {
|
||||
|
||||
def t = '''|We recommend to define values of step parameters via [config.yml file](../configuration.md).
|
||||
|
|
||||
|In following sections of the config.yml the configuration is possible:\n\n'''.stripMargin()
|
||||
|
||||
t += '| parameter | general | step | stage |\n'
|
||||
t += '|-----------|---------|------|-------|\n'
|
||||
t += '| parameter | general | step/stage |\n'
|
||||
t += '|-----------|---------|------------|\n'
|
||||
|
||||
parameters.keySet().toSorted().each {
|
||||
def props = parameters.get(it)
|
||||
t += "| `${it}` | ${props.GENERAL_CONFIG ? 'X' : ''} | ${props.STEP_CONFIG ? 'X' : ''} | ${props.STAGE_CONFIG ? 'X' : ''} |\n"
|
||||
t += "| `${it}` | ${props.GENERAL_CONFIG ? 'X' : ''} | ${props.STEP_CONFIG ? 'X' : ''} |\n"
|
||||
}
|
||||
|
||||
t.trim()
|
||||
@ -84,15 +62,17 @@ class TemplateHelper {
|
||||
//
|
||||
class Helper {
|
||||
|
||||
static projectRoot = new File(Helper.class.protectionDomain.codeSource.location.path).getParentFile().getParentFile().getParentFile()
|
||||
|
||||
static getConfigHelper(classLoader, roots, script) {
|
||||
|
||||
def compilerConfig = new CompilerConfiguration()
|
||||
compilerConfig.setClasspathList( roots )
|
||||
|
||||
new GroovyClassLoader(classLoader, compilerConfig, true)
|
||||
.parseClass(new File('src/com/sap/piper/ConfigurationHelper.groovy'))
|
||||
.parseClass(new File(projectRoot, 'src/com/sap/piper/ConfigurationHelper.groovy'))
|
||||
.newInstance(script, [:]).loadStepDefaults()
|
||||
}
|
||||
}
|
||||
|
||||
static getPrepareDefaultValuesStep(def gse) {
|
||||
|
||||
@ -102,7 +82,7 @@ class Helper {
|
||||
m, c -> c()
|
||||
}
|
||||
prepareDefaultValuesStep.metaClass.libraryResource {
|
||||
f -> new File("resources/${f}").text
|
||||
f -> new File(projectRoot,"resources/${f}").text
|
||||
}
|
||||
prepareDefaultValuesStep.metaClass.readYaml {
|
||||
m -> new Yaml().load(m.text)
|
||||
@ -122,6 +102,7 @@ class Helper {
|
||||
|
||||
def prepareDefaultValues() {
|
||||
_prepareDefaultValuesStep()
|
||||
|
||||
}
|
||||
|
||||
def run() {
|
||||
@ -196,9 +177,10 @@ class Helper {
|
||||
boolean docu = false,
|
||||
value = false,
|
||||
mandatory = false,
|
||||
parentObject = false,
|
||||
docuEnd = false
|
||||
|
||||
def docuLines = [], valueLines = [], mandatoryLines = []
|
||||
def docuLines = [], valueLines = [], mandatoryLines = [], parentObjectLines = []
|
||||
|
||||
f.eachLine {
|
||||
line ->
|
||||
@ -219,13 +201,17 @@ class Helper {
|
||||
throw new RuntimeException('Cannot retrieve parameter for a comment')
|
||||
}
|
||||
|
||||
def _docu = [], _value = [], _mandatory = [], _parentObject = []
|
||||
docuLines.each { _docu << it }
|
||||
valueLines.each { _value << it }
|
||||
mandatoryLines.each { _mandatory << it }
|
||||
parentObjectLines.each { _parentObject << it }
|
||||
_parentObject << param
|
||||
param = _parentObject*.trim().join('/').trim()
|
||||
|
||||
if(step.parameters[param].docu || step.parameters[param].value)
|
||||
System.err << "[WARNING] There is already some documentation for parameter '${param}. Is this parameter documented twice?'\n"
|
||||
|
||||
def _docu = [], _value = [], _mandatory = []
|
||||
docuLines.each { _docu << it }
|
||||
valueLines.each { _value << it}
|
||||
mandatoryLines.each { _mandatory << it}
|
||||
step.parameters[param].docu = _docu*.trim().join(' ').trim()
|
||||
step.parameters[param].value = _value*.trim().join(' ').trim()
|
||||
step.parameters[param].mandatory = _mandatory*.trim().join(' ').trim()
|
||||
@ -233,6 +219,7 @@ class Helper {
|
||||
docuLines.clear()
|
||||
valueLines.clear()
|
||||
mandatoryLines.clear()
|
||||
parentObjectLines.clear()
|
||||
}
|
||||
|
||||
if( line.trim() ==~ /^\/\*\*.*/ ) {
|
||||
@ -249,11 +236,19 @@ class Helper {
|
||||
if(_line ==~ /.*@possibleValues.*/) {
|
||||
mandatory = false // should be something like reset attributes
|
||||
value = true
|
||||
parentObject = false
|
||||
}
|
||||
// some remark for mandatory e.g. some parameters are only mandatory under certain conditions
|
||||
if(_line ==~ /.*@mandatory.*/) {
|
||||
value = false // should be something like reset attributes ...
|
||||
mandatory = true
|
||||
parentObject = false
|
||||
}
|
||||
// grouping config properties within a parent object for easier readability
|
||||
if(_line ==~ /.*@parentConfigKey.*/) {
|
||||
value = false // should be something like reset attributes ...
|
||||
mandatory = false
|
||||
parentObject = true
|
||||
}
|
||||
|
||||
if(value) {
|
||||
@ -270,7 +265,14 @@ class Helper {
|
||||
}
|
||||
}
|
||||
|
||||
if(! value && ! mandatory) {
|
||||
if(parentObject) {
|
||||
if(_line) {
|
||||
_line = (_line =~ /.*@parentConfigKey\s*?(.*)/)[0][1]
|
||||
parentObjectLines << _line
|
||||
}
|
||||
}
|
||||
|
||||
if(!value && !mandatory && !parentObject) {
|
||||
docuLines << _line
|
||||
}
|
||||
}
|
||||
@ -279,6 +281,7 @@ class Helper {
|
||||
docu = false
|
||||
value = false
|
||||
mandatory = false
|
||||
parentObject = false
|
||||
docuEnd = true
|
||||
}
|
||||
}
|
||||
@ -311,14 +314,33 @@ class Helper {
|
||||
def params = [] as Set
|
||||
f.eachLine {
|
||||
line ->
|
||||
if( line ==~ /.*withMandatoryProperty.*/ ) {
|
||||
def param = (line =~ /.*withMandatoryProperty\('(.*)'/)[0][1]
|
||||
params << param
|
||||
}
|
||||
if (line ==~ /.*withMandatoryProperty.*/) {
|
||||
def param = (line =~ /.*withMandatoryProperty\('(.*)'/)[0][1]
|
||||
params << param
|
||||
}
|
||||
}
|
||||
return params
|
||||
}
|
||||
|
||||
static getParentObjectMappings(File f) {
|
||||
def mappings = [:]
|
||||
def parentObjectKey = ''
|
||||
f.eachLine {
|
||||
line ->
|
||||
if (line ==~ /.*parentConfigKey.*/ && !parentObjectKey) {
|
||||
def param = (line =~ /.*parentConfigKey\s*?(.*)/)[0][1]
|
||||
parentObjectKey = param.trim()
|
||||
} else if (line ==~ /\s*?(.*)[,]{0,1}/ && parentObjectKey) {
|
||||
def pName = retrieveParameterName(line)
|
||||
if(pName) {
|
||||
mappings.put(pName, parentObjectKey)
|
||||
parentObjectKey = ''
|
||||
}
|
||||
}
|
||||
}
|
||||
return mappings
|
||||
}
|
||||
|
||||
static getValue(Map config, def pPath) {
|
||||
def p =config[pPath.head()]
|
||||
if(pPath.size() == 1) return p // there is no tail
|
||||
@ -332,7 +354,7 @@ class Helper {
|
||||
|
||||
stepsDir.traverse(type: FileType.FILES, maxDepth: 0) {
|
||||
if(it.getName().endsWith('.groovy')) {
|
||||
def scriptName = (it =~ /vars\/(.*)\.groovy/)[0][1]
|
||||
def scriptName = (it =~ /vars\${File.separator}(.*)\.groovy/)[0][1]
|
||||
def stepScript = gse.createScript("${scriptName}.groovy", new Binding())
|
||||
for (def method in stepScript.getClass().getMethods()) {
|
||||
if(method.getName() == 'call' && method.getAnnotation(GenerateDocumentation) != null) {
|
||||
@ -347,8 +369,8 @@ class Helper {
|
||||
}
|
||||
|
||||
roots = [
|
||||
'vars',
|
||||
'src',
|
||||
new File(Helper.projectRoot, "vars").getAbsolutePath(),
|
||||
new File(Helper.projectRoot, "src").getAbsolutePath()
|
||||
]
|
||||
|
||||
stepsDir = null
|
||||
@ -362,12 +384,12 @@ steps = []
|
||||
if(args.length >= 1)
|
||||
stepsDir = new File(args[0])
|
||||
|
||||
stepsDir = stepsDir ?: new File('vars')
|
||||
stepsDir = stepsDir ?: new File(Helper.projectRoot, "vars")
|
||||
|
||||
if(args.length >= 2)
|
||||
stepsDocuDir = new File(args[1])
|
||||
|
||||
stepsDocuDir = stepsDocuDir ?: new File('documentation/docs/steps')
|
||||
stepsDocuDir = stepsDocuDir ?: new File(Helper.projectRoot, "documentation/docs/steps")
|
||||
|
||||
|
||||
if(args.length >= 3)
|
||||
@ -394,7 +416,7 @@ if( !stepsDir.exists() ) {
|
||||
// sanity checks
|
||||
//
|
||||
|
||||
def gse = new GroovyScriptEngine( [ stepsDir.getName() ] as String[] , getClass().getClassLoader() )
|
||||
def gse = new GroovyScriptEngine([ stepsDir.getAbsolutePath() ] as String[], GenerateDocumentation.class.getClassLoader() )
|
||||
|
||||
//
|
||||
// find all the steps we have to document (if no step has been provided from outside)
|
||||
@ -457,20 +479,15 @@ void renderStep(stepName, stepProperties) {
|
||||
return
|
||||
}
|
||||
|
||||
def text = theStepDocu.text
|
||||
if(stepProperties.description) {
|
||||
text = TemplateHelper.replaceParagraph(text, 2, 'Description', '\n' + stepProperties.description)
|
||||
}
|
||||
if(stepProperties.parameters) {
|
||||
def binding = [
|
||||
docGenStepName : stepName,
|
||||
docGenDescription : 'Description\n\n' + stepProperties.description,
|
||||
docGenParameters : 'Parameters\n\n' + TemplateHelper.createParametersSection(stepProperties.parameters),
|
||||
docGenConfiguration : 'Step configuration\n\n' + TemplateHelper.createStepConfigurationSection(stepProperties.parameters)
|
||||
]
|
||||
def template = new StreamingTemplateEngine().createTemplate(theStepDocu.text)
|
||||
String text = template.make(binding)
|
||||
|
||||
text = TemplateHelper.replaceParagraph(text, 2, 'Parameters', '\n' +
|
||||
TemplateHelper.createParametersTable(stepProperties.parameters) + '\n' +
|
||||
TemplateHelper.createParameterDescriptionSection(stepProperties.parameters))
|
||||
|
||||
|
||||
text = TemplateHelper.replaceParagraph(text, 2, 'Step configuration', '\n' +
|
||||
TemplateHelper.createStepConfigurationSection(stepProperties.parameters))
|
||||
}
|
||||
theStepDocu.withWriter { w -> w.write text }
|
||||
}
|
||||
|
||||
@ -529,6 +546,20 @@ def handleStep(stepName, prepareDefaultValuesStep, gse) {
|
||||
|
||||
params.addAll(requiredParameters)
|
||||
|
||||
// translate parameter names according to compatibility annotations
|
||||
def parentObjectMappings = Helper.getParentObjectMappings(theStep)
|
||||
def compatibleParams = [] as Set
|
||||
if(parentObjectMappings) {
|
||||
params.each {
|
||||
if (parentObjectMappings[it])
|
||||
compatibleParams.add(parentObjectMappings[it] + '/' + it)
|
||||
else
|
||||
compatibleParams.add(it)
|
||||
}
|
||||
if (compatibleParams)
|
||||
params = compatibleParams
|
||||
}
|
||||
|
||||
def step = [parameters:[:]]
|
||||
|
||||
//
|
||||
@ -544,8 +575,7 @@ def handleStep(stepName, prepareDefaultValuesStep, gse) {
|
||||
required: true,
|
||||
|
||||
GENERAL_CONFIG: false,
|
||||
STEP_CONFIG: false,
|
||||
STAGE_CONFIG: false
|
||||
STEP_CONFIG: false
|
||||
]
|
||||
|
||||
// END special handling for 'script' parameter
|
||||
@ -561,14 +591,14 @@ def handleStep(stepName, prepareDefaultValuesStep, gse) {
|
||||
required: requiredParameters.contains((it as String)) && defaultValue == null
|
||||
]
|
||||
|
||||
step.parameters.put(it, parameterProperties)
|
||||
step.parameters.put(it, parameterProperties)
|
||||
|
||||
// The scope is only defined for the first level of a hierarchical configuration.
|
||||
// If the first part is found, all nested parameters are allowed with that scope.
|
||||
def firstPart = it.split('/').head()
|
||||
scopedParameters.each { key, val ->
|
||||
parameterProperties.put(key, val.contains(firstPart))
|
||||
}
|
||||
// The scope is only defined for the first level of a hierarchical configuration.
|
||||
// If the first part is found, all nested parameters are allowed with that scope.
|
||||
def firstPart = it.split('/').head()
|
||||
scopedParameters.each { key, val ->
|
||||
parameterProperties.put(key, val.contains(firstPart))
|
||||
}
|
||||
}
|
||||
|
||||
Helper.scanDocu(theStep, step)
|
||||
|
@ -59,10 +59,11 @@ steps:
|
||||
buildTarget: 'NEO'
|
||||
mtaJarLocation: '/opt/sap/mta.jar'
|
||||
neoDeploy:
|
||||
neoCredentialsId: 'NEO_DEPLOY'
|
||||
neoHome: '/opt/sap/neo-sdk/'
|
||||
account: 'your-account-id'
|
||||
host: 'hana.ondemand.com'
|
||||
neo:
|
||||
credentialsId: 'NEO_DEPLOY'
|
||||
account: 'your-account-id'
|
||||
host: 'hana.ondemand.com'
|
||||
```
|
||||
|
||||
#### Configuration for the MTA Build
|
||||
@ -78,8 +79,8 @@ steps:
|
||||
| Parameter | Description |
|
||||
| -------------------|-------------|
|
||||
| `account` | The SAP Cloud Platform account to deploy to. |
|
||||
| `credentialsId` | The Jenkins credentials that contain the user and password which are used for the deployment on SAP Cloud Platform.|
|
||||
| `host` | The SAP Cloud Platform host to deploy to. |
|
||||
| `neoCredentialsId` | The Jenkins credentials that contain the user and password which are used for the deployment on SAP Cloud Platform. |
|
||||
| `neoHome` | The path to the `neo-java-web-sdk` tool that is used for the deployment. |
|
||||
|
||||
|
||||
|
@ -1,79 +1,14 @@
|
||||
# artifactSetVersion
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
The continuous delivery process requires that each build is done with a unique version number.
|
||||
|
||||
The version generated using this step will contain:
|
||||
|
||||
* Version (major.minor.patch) from descriptor file in master repository is preserved. Developers should be able to autonomously decide on increasing either part of this version number.
|
||||
* Timestamp
|
||||
* CommitId (by default the long version of the hash)
|
||||
|
||||
Optionally, but enabled by default, the new version is pushed as a new tag into the source code repository (e.g. GitHub).
|
||||
If this option is chosen, git credentials and the repository URL needs to be provided.
|
||||
Since you might not want to configure the git credentials in Jenkins, committing and pushing can be disabled using the `commitVersion` parameter as described below.
|
||||
If you require strict reproducibility of your builds, this should be used.
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
none
|
||||
|
||||
## Parameters
|
||||
## ${docGenParameters}
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
| script | yes | | |
|
||||
| artifactType | no | | 'appContainer' |
|
||||
| buildTool | no | maven | docker, dlang, golang, maven, mta, npm, pip, sbt |
|
||||
| commitVersion | no | `true` | `true`, `false` |
|
||||
| dockerVersionSource | no | `''` | FROM, (ENV name),appVersion |
|
||||
| filePath | no | buildTool=`docker`: Dockerfile <br />buildTool=`dlang`: dub.json <br />buildTool=`golang`: VERSION <br />buildTool=`maven`: pom.xml <br />buildTool=`mta`: mta.yaml <br />buildTool=`npm`: package.json <br />buildTool=`pip`: version.txt <br />buildTool=`sbt`: sbtDescriptor.json| |
|
||||
| gitCommitId | no | `GitUtils.getGitCommitId()` | |
|
||||
| gitSshCredentialsId | If `commitVersion` is `true` | as defined in custom configuration | |
|
||||
| gitUserEMail | no | | |
|
||||
| gitUserName | no | | |
|
||||
| gitSshUrl | If `commitVersion` is `true` | | |
|
||||
| tagPrefix | no | 'build_' | |
|
||||
| timestamp | no | current time in format according to `timestampTemplate` | |
|
||||
| timestampTemplate | no | `%Y%m%d%H%M%S` | |
|
||||
| versioningTemplate | no |buildTool=`docker`: `${version}-${timestamp}${commitId?"_"+commitId:""}`<br />buildTool=`dlang`: `${version}-${timestamp}${commitId?"+"+commitId:""}`<br />buildTool=`golang`:`${version}-${timestamp}${commitId?"+"+commitId:""}`<br />buildTool=`maven`: `${version}-${timestamp}${commitId?"_"+commitId:""}`<br />buildTool=`mta`: `${version}-${timestamp}${commitId?"+"+commitId:""}`<br />buildTool=`npm`: `${version}-${timestamp}${commitId?"+"+commitId:""}`<br />buildTool=`pip`: `${version}.${timestamp}${commitId?"."+commitId:""}`<br />buildTool=`sbt`: `${version}-${timestamp}${commitId?"+"+commitId:""}`| |
|
||||
|
||||
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for retrieving e.g. configuration parameters.
|
||||
* `artifactType` defines the type of the artifact.
|
||||
* `buildTool` defines the tool which is used for building the artifact.
|
||||
* `commitVersion` controls if the changed version is committed and pushed to the git repository. If this is enabled (which is the default), you need to provide `gitCredentialsId` and `gitSshUrl`.
|
||||
* `dockerVersionSource` specifies the source to be used for the main version which is used for generating the automatic version.
|
||||
|
||||
* This can either be the version of the base image - as retrieved from the `FROM` statement within the Dockerfile, e.g. `FROM jenkins:2.46.2`
|
||||
* Alternatively the name of an environment variable defined in the Docker image can be used which contains the version number, e.g. `ENV MY_VERSION 1.2.3`
|
||||
* The third option `appVersion` applies only to the artifactType `appContainer`. Here the version of the app which is packaged into the container will be used as version for the container itself.
|
||||
|
||||
* Using `filePath` you could define a custom path to the descriptor file.
|
||||
* `gitCommitId` defines the version prefix of the automatically generated version. By default it will take the long commitId hash. You could pass any other string (e.g. the short commitId hash) to be used. In case you don't want to have the gitCommitId added to the automatic versioning string you could set the value to an empty string: `''`.
|
||||
* `gitSshCredentialsId` defines the ssh git credentials to be used for writing the tag.
|
||||
* The parameters `gitUserName` and `gitUserEMail` allow to overwrite the global git settings available on your Jenkins server
|
||||
* `gitSshUrl` defines the git ssh url to the source code repository.
|
||||
* `tagPrefix` defines the prefix which is used for the git tag which is written during the versioning run.
|
||||
* `timestamp` defines the timestamp to be used in the automatic version string. You could overwrite the default behavior by explicitly setting this string.
|
||||
|
||||
## Step configuration
|
||||
|
||||
The following parameters can also be specified as step parameters using the global configuration file:
|
||||
|
||||
* `artifactType`
|
||||
* `buildTool`
|
||||
* `commitVersion`
|
||||
* `dockerVersionSource`
|
||||
* `filePath`
|
||||
* `gitCredentialsId`
|
||||
* `gitUserEMail`
|
||||
* `gitUserName`
|
||||
* `gitSshUrl`
|
||||
* `tagPrefix`
|
||||
* `timestamp`
|
||||
* `timestampTemplate`
|
||||
* `versioningTemplate`
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Example
|
||||
|
||||
|
@ -1,38 +1,21 @@
|
||||
# batsExecuteTests
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
This step executes tests using the [Bash Automated Testing System - bats-core](https://github.com/bats-core/bats-core)
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
You need to have a Bats test file. By default you would put this into directory `src/test` within your source code repository.
|
||||
|
||||
## Parameters
|
||||
## ${docGenParameters}
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
|-----------|-----------|---------|-----------------|
|
||||
| script | yes | | |
|
||||
| dockerImage | no | `node:8-stretch` | |
|
||||
| dockerWorkspace | no |`/home/node`| |
|
||||
| envVars | no | `[:]` | |
|
||||
| failOnError | no | `false` | |
|
||||
| gitBranch | no | | |
|
||||
| gitSshKeyCredentialsId | no | | |
|
||||
| outputFormat | no | `junit` | `tap` |
|
||||
| repository | no | `https://github.com/bats-core/bats-core.git` | |
|
||||
| stashContent | no | `['tests']` | |
|
||||
| testPackage | no | `piper-bats` | |
|
||||
| testPath | no | `src/test`| |
|
||||
| testRepository | no | | |
|
||||
## ${docGenConfiguration}
|
||||
|
||||
Details:
|
||||
## Example
|
||||
|
||||
* `outputFormat` defines the format of the test result output. `junit` would be the standard for automated build environments but you could use also the option `tap`.
|
||||
* For the transformation of the test result to xUnit format the node module **tap-xunit** is used. `dockerImage` and `dockerWorkspace` define the Docker image used for the transformation and `testPackage` defines the name of the test package used in the xUnit result file.
|
||||
* `testPath` defines either the directory which contains the test files (`*.bats`) or a single file. You can find further details in the [Bats-core documentation](https://github.com/bats-core/bats-core#usage)
|
||||
* With `failOnError` you can define the behavior, in case tests fail. For example, in case of `outputFormat: 'junit'` you should set it to `false`. Otherwise test results cannot be recorded using the `testsPublishResults` step afterwards.
|
||||
* You can pass environment variables to the test execution by defining parameter `envVars`.
|
||||
```groovy
|
||||
batsExecuteTests script:this
|
||||
testsPublishResults junit: [pattern: '**/Test-*.xml', archive: true]
|
||||
```
|
||||
|
||||
With `envVars` it is possible to pass either fixed values but also templates using [`commonPipelineEnvironment`](commonPipelineEnvironment.md).
|
||||
|
||||
@ -41,37 +24,10 @@ Details:
|
||||
```yaml
|
||||
batsExecuteTests script: this, envVars = [
|
||||
FIX_VALUE: 'my fixed value',
|
||||
CONTAINER_NAME: '${commonPipelineEnvironment.configuration.steps.executeBatsTests.dockerContainerName}',
|
||||
IMAGE_NAME: '${return commonPipelineEnvironment.getDockerImageNameAndTag()}'
|
||||
CONTAINER_NAME: '\${commonPipelineEnvironment.configuration.steps.executeBatsTests.dockerContainerName}',
|
||||
IMAGE_NAME: '\${return commonPipelineEnvironment.getDockerImageNameAndTag()}'
|
||||
]
|
||||
```
|
||||
|
||||
This means within the test one could refer to environment variables by calling e.g.
|
||||
`run docker run --rm -i --name $CONTAINER_NAME --entrypoint /bin/bash $IMAGE_NAME echo "Test"`
|
||||
|
||||
* Using the parameter `testRepository` the tests can be loaded from another repository. In case the tests are not located in the master branch the branch can be specified with `gitBranch`. For protected repositories you can also define the access credentials via `gitSshKeyCredentialsId`. **Note: In case of using a protected repository, `testRepository` should include the ssh link to the repository.**
|
||||
* The parameter `repository` defines the version of **bats-core** to be used. By default we use the version from the master branch.
|
||||
|
||||
## Step configuration
|
||||
|
||||
The following parameters can also be specified as step/stage/general parameters using the [global configuration](../configuration.md):
|
||||
|
||||
* dockerImage
|
||||
* dockerWorkspace
|
||||
* envVars
|
||||
* failOnError
|
||||
* gitBranch
|
||||
* gitSshKeyCredentialsId
|
||||
* outputFormat
|
||||
* repository
|
||||
* stashContent
|
||||
* testPackage
|
||||
* testPath
|
||||
* testRepository
|
||||
|
||||
## Example
|
||||
|
||||
```groovy
|
||||
batsExecuteTests script:this
|
||||
testsPublishResults junit: [pattern: '**/Test-*.xml', archive: true]
|
||||
```
|
||||
`run docker run --rm -i --name \$CONTAINER_NAME --entrypoint /bin/bash \$IMAGE_NAME echo "Test"`
|
||||
|
@ -1,20 +1,14 @@
|
||||
# checkChangeInDevelopment
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
Content here is generated from corresponding step, see `vars`.
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
* **[Change Management Client 2.0.0 or compatible version](http://central.maven.org/maven2/com/sap/devops/cmclient/dist.cli/)** - available for download on Maven Central.
|
||||
|
||||
## Parameters
|
||||
## ${docGenParameters}
|
||||
|
||||
Content here is generated from corresponding step, see `vars`.
|
||||
|
||||
## Step configuration
|
||||
|
||||
Content here is generated from corresponding step, see `vars`.
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Exceptions
|
||||
|
||||
|
@ -1,8 +1,6 @@
|
||||
# checksPublishResults
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
This step can publish static check results from various sources.
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
@ -15,31 +13,7 @@ This step can publish static check results from various sources.
|
||||
* [warnings](https://plugins.jenkins.io/warnings)
|
||||
* [core](https://plugins.jenkins.io/core)
|
||||
|
||||
## Parameters
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| ---------------|-----------|-----------------------------------|--------------------|
|
||||
| script | yes | | |
|
||||
| aggregation | no | `true` | see below |
|
||||
| tasks | no | `false` | see below |
|
||||
| pmd | no | `false` | see below |
|
||||
| cpd | no | `false` | see below |
|
||||
| findbugs | no | `false` | see below |
|
||||
| checkstyle | no | `false` | see below |
|
||||
| eslint | no | `false` | see below |
|
||||
| pylint | no | `false` | see below |
|
||||
| archive | no | `false` | `true`, `false` |
|
||||
|
||||
* `aggregation` - Publishes .
|
||||
* `tasks` - Searches and publishes TODOs in files with the [Task Scanner Plugin](https://wiki.jenkins-ci.org/display/JENKINS/Task+Scanner+Plugin).
|
||||
* `pmd` - Publishes PMD findings with the [PMD plugin](https://plugins.jenkins.io/pmd) .
|
||||
* `cpd` - Publishes CPD findings with the [DRY plugin](https://plugins.jenkins.io/dry).
|
||||
* `findbugs` - Publishes Findbugs findings with the [Findbugs plugin](https://plugins.jenkins.io/findbugs).
|
||||
* `checkstyle` - Publishes Checkstyle findings with the [Checkstyle plugin](https://plugins.jenkins.io/checkstyle).
|
||||
* `eslint` - Publishes ESLint findings (in [JSLint format](https://eslint.org/docs/user-guide/formatters/)) with the [Warnings plugin](https://plugins.jenkins.io/warnings).
|
||||
* `pylint` - Publishes PyLint findings with the [Warnings plugin](https://plugins.jenkins.io/warnings), pylint needs to run with `--output-format=parseable` option.
|
||||
|
||||
Each of the parameters `aggregation`, `tasks`, `pmd`, `cpd`, `findbugs`, `checkstyle`, `eslint` and `pylint` can be set to `true` or `false` but also to a map of parameters to hand in different settings for the tools.
|
||||
## ${docGenParameters}
|
||||
|
||||
### aggregation
|
||||
|
||||
@ -106,19 +80,7 @@ Each of the parameters `aggregation`, `tasks`, `pmd`, `cpd`, `findbugs`, `checks
|
||||
| archive | no | `true` | `true`, `false` |
|
||||
| thresholds | no | none | see [thresholds](#thresholds) |
|
||||
|
||||
## Step configuration
|
||||
|
||||
Following parameters can also be specified as step parameters using the global configuration file:
|
||||
|
||||
* `aggregation`
|
||||
* `tasks`
|
||||
* `pmd`
|
||||
* `cpd`
|
||||
* `findbugs`
|
||||
* `checkstyle`
|
||||
* `eslint`
|
||||
* `pylint`
|
||||
* `archive`
|
||||
## ${docGenConfiguration}
|
||||
|
||||
### Thresholds
|
||||
|
||||
|
@ -1,18 +1,6 @@
|
||||
# cloudFoundryDeploy
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
The application will be deployed to a test or production space within Cloud Foundry.
|
||||
Deployment can be done
|
||||
|
||||
* in a standard way
|
||||
* in a zero downtime manner (using a [blue-green deployment approach](https://martinfowler.com/bliki/BlueGreenDeployment.html))
|
||||
|
||||
!!! note "Deployment supports multiple deployment tools"
|
||||
Currently the following are supported:
|
||||
|
||||
* Standard `cf push` and [Bluemix blue-green plugin](https://github.com/bluemixgaragelondon/cf-blue-green-deploy#how-to-use)
|
||||
* [MTA CF CLI Plugin](https://github.com/cloudfoundry-incubator/multiapps-cli-plugin)
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
@ -21,90 +9,9 @@ Deployment can be done
|
||||
|
||||

|
||||
|
||||
## Parameters
|
||||
## ${docGenParameters}
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
| script | yes | | |
|
||||
| cloudFoundry | yes | | |
|
||||
| deployTool | no | cf_native | cf_native, mtaDeployPlugin |
|
||||
| deployType | no | standard | standard, blue-green |
|
||||
| keepOldInstance | no | false | true, false |
|
||||
| dockerImage | no | s4sdk/docker-cf-cli | |
|
||||
| dockerWorkspace | no | /home/piper | |
|
||||
| mtaDeployParameters | | for _deployType:standard_ `-f`<br />for _deployType:blue-green_ `-f --no-confirm` | |
|
||||
| mtaExtensionDescriptor | no | '' | |
|
||||
| mtaPath | no | '' | |
|
||||
| smokeTestScript | no | blueGreenCheckScript.sh (provided by library). <br />Can be overwritten using config property 'smokeTestScript' | |
|
||||
| smokeTestStatusCode | no | 200 | |
|
||||
| stashContent | no | [] | |
|
||||
|
||||
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for retrieving e.g. configuration parameters.
|
||||
* `cloudFoundry` defines a map containing following properties:
|
||||
* `apiEndpoint`: Cloud Foundry API endpoint (default: `https://api.cf.eu10.hana.ondemand.com`)
|
||||
* `appName`: App name of application to be deployed (optional)
|
||||
* `credentialsId`: Credentials to be used for deployment (mandatory)
|
||||
* `manifest`: Manifest to be used for deployment
|
||||
* `org`: Cloud Foundry target organization (mandatory)
|
||||
* `space`: Cloud Foundry target space (mandatory)
|
||||
|
||||
Example: `cloudFoundry: [apiEndpoint: 'https://test.server.com', appName:'cfAppName', credentialsId: 'cfCredentialsId', manifest: 'cfManifest', org: 'cfOrg', space: 'cfSpace']`
|
||||
|
||||
!!! note
|
||||
It is also possible to use following configuration parameters instead of `cloudFoundry` map:
|
||||
|
||||
- cfApiEndpoint
|
||||
- cfAppName
|
||||
- cfCredentialsId
|
||||
- cfManifest
|
||||
- cfOrg
|
||||
- cfSpace
|
||||
|
||||
!!! note
|
||||
Due to [an incompatible change](https://github.com/cloudfoundry/cli/issues/1445) in the Cloud Foundry CLI, multiple buildpacks are not supported by this step.
|
||||
If your `application` contains a list of `buildpacks` instead a single `buildpack`, this will be automatically re-written by the step when blue-green deployment is used.
|
||||
|
||||
* `deployTool` defines the tool which should be used for deployment.
|
||||
* `deployType` defines the type of deployment, either `standard` deployment which results in a system downtime or a zero-downtime `blue-green` deployment.
|
||||
* `keepOldInstance` in case of a `blue-green` deployment the old instance will be deleted by default. If this option is set to true the old instance will remain stopped in the Cloud Foundry space.
|
||||
* `dockerImage` defines the Docker image containing the deployment tools (like cf cli, ...) and `dockerWorkspace` defines the home directory of the default user of the `dockerImage`
|
||||
* `smokeTestScript` allows to specify a script which performs a check during blue-green deployment. The script gets the FQDN as parameter and returns `exit code 0` in case check returned `smokeTestStatusCode`. More details can be found [here](https://github.com/bluemixgaragelondon/cf-blue-green-deploy#how-to-use) <br /> Currently this option is only considered for deployTool `cf_native`.
|
||||
* `stashContent` defines the stash names which should be unstashed at the beginning of the step. This makes the files available in case the step is started on an empty node.
|
||||
|
||||
### Deployment with cf_native
|
||||
|
||||
* `appName` in `cloudFoundry` map (or `cfAppName`) defines the name of the application which will be deployed to the Cloud Foundry space.
|
||||
* `manifest` in `cloudFoundry` maps (or `cfManifest`) defines the manifest to be used for Cloud Foundry deployment.
|
||||
|
||||
!!! note
|
||||
Cloud Foundry supports the deployment of multiple applications using a single manifest file.
|
||||
This option is supported with Piper.
|
||||
|
||||
In this case define `appName: ''` since the app name for the individual applications have to be defined via the manifest.
|
||||
You can find details in the [Cloud Foundry Documentation](https://docs.cloudfoundry.org/devguide/deploy-apps/manifest.html#multi-apps)
|
||||
|
||||
### Deployment with mtaDeployPlugin
|
||||
|
||||
* `mtaPath` define path to *.mtar for deployment.
|
||||
* `mtaExtensionDescriptor` defines additional extension descriptor file for deployment.
|
||||
* `mtaDeployParameters` defines additional parameters passed to mta deployment.
|
||||
|
||||
## Step configuration
|
||||
|
||||
The following parameters can also be specified as step/stage/general parameters using the [global configuration](../configuration.md):
|
||||
|
||||
* cloudFoundry
|
||||
* deployUser
|
||||
* deployTool
|
||||
* deployType
|
||||
* dockerImage
|
||||
* dockerWorkspace
|
||||
* mtaDeployParameters
|
||||
* mtaExtensionDescriptor
|
||||
* mtaPath
|
||||
* smokeTestScript
|
||||
* smokeTestStatusCode
|
||||
* stashContent
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Example
|
||||
|
||||
|
@ -10,176 +10,6 @@ none
|
||||
|
||||
## Method details
|
||||
|
||||
### getArtifactVersion()
|
||||
|
||||
#### Description
|
||||
|
||||
Returns the version of the artifact which is build in the pipeline.
|
||||
|
||||
#### Parameters
|
||||
|
||||
none
|
||||
|
||||
#### Return value
|
||||
|
||||
A `String` containing the version.
|
||||
|
||||
#### Side effects
|
||||
|
||||
none
|
||||
|
||||
#### Exceptions
|
||||
|
||||
none
|
||||
|
||||
#### Example
|
||||
|
||||
```groovy
|
||||
def myVersion = commonPipelineEnvironment.getArtifactVersion()
|
||||
```
|
||||
|
||||
### setArtifactVersion(version)
|
||||
|
||||
#### Description
|
||||
|
||||
Sets the version of the artifact which is build in the pipeline.
|
||||
|
||||
#### Parameters
|
||||
|
||||
none
|
||||
|
||||
#### Return value
|
||||
|
||||
none
|
||||
|
||||
#### Side effects
|
||||
|
||||
none
|
||||
|
||||
#### Exceptions
|
||||
|
||||
none
|
||||
|
||||
#### Example
|
||||
|
||||
```groovy
|
||||
commonPipelineEnvironment.setArtifactVersion('1.2.3')
|
||||
```
|
||||
|
||||
### getConfigProperties()
|
||||
|
||||
#### Description
|
||||
|
||||
Returns the map of project specific configuration properties. No defensive copy is created.
|
||||
Write operations to the map are visible further down in the pipeline.
|
||||
|
||||
#### Parameters
|
||||
|
||||
none
|
||||
|
||||
#### Return value
|
||||
|
||||
A map containing project specific configuration properties.
|
||||
|
||||
#### Side effects
|
||||
|
||||
none
|
||||
|
||||
#### Exceptions
|
||||
|
||||
none
|
||||
|
||||
#### Example
|
||||
|
||||
```groovy
|
||||
commonPipelineEnvironment.getConfigProperties()
|
||||
```
|
||||
|
||||
### setConfigProperties(configuration)
|
||||
|
||||
#### Description
|
||||
|
||||
Sets the map of configuration properties. Any existing map is overwritten.
|
||||
|
||||
#### Parameters
|
||||
|
||||
* `configuration` - A map containing the new configuration
|
||||
|
||||
#### Return value
|
||||
|
||||
none
|
||||
|
||||
#### Side effects
|
||||
|
||||
none
|
||||
|
||||
#### Exceptions
|
||||
|
||||
none
|
||||
|
||||
#### Example
|
||||
|
||||
```groovy
|
||||
commonPipelineEnvironment.setConfigProperties([DEPLOY_HOST: 'deploy-host.com', DEPLOY_ACCOUNT: 'deploy-account'])
|
||||
```
|
||||
|
||||
### getConfigProperty(property)
|
||||
|
||||
#### Description
|
||||
|
||||
Gets a specific value from the configuration property.
|
||||
|
||||
#### Parameters
|
||||
|
||||
* `property` - The key of the property.
|
||||
|
||||
#### Return value
|
||||
|
||||
* The value associated with key `property`. `null` is returned in case the property does not exist.
|
||||
|
||||
#### Side effects
|
||||
|
||||
none
|
||||
|
||||
#### Exceptions
|
||||
|
||||
none
|
||||
|
||||
#### Example
|
||||
|
||||
```groovy
|
||||
commonPipelineEnvironment.getConfigProperty('DEPLOY_HOST')
|
||||
```
|
||||
|
||||
### setConfigProperty(property, value)
|
||||
|
||||
#### Description
|
||||
|
||||
Sets property `property` with value `value`. Any existing property with key `property` is overwritten.
|
||||
|
||||
#### Parameters
|
||||
|
||||
* `property` - The key of the property.
|
||||
* `value` - The value of the property.
|
||||
|
||||
#### Return value
|
||||
|
||||
none
|
||||
|
||||
#### Side effects
|
||||
|
||||
none
|
||||
|
||||
#### Exceptions
|
||||
|
||||
none
|
||||
|
||||
#### Example
|
||||
|
||||
```groovy
|
||||
commonPipelineEnvironment.setConfigProperty('DEPLOY_HOST', 'my-deploy-host.com')
|
||||
```
|
||||
|
||||
### getInfluxCustomData()
|
||||
|
||||
#### Description
|
||||
@ -239,58 +69,6 @@ none
|
||||
def myInfluxDataMap = commonPipelineEnvironment.getInfluxCustomDataMap()
|
||||
```
|
||||
|
||||
### getMtarFileName()
|
||||
|
||||
#### Description
|
||||
|
||||
Returns the path of the mtar archive file.
|
||||
|
||||
#### Parameters
|
||||
|
||||
none
|
||||
|
||||
#### Return value
|
||||
|
||||
The path of the mtar archive file.
|
||||
|
||||
#### Side effects
|
||||
|
||||
none
|
||||
|
||||
#### Exceptions
|
||||
|
||||
none
|
||||
|
||||
#### Example
|
||||
|
||||
```groovy
|
||||
commonPipelineEnvironment.getMtarFileName()
|
||||
```
|
||||
|
||||
### setMtarFileName(name)
|
||||
|
||||
#### Description
|
||||
|
||||
Sets the path of the mtar archive file. Any old value is discarded.
|
||||
|
||||
#### Parameters
|
||||
|
||||
* `mtarFilePath` - The path of the mtar archive file name.
|
||||
|
||||
#### Side effects
|
||||
|
||||
none
|
||||
|
||||
#### Exceptions
|
||||
|
||||
none
|
||||
|
||||
#### Example
|
||||
|
||||
```groovy
|
||||
commonPipelineEnvironment.setMtarFileName('path/to/foo.mtar')
|
||||
```
|
||||
|
||||
### getPipelineMeasurement(measurementName)
|
||||
|
||||
#### Description
|
||||
|
@ -1,20 +1,15 @@
|
||||
# containerExecuteStructureTests
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
In this step [Container Structure Tests](https://github.com/GoogleContainerTools/container-structure-test) are executed.
|
||||
|
||||
This testing framework allows you to execute different test types against a Docker container, for example:
|
||||
|
||||
* Command tests (only if a Docker Daemon is available)
|
||||
* File existence tests
|
||||
* File content tests
|
||||
* Metadata test
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Test configuration is available.
|
||||
|
||||
## ${docGenParameters}
|
||||
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Example
|
||||
|
||||
```
|
||||
@ -24,59 +19,3 @@ containerExecuteStructureTests(
|
||||
testImage: 'node:latest'
|
||||
)
|
||||
```
|
||||
|
||||
## Parameters
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script|yes|||
|
||||
|containerCommand|no|``||
|
||||
|containerShell|no|``||
|
||||
|dockerImage|yes|`ppiper/container-structure-test`||
|
||||
|dockerOptions|no|`-u 0 --entrypoint=''`||
|
||||
|failOnError|no|`true`|`true`, `false`|
|
||||
|pullImage|no||`true`, `false`|
|
||||
|stashContent|no|<ul><li>`tests`</li></ul>||
|
||||
|testConfiguration|no|||
|
||||
|testDriver|no|||
|
||||
|testImage|no|||
|
||||
|testReportFilePath|no|`cst-report.json`||
|
||||
|verbose|no||`true`, `false`|
|
||||
|
||||
Details:
|
||||
|
||||
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for storing the measured duration.
|
||||
* `containerCommand`: Only for Kubernetes environments: Command which is executed to keep container alive, defaults to '/usr/bin/tail -f /dev/null'
|
||||
* containerShell: Only for Kubernetes environments: Shell to be used inside container, defaults to '/bin/sh'
|
||||
* dockerImage: Docker image for code execution.
|
||||
* dockerOptions: Options to be passed to Docker image when starting it (only relevant for non-Kubernetes case).
|
||||
* failOnError: Defines the behavior, in case tests fail.
|
||||
* pullImage: Only relevant for testDriver 'docker'.
|
||||
* stashContent: If specific stashes should be considered for the tests, you can pass this via this parameter.
|
||||
* testConfiguration: Container structure test configuration in yml or json format. You can pass a pattern in order to execute multiple tests.
|
||||
* testDriver: Container structure test driver to be used for testing, please see [https://github.com/GoogleContainerTools/container-structure-test](https://github.com/GoogleContainerTools/container-structure-test) for details.
|
||||
* testImage: Image to be tested
|
||||
* testReportFilePath: Path and name of the test report which will be generated
|
||||
* verbose: Print more detailed information into the log.
|
||||
|
||||
## Step configuration
|
||||
|
||||
We recommend to define values of step parameters via [config.yml file](../configuration.md).
|
||||
|
||||
In following sections the configuration is possible:
|
||||
|
||||
| parameter | general | step | stage |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script||||
|
||||
|containerCommand||X|X|
|
||||
|containerShell||X|X|
|
||||
|dockerImage||X|X|
|
||||
|dockerOptions||X|X|
|
||||
|failOnError||X|X|
|
||||
|pullImage||X|X|
|
||||
|stashContent||X|X|
|
||||
|testConfiguration||X|X|
|
||||
|testDriver||X|X|
|
||||
|testImage||X|X|
|
||||
|testReportFilePath||X|X|
|
||||
|verbose|X|X|X|
|
||||
|
@ -1,20 +1,14 @@
|
||||
# dockerExecute
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
## ${docGenDescription}
|
||||
|
||||
Content here is generated from corresponding step, see `vars`.
|
||||
|
||||
## Parameters
|
||||
|
||||
Content here is generated from corresponding step, see `vars`.
|
||||
## ${docGenParameters}
|
||||
|
||||
## Kubernetes support
|
||||
|
||||
If the Jenkins is setup on a Kubernetes cluster, then you can execute the closure inside a container of a pod by setting an environment variable `ON_K8S` to `true`. However, it will ignore `containerPortMappings`, `dockerOptions` and `dockerVolumeBind` values.
|
||||
|
||||
## Step configuration
|
||||
|
||||
Content here is generated from corresponding step, see `vars`.
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Side effects
|
||||
|
||||
|
@ -1,8 +1,6 @@
|
||||
# dockerExecuteOnKubernetes
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
Content here is generated from corresponding step, see `vars`.
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
@ -11,13 +9,9 @@ Content here is generated from corresponding step, see `vars`.
|
||||
|
||||

|
||||
|
||||
## Parameters
|
||||
## ${docGenParameters}
|
||||
|
||||
Content here is generated from corresponding step, see `vars`.
|
||||
|
||||
## Step configuration
|
||||
|
||||
Content here is generated from corresponding step, see `vars`.
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Side effects
|
||||
|
||||
|
@ -1,37 +1,10 @@
|
||||
# durationMeasure
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
## ${docGenDescription}
|
||||
|
||||
This step is used to measure the duration of a set of steps, e.g. a certain stage.
|
||||
The duration is stored in a Map. The measurement data can then be written to an Influx database using step [influxWriteData](influxWriteData.md).
|
||||
## ${docGenParameters}
|
||||
|
||||
!!! tip
|
||||
Measuring for example the duration of pipeline stages helps to identify potential bottlenecks within the deployment pipeline.
|
||||
This then helps to counter identified issues with respective optimization measures, e.g parallelization of tests.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
none
|
||||
|
||||
## Pipeline configuration
|
||||
|
||||
none
|
||||
|
||||
## Parameters
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
| script | yes | | |
|
||||
| measurementName | no | test_duration | |
|
||||
|
||||
Details:
|
||||
|
||||
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for storing the measured duration.
|
||||
* `measurementName` defines the name of the measurement which is written to the Influx database.
|
||||
|
||||
## Step configuration
|
||||
|
||||
none
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Example
|
||||
|
||||
|
@ -1,39 +1,17 @@
|
||||
# gaugeExecuteTests
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
In this step Gauge ([getgauge.io](http://getgauge.io)) acceptance tests are executed.
|
||||
Using Gauge it will be possible to have a three-tier test layout:
|
||||
|
||||
* Acceptance Criteria
|
||||
* Test implementation layer
|
||||
* Application driver layer
|
||||
|
||||
This layout is propagated by Jez Humble and Dave Farley in their book "Continuous Delivery" as a way to create maintainable acceptance test suites (see "Continuous Delivery", p. 190ff).
|
||||
|
||||
Using Gauge it is possible to write test specifications in [Markdown syntax](http://daringfireball.net/projects/markdown/syntax) and therefore allow e.g. product owners to write the relevant acceptance test specifications. At the same time it allows the developer to implement the steps described in the specification in her development environment.
|
||||
|
||||
You can use the [sample projects](https://github.com/getgauge/gauge-mvn-archetypes) of Gauge.
|
||||
|
||||
!!! note "Make sure to run against a Selenium Hub configuration"
|
||||
In the test example of _gauge-archetype-selenium_ please make sure to allow it to run against a Selenium hub:
|
||||
|
||||
Please extend DriverFactory.java for example in following way:
|
||||
|
||||
``` java
|
||||
String hubUrl = System.getenv("HUB_URL");
|
||||
//when running on a Docker daemon (and not using Kubernetes plugin), Docker images will be linked
|
||||
//in this case hubUrl will be http://selenium:4444/wd/hub due to the linking of the containers
|
||||
hubUrl = (hubUrl == null) ? "http://localhost:4444/wd/hub" : hubUrl;
|
||||
Capabilities chromeCapabilities = DesiredCapabilities.chrome();
|
||||
System.out.println("Running on Selenium Hub: " + hubUrl);
|
||||
return new RemoteWebDriver(new URL(hubUrl), chromeCapabilities);
|
||||
```
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
none
|
||||
|
||||
## ${docGenParameters}
|
||||
|
||||
## ${docGenConfiguration}
|
||||
|
||||
We recommend to define values of step parameters via [config.yml file](../configuration.md).
|
||||
|
||||
## Example
|
||||
|
||||
Pipeline step:
|
||||
@ -41,63 +19,3 @@ Pipeline step:
|
||||
```groovy
|
||||
gaugeExecuteTests script: this, testServerUrl: 'http://test.url'
|
||||
```
|
||||
|
||||
## Parameters
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script|yes|||
|
||||
|buildTool|no|`maven`||
|
||||
|dockerEnvVars|no|`[HUB:TRUE, HUB_URL:http://localhost:4444/wd/hub]`||
|
||||
|dockerImage|no|buildTool=`maven`: `maven:3.5-jdk-8`<br />buildTool=`npm`: `node:8-stretch`<br />||
|
||||
|dockerName|no|buildTool=`maven`: `maven`<br />buildTool=`npm`: `npm`<br />||
|
||||
|dockerWorkspace|no|buildTool=`maven`: <br />buildTool=`npm`: `/home/node`<br />||
|
||||
|failOnError|no|`false`||
|
||||
|gitBranch|no|||
|
||||
|gitSshKeyCredentialsId|no|``||
|
||||
|installCommand|no|`curl -SsL https://downloads.gauge.org/stable | sh -s -- --location=$HOME/bin/gauge`||
|
||||
|languageRunner|no|buildTool=`maven`: `java`<br />buildTool=`npm`: `js`<br />||
|
||||
|runCommand|no|buildTool=`maven`: `mvn test-compile gauge:execute`<br />buildTool=`npm`: `gauge run`<br />||
|
||||
|stashContent|no|<ul><li>`buildDescriptor`</li><li>`tests`</li></ul>||
|
||||
|testOptions|no|buildTool=`maven`: `-DspecsDir=specs`<br />buildTool=`npm`: `specs`<br />||
|
||||
|testRepository|no|||
|
||||
|testServerUrl|no|||
|
||||
|
||||
Details:
|
||||
|
||||
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for storing the measured duration.
|
||||
* `buildTool` defines the build tool to be used for the test execution.
|
||||
* `dockerEnvVars`, see step [dockerExecute](dockerExecute.md)
|
||||
* `dockerImage`, see step [dockerExecute](dockerExecute.md)
|
||||
* `dockerName`, see step [dockerExecute](dockerExecute.md)
|
||||
* `dockerWorkspace`, see step [dockerExecute](dockerExecute.md)
|
||||
* With `failOnError` you can define the behavior, in case tests fail. When this is set to `true` test results cannot be recorded using the `publishTestResults` step afterwards.
|
||||
* `installCommand` defines the command for installing Gauge. In case the `dockerImage` already contains Gauge it can be set to empty: ``.
|
||||
* `languageRunner` defines the Gauge language runner to be used.
|
||||
* `runCommand` defines the command which is used for executing Gauge.
|
||||
* If specific stashes should be considered for the tests, you can pass this via parameter `stashContent`
|
||||
* `testOptions` allows to set specific options for the Gauge execution. Details can be found for example [in the Gauge Maven plugin documentation](https://github.com/getgauge/gauge-maven-plugin#executing-specs)
|
||||
* In case the test implementation is stored in a different repository than the code itself, you can define the repository containing the tests using parameter `testRepository` and if required `gitBranch` (for a different branch than master) and `gitSshKeyCredentialsId` (for protected repositories). For protected repositories the `testRepository` needs to contain the ssh git url.
|
||||
* `testServerUrl` is passed as environment variable `TARGET_SERVER_URL` to the test execution. Tests running against the system should read the host information from this environment variable in order to be infrastructure agnostic.
|
||||
|
||||
## Step configuration
|
||||
|
||||
We recommend to define values of step parameters via [config.yml file](../configuration.md).
|
||||
|
||||
In following sections the configuration is possible:
|
||||
|
||||
| parameter | general | step | stage |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script||||
|
||||
|buildTool||X|X|
|
||||
|dockerEnvVars||X|X|
|
||||
|dockerImage||X|X|
|
||||
|dockerName||X|X|
|
||||
|dockerWorkspace||X|X|
|
||||
|failOnError||X|X|
|
||||
|gitBranch||X|X|
|
||||
|gitSshKeyCredentialsId||X|X|
|
||||
|stashContent||X|X|
|
||||
|testOptions||X|X|
|
||||
|testRepository||X|X|
|
||||
|testServerUrl||X|X|
|
||||
|
@ -1,18 +1,6 @@
|
||||
# githubPublishRelease
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
This step creates a tag in your GitHub repository together with a release.
|
||||
|
||||
The release can be filled with text plus additional information like:
|
||||
|
||||
* Closed pull request since last release
|
||||
* Closed issues since last release
|
||||
* link to delta information showing all commits since last release
|
||||
|
||||
The result looks like
|
||||
|
||||

|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
@ -20,6 +8,10 @@ You need to create a personal access token within GitHub and add this to the Jen
|
||||
|
||||
Please see [GitHub documentation for details about creating the personal access token](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/).
|
||||
|
||||
## ${docGenParameters}
|
||||
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Example
|
||||
|
||||
Usage of pipeline step:
|
||||
@ -27,52 +19,3 @@ Usage of pipeline step:
|
||||
```groovy
|
||||
githubPublishRelease script: this, releaseBodyHeader: "**This is the latest success!**<br />"
|
||||
```
|
||||
|
||||
## Parameters
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script|yes|||
|
||||
|addClosedIssues|no|`false`||
|
||||
|addDeltaToLastRelease|no|`false`||
|
||||
|customFilterExtension|no|``||
|
||||
|excludeLabels|no|<ul><li>`duplicate`</li><li>`invalid`</li><li>`question`</li><li>`wontfix`</li></ul>||
|
||||
|githubApiUrl|no|`//https://api.github.com`||
|
||||
|githubOrg|yes|`script.commonPipelineEnvironment.getGitFolder()`||
|
||||
|githubRepo|yes|`script.commonPipelineEnvironment.getGitRepo()`||
|
||||
|githubServerUrl|no|`https://github.com`||
|
||||
|githubTokenCredentialsId|yes|||
|
||||
|releaseBodyHeader|no|||
|
||||
|version|yes|`script.commonPipelineEnvironment.getArtifactVersion()`||
|
||||
|
||||
### Details
|
||||
|
||||
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for storing the measured duration.
|
||||
* All GitHub related properties allow you to overwrite the default behavior of identifying e.g. GitHub organization, GitHub repository.
|
||||
* `version` defines the version number which will be written as tag as well as release name
|
||||
* By defining the `releaseBodyHeader` you can specify the content which will appear for the release
|
||||
* If you set `addClosedIssues` to `true`, a list of all closed issues and merged pull-requests since the last release will added below the `releaseBodyHeader`
|
||||
* If you set `addDeltaToLastRelease` to `true`, a link will be added to the release information that brings up all commits since the last release.
|
||||
* By passing the parameter `customFilterExtension` it is possible to pass additional filter criteria for retrieving closed issues since the last release. Additional criteria could be for example specific `label`, or `filter` according to [GitHub API documentation](https://developer.github.com/v3/issues/).
|
||||
* It is possible to exclude issues with dedicated labels using parameter `excludeLabels`. Usage is like `excludeLabels: ['label1', 'label2']`
|
||||
|
||||
## Step configuration
|
||||
|
||||
We recommend to define values of step parameters via [config.yml file](../configuration.md).
|
||||
|
||||
In following sections the configuration is possible:
|
||||
|
||||
| parameter | general | step | stage |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script||||
|
||||
|addClosedIssues||X|X|
|
||||
|addDeltaToLastRelease||X|X|
|
||||
|customFilterExtension||X|X|
|
||||
|excludeLabels||X|X|
|
||||
|githubApiUrl|X|X|X|
|
||||
|githubOrg||X|X|
|
||||
|githubRepo||X|X|
|
||||
|githubServerUrl|X|X|X|
|
||||
|githubTokenCredentialsId|X|X|X|
|
||||
|releaseBodyHeader||X|X|
|
||||
|version||X|X|
|
||||
|
@ -1,58 +1,14 @@
|
||||
# handlePipelineStepErrors
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
Used by other steps to make error analysis easier. Lists parameters and other data available to the step in which the error occurs.
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
none
|
||||
|
||||
## Parameters
|
||||
## ${docGenParameters}
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| -----------------|-----------|---------|-----------------|
|
||||
| `stepParameters` | yes | | |
|
||||
| `stepName` | yes | | |
|
||||
| `echoDetails` | yes | true | true, false |
|
||||
|
||||
* `stepParameters` - The parameters from the step to be executed. The list of parameters is then shown in the console output.
|
||||
* `stepName` - The name of the step executed to be shown in the console output.
|
||||
* `echoDetails` - If set to true the following will be output to the console:
|
||||
1. Step beginning: `--- Begin library step: ${stepName}.groovy ---`
|
||||
2. Step end: `--- End library step: ${stepName}.groovy ---`
|
||||
3. Step errors:
|
||||
|
||||
```log
|
||||
----------------------------------------------------------
|
||||
--- An error occurred in the library step: ${stepName}
|
||||
----------------------------------------------------------
|
||||
The following parameters were available to the step:
|
||||
***
|
||||
${stepParameters}
|
||||
***
|
||||
The error was:
|
||||
***
|
||||
${err}
|
||||
***
|
||||
Further information:
|
||||
* Documentation of step ${stepName}: .../${stepName}/
|
||||
* Pipeline documentation: https://...
|
||||
* GitHub repository for pipeline steps: https://...
|
||||
----------------------------------------------------------
|
||||
```
|
||||
|
||||
## Step configuration
|
||||
|
||||
none
|
||||
|
||||
## Side effects
|
||||
|
||||
none
|
||||
|
||||
## Exceptions
|
||||
|
||||
none
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Example
|
||||
|
||||
@ -61,6 +17,33 @@ handlePipelineStepErrors (stepName: 'executeHealthCheck', stepParameters: parame
|
||||
def url = new Utils().getMandatoryParameter(parameters, 'url', null)
|
||||
def statusCode = curl(url)
|
||||
if (statusCode != '200')
|
||||
error "Health Check failed: ${statusCode}"
|
||||
error "Health Check failed: \${statusCode}"
|
||||
}
|
||||
```
|
||||
|
||||
## Example console output
|
||||
|
||||
If `echoDetails` is set to true the following information will be output to the console:
|
||||
|
||||
1. Step beginning: `--- Begin library step: \${stepName}.groovy ---`
|
||||
1. Step end: `--- End library step: \${stepName}.groovy ---`
|
||||
1. Step errors:
|
||||
|
||||
```log
|
||||
----------------------------------------------------------
|
||||
--- An error occurred in the library step: \${stepName}
|
||||
----------------------------------------------------------
|
||||
The following parameters were available to the step:
|
||||
***
|
||||
\${stepParameters}
|
||||
***
|
||||
The error was:
|
||||
***
|
||||
\${err}
|
||||
***
|
||||
Further information:
|
||||
* Documentation of step \${stepName}: .../\${stepName}/
|
||||
* Pipeline documentation: https://...
|
||||
* GitHub repository for pipeline steps: https://...
|
||||
----------------------------------------------------------
|
||||
```
|
||||
|
@ -1,19 +1,6 @@
|
||||
# healthExecuteCheck
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
Calls the health endpoint url of the application.
|
||||
|
||||
The intention of the check is to verify that a suitable health endpoint is available. Such a health endpoint is required for operation purposes.
|
||||
|
||||
This check is used as a real-life test for your productive health endpoints.
|
||||
|
||||
!!! note "Check Depth"
|
||||
Typically, tools performing simple health checks are not too smart. Therefore it is important to choose an endpoint for checking wisely.
|
||||
|
||||
This check therefore only checks if the application/service url returns `HTTP 200`.
|
||||
|
||||
This is in line with health check capabilities of platforms which are used for example in load balancing scenarios. Here you can find an [example for Amazon AWS](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-healthchecks.html).
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
@ -25,6 +12,10 @@ Endpoint for health check is configured.
|
||||
!!! tip
|
||||
    If using Spring Boot framework, ideally the provided `/health` endpoint is used and extended by development. Further information can be found in the [Spring Boot documentation for Endpoints](http://docs.spring.io/spring-boot/docs/current/reference/html/production-ready-endpoints.html)
|
||||
|
||||
## ${docGenParameters}
|
||||
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Example
|
||||
|
||||
Pipeline step:
|
||||
@ -32,29 +23,3 @@ Pipeline step:
|
||||
```groovy
|
||||
healthExecuteCheck testServerUrl: 'https://testserver.com'
|
||||
```
|
||||
|
||||
## Parameters
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script|yes|||
|
||||
|healthEndpoint|no|``||
|
||||
|testServerUrl|no|||
|
||||
|
||||
Details:
|
||||
|
||||
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for storing the measured duration.
|
||||
* Health check function is called providing full qualified `testServerUrl` (and optionally with `healthEndpoint` if endpoint is not the standard url) to the health check.
|
||||
* In case response of the call is different than `HTTP 200 OK` the **health check fails and the pipeline stops**.
|
||||
|
||||
## Step configuration
|
||||
|
||||
We recommend to define values of step parameters via [config.yml file](../configuration.md).
|
||||
|
||||
In following sections the configuration is possible:
|
||||
|
||||
| parameter | general | step | stage |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script||||
|
||||
|healthEndpoint|X|X|X|
|
||||
|testServerUrl|X|X|X|
|
||||
|
@ -1,22 +1,6 @@
|
||||
# influxWriteData
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
Since your Continuous Delivery Pipeline in Jenkins provides your productive development and delivery infrastructure you should monitor the pipeline to ensure it runs as expected. How to setup this monitoring is described in the following.
|
||||
|
||||
You basically need three components:
|
||||
|
||||
- The [InfluxDB Jenkins plugin](https://wiki.jenkins-ci.org/display/JENKINS/InfluxDB+Plugin) which allows you to send build metrics to InfluxDB servers
|
||||
- The [InfluxDB](https://www.influxdata.com/time-series-platform/influxdb/) to store this data (Docker available)
|
||||
- A [Grafana](http://grafana.org/) dashboard to visualize the data stored in InfluxDB (Docker available)
|
||||
|
||||
!!! note "no InfluxDB available?"
|
||||
If you don't have an InfluxDB available yet this step will still provide you some benefit.
|
||||
|
||||
It will create following files for you and archive them into your build:
|
||||
|
||||
* `jenkins_data.json`: This file gives you build-specific information, like e.g. build result, stage where the build failed
|
||||
* `influx_data.json`: This file gives you detailed information about your pipeline, e.g. stage durations, steps executed, ...
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
@ -79,37 +63,9 @@ You need to define the influxDB server in your pipeline as it is defined in the
|
||||
influxDBServer=jenkins
|
||||
```
|
||||
|
||||
## Parameters
|
||||
## ${docGenParameters}
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script|yes|||
|
||||
|artifactVersion|no|`commonPipelineEnvironment.getArtifactVersion()`||
|
||||
|customData|no|`commonPipelineEnvironment.getInfluxCustomData()`||
|
||||
|customDataMap|no|`commonPipelineEnvironment.getInfluxCustomDataMap()`||
|
||||
|customDataMapTags|no|`commonPipelineEnvironment.getInfluxCustomDataTags()`||
|
||||
|customDataTags|no|`commonPipelineEnvironment.getInfluxCustomDataTags()`||
|
||||
|influxPrefix|no|||
|
||||
|influxServer|no|`''`||
|
||||
|wrapInNode|no|`false`||
|
||||
|
||||
## Step configuration
|
||||
|
||||
We recommend to define values of step parameters via [config.yml file](../configuration.md).
|
||||
|
||||
In following sections the configuration is possible:
|
||||
|
||||
| parameter | general | step | stage |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script||||
|
||||
|artifactVersion||X|X|
|
||||
|customData||X|X|
|
||||
|customDataMap||X|X|
|
||||
|customDataMapTags||X|X|
|
||||
|customDataTags||X|X|
|
||||
|influxPrefix||X|X|
|
||||
|influxServer||X|X|
|
||||
|wrapInNode||X|X|
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Example
|
||||
|
||||
@ -144,7 +100,7 @@ As a first step you need to add your InfluxDB as Data source to your Grafana:
|
||||
The Influx plugin collects following data in the Piper context:
|
||||
|
||||
- All data as per default [InfluxDB plugin capabilities](https://wiki.jenkins.io/display/JENKINS/InfluxDB+Plugin)
|
||||
- Additional data collected via `commonPipelineEnvironment.setInfluxCustomDataProperty()` and via `commonPipelineEnvironment.setPipelineMeasurement()`
|
||||
- Additional data collected via `InfluxData.addField(measurement, key, value)`
|
||||
|
||||
!!! note "Add custom information to your InfluxDB"
|
||||
You can simply add custom data collected during your pipeline runs via available data objects.
|
||||
@ -169,7 +125,7 @@ Measurements are potentially pre-fixed - see parameter `influxPrefix` above.
|
||||
| sonarqube_data | <ul><li>blocker_issues</li><li>critical_issues</li><li>info_issues</li><li>major_issues</li><li>minor_issues</li><li>lines_of_code</li><li>...</li></ul> | Details see [InfluxDB plugin documentation](https://wiki.jenkins.io/display/JENKINS/InfluxDB+Plugin) |
|
||||
| jenkins_custom_data | Piper fills following columns by default: <br /><ul><li>build_result</li><li>build_result_key</li><li>build_step (->step in case of error)</li><li>build_error (->error message in case of error)</li></ul> | filled by `commonPipelineEnvironment.setInfluxCustomDataProperty()` |
|
||||
| pipeline_data | Examples from the Piper templates:<br /><ul><li>build_duration</li><li>opa_duration</li><li>deploy_test_duration</li><li>deploy_test_duration</li><li>fortify_duration</li><li>release_duration</li><li>...</li></ul>| filled by step [`measureDuration`](durationMeasure.md) using parameter `measurementName`|
|
||||
| step_data | Considered, e.g.:<br /><ul><li>build_url</li><li>bats</li><li>checkmarx</li><li>fortify</li><li>gauge</li><li>nsp</li><li>snyk</li><li>sonar</li><li>...</li></ul>| filled by `commonPipelineEnvironment.setInfluxStepData()` |
|
||||
| step_data | Considered, e.g.:<br /><ul><li>build_url</li><li>bats</li><li>checkmarx</li><li>fortify</li><li>gauge</li><li>nsp</li><li>snyk</li><li>sonar</li><li>...</li></ul>| filled by `InfluxData.addField('step_data', key, value)` |
|
||||
|
||||
### Examples for InfluxDB queries which can be used in Grafana
|
||||
|
||||
|
@ -1,83 +1,15 @@
|
||||
# karmaExecuteTests
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
In this step the ([Karma test runner](http://karma-runner.github.io)) is executed.
|
||||
|
||||
The step is using the `seleniumExecuteTest` step to spin up two containers in a Docker network:
|
||||
|
||||
- a Selenium/Chrome container (`selenium/standalone-chrome`)
|
||||
- a NodeJS container (`node:8-stretch`)
|
||||
|
||||
In the Docker network, the containers can be referenced by the values provided in `dockerName` and `sidecarName`, the default values are `karma` and `selenium`. These values must be used in the `hostname` properties of the test configuration ([Karma](https://karma-runner.github.io/1.0/config/configuration-file.html) and [WebDriver](https://github.com/karma-runner/karma-webdriver-launcher#usage)).
|
||||
|
||||
!!! note
|
||||
In a Kubernetes environment, the containers both need to be referenced with `localhost`.
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- **running Karma tests** - have a NPM module with running tests executed with Karma
|
||||
- **configured WebDriver** - have the [`karma-webdriver-launcher`](https://github.com/karma-runner/karma-webdriver-launcher) package installed and a custom, WebDriver-based browser configured in Karma
|
||||
* **running Karma tests** - have a NPM module with running tests executed with Karma
|
||||
* **configured WebDriver** - have the [`karma-webdriver-launcher`](https://github.com/karma-runner/karma-webdriver-launcher) package installed and a custom, WebDriver-based browser configured in Karma
|
||||
|
||||
## Parameters
|
||||
## ${docGenParameters}
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script|yes|||
|
||||
|containerPortMappings|no|`[node:8-stretch: [[containerPort: 9876, hostPort: 9876]]]`||
|
||||
|dockerEnvVars|no|`[ NO_PROXY: 'localhost,karma,$NO_PROXY', no_proxy: 'localhost,karma,$no_proxy']`||
|
||||
|dockerImage|no|`node:8-stretch`||
|
||||
|dockerName|no|`karma`||
|
||||
|dockerWorkspace|no|`/home/node`||
|
||||
|failOnError|no|||
|
||||
|installCommand|no|`npm install --quiet`||
|
||||
|modules|no|`['.']`||
|
||||
|runCommand|no|`npm run karma`||
|
||||
|sidecarEnvVars|no|`[ NO_PROXY: 'localhost,selenium,$NO_PROXY', no_proxy: 'localhost,selenium,$no_proxy']`||
|
||||
|sidecarImage|no|||
|
||||
|sidecarName|no|||
|
||||
|sidecarVolumeBind|no|||
|
||||
|stashContent|no|`['buildDescriptor', 'tests']`||
|
||||
|
||||
- `script` - defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for storing the measured duration.
|
||||
- `containerPortMappings` - see step [dockerExecute](dockerExecute.md)
|
||||
- `dockerEnvVars` - see step [dockerExecute](dockerExecute.md)
|
||||
- `dockerImage` - see step [dockerExecute](dockerExecute.md)
|
||||
- `dockerName` - see step [dockerExecute](dockerExecute.md)
|
||||
- `dockerWorkspace` - see step [dockerExecute](dockerExecute.md)
|
||||
- `failOnError` - see step [seleniumExecuteTests](seleniumExecuteTests.md)
|
||||
- `installCommand` - the command that is executed to install dependencies
|
||||
- `modules` - define the paths of the modules to execute tests on
|
||||
- `runCommand` - the command that is executed to start the tests
|
||||
- `sidecarEnvVars` - see step [dockerExecute](dockerExecute.md)
|
||||
- `sidecarImage` - see step [dockerExecute](dockerExecute.md)
|
||||
- `sidecarName` - see step [dockerExecute](dockerExecute.md)
|
||||
- `sidecarVolumeBind` - see step [dockerExecute](dockerExecute.md)
|
||||
- `stashContent` - pass specific stashed that should be considered for the tests
|
||||
|
||||
## Step configuration
|
||||
|
||||
We recommend to define values of step parameters via [config.yml file](../configuration.md).
|
||||
|
||||
In following sections the configuration is possible:
|
||||
|
||||
| parameter | general | step | stage |
|
||||
| ----------|---------|------|-------|
|
||||
|script||||
|
||||
|containerPortMappings|X|X|X|
|
||||
|dockerEnvVars|X|X|X|
|
||||
|dockerImage|X|X|X|
|
||||
|dockerName|X|X|X|
|
||||
|dockerWorkspace|X|X|X|
|
||||
|failOnError|X|X|X|
|
||||
|installCommand|X|X|X|
|
||||
|modules|X|X|X|
|
||||
|runCommand|X|X|X|
|
||||
|sidecarEnvVars|X|X|X|
|
||||
|sidecarImage|X|X|X|
|
||||
|sidecarName|X|X|X|
|
||||
|sidecarVolumeBind|X|X|X|
|
||||
|stashContent|X|X|X|
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Side effects
|
||||
|
||||
|
@ -1,14 +1,6 @@
|
||||
# mailSendNotification
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
Sends notifications to all potential culprits of a current or previous build failure plus to a fixed list of recipients.
|
||||
It will attach the current build log to the email.
|
||||
|
||||
Notifications are sent in following cases:
|
||||
|
||||
* current build failed or is unstable
|
||||
* current build is successful and previous build failed or was unstable
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
@ -22,60 +14,9 @@ Usage of pipeline step:
|
||||
mailSendNotification script: this
|
||||
```
|
||||
|
||||
## Parameters
|
||||
## ${docGenParameters}
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script|yes|||
|
||||
|buildResult|no|||
|
||||
|gitCommitId|no|`script.commonPipelineEnvironment.getGitCommitId()`||
|
||||
|gitSshKeyCredentialsId|no|``||
|
||||
|gitUrl|no|||
|
||||
|notificationAttachment|no|`true`||
|
||||
|notificationRecipients|no|||
|
||||
|notifyCulprits|no|`true`||
|
||||
|numLogLinesInBody|no|`100`||
|
||||
|projectName|no|||
|
||||
|wrapInNode|no|`false`||
|
||||
|
||||
### Details
|
||||
|
||||
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for storing the measured duration.
|
||||
* `buildResult` may be used to overrule the build result coming from `currentBuild.result`. This is for example used in the step `pipelineRestartSteps`
|
||||
* `gitCommitId` defines a dedicated git commitId for culprit retrieval.
|
||||
* `gitUrl` and `gitCommitId` are used to retrieve culprit information.
|
||||
* `gitSshKeyCredentialsId` only required if your git repository is protected. It defines the credentialsId for the git ssh credentials.
|
||||
* `notificationAttachment` defines if the console log file should be attached to the notification mail.
|
||||
* `notificationRecipients` defines the fixed list of recipient that always get the notification. In case you want to send the notification to the culprits only set it to an empty string `''`.
|
||||
|
||||
!!! note
|
||||
Multiple recipients need to be separated with the `space` character.
|
||||
In case you do not want to have any fixed recipients of the notifications leave the property empty.
|
||||
|
||||
* `notifyCulprits` defines if potential culprits should receive an email.
|
||||
* `numLogLinesInBody` defines the number of log lines (=last lines of the log) which are included into the body of the notification email.
|
||||
* `projectName` may be used to specify a different name in the email subject.
|
||||
* `wrapInNode` needs to be set to `true` if step is used outside of a node context, e.g. post actions in a declarative pipeline script.
|
||||
|
||||
## Step configuration
|
||||
|
||||
We recommend to define values of step parameters via [config.yml file](../configuration.md).
|
||||
|
||||
In following sections the configuration is possible:
|
||||
|
||||
| parameter | general | step | stage |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script||||
|
||||
|buildResult||X|X|
|
||||
|gitCommitId||X|X|
|
||||
|gitSshKeyCredentialsId|X|X|X|
|
||||
|gitUrl||X|X|
|
||||
|notificationAttachment||X|X|
|
||||
|notificationRecipients||X|X|
|
||||
|notifyCulprits||X|X|
|
||||
|numLogLinesInBody||X|X|
|
||||
|projectName||X|X|
|
||||
|wrapInNode||X|X|
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Side effects
|
||||
|
||||
|
@ -1,47 +1,10 @@
|
||||
# mavenExecute
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
## ${docGenDescription}
|
||||
|
||||
Executes a maven command inside a Docker container.
|
||||
## ${docGenParameters}
|
||||
|
||||
## Parameters
|
||||
|
||||
| parameter | mandatory | default | example values |
|
||||
| -------------------------------|-----------|-------------------|----------------------------|
|
||||
| `script` | yes | | |
|
||||
| `dockerImage` | no | 'maven:3.5-jdk-7' | |
|
||||
| `globalSettingsFile` | no | | 'local_folder/settings.xml'|
|
||||
| `projectSettingsFile` | no | | |
|
||||
| `pomPath` | no | | 'local_folder/m2' |
|
||||
| `flags` | no | | '-o' |
|
||||
| `goals` | no | | 'clean install' |
|
||||
| `m2Path` | no | | 'local_folder/m2' |
|
||||
| `defines` | no | | '-Dmaven.tests.skip=true' |
|
||||
| `logSuccessfulMavenTransfers` | no | `false` | 'true' |
|
||||
|
||||
* `script` defines the global script environment of the Jenkinsfile run.
|
||||
Typically `this` is passed to this parameter. This allows the function
|
||||
to access the commonPipelineEnvironment for retrieving, for example,
|
||||
configuration parameters.
|
||||
* `dockerImage` Name of the docker image that should be used.
|
||||
* `globalSettingsFile` Path or url to the mvn settings file that should be used as global settings file.
|
||||
* `projectSettingsFile` Path or url to the mvn settings file that should be used as project settings file.
|
||||
* `pomPath` Path to the pom file that should be used.
|
||||
* `flags` Flags to provide when running mvn.
|
||||
* `goals` Maven goals that should be executed.
|
||||
* `m2Path` Path to the location of the local repository that should be used.
|
||||
* `defines` Additional properties.
|
||||
* `logSuccessfulMavenTransfers` configures maven to log successful downloads. This is set to `false` by default to reduce the noise in build logs.
|
||||
|
||||
## Step configuration
|
||||
|
||||
The following parameters can also be specified as step parameters using the global configuration file:
|
||||
|
||||
* `dockerImage`
|
||||
* `globalSettingsFile`
|
||||
* `projectSettingsFile`
|
||||
* `pomPath`
|
||||
* `m2Path`
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Exceptions
|
||||
|
||||
|
@ -1,44 +1,18 @@
|
||||
# mtaBuild
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
## ${docGenDescription}
|
||||
|
||||
Executes the SAP Multitarget Application Archive Builder to create an mtar archive of the application.
|
||||
## Prerequisites
|
||||
|
||||
Before doing this, validates that SAP Multitarget Application Archive Builder exists and the version is compatible.
|
||||
While using a custom docker file, ensure that the following tools are installed:
|
||||
|
||||
Note that a version is formed by `major.minor.patch`, and a version is compatible to another version if the minor and patch versions are higher, but the major version is not, e.g. if 3.39.10 is the expected version, 3.39.11 and 3.40.1 would be compatible versions, but 4.0.1 would not be a compatible version.
|
||||
* **SAP MTA Archive Builder 1.0.6 or compatible version** - can be downloaded from [SAP Development Tools](https://tools.hana.ondemand.com/#cloud).
|
||||
* **Java 8 or compatible version** - necessary to run the *MTA Archive Builder* itself and to build Java modules.
|
||||
* **NodeJS installed** - the MTA Builder uses `npm` to download node module dependencies such as `grunt`.
|
||||
|
||||
## Parameters
|
||||
## ${docGenParameters}
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| -----------------|-----------|--------------------------------------------------------|--------------------|
|
||||
| `script` | yes | | |
|
||||
| `dockerImage` | no | `ppiper/mta-archive-builder` | |
|
||||
| `dockerOptions` | no | '' | |
|
||||
| `buildTarget` | yes | `'NEO'` | 'CF', 'NEO', 'XSA' |
|
||||
| `extension` | no | | |
|
||||
| `mtaJarLocation` | no | `'/opt/sap/mta/lib/mta.jar'` | |
|
||||
| `applicationName`| no | | |
|
||||
|
||||
* `script` - The common script environment of the Jenkinsfile running. Typically the reference to the script calling the pipeline step is provided with the `this` parameter, as in `script: this`. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for retrieving, for example, configuration parameters.
|
||||
* `dockerImage` - The Docker image to execute the MTA build.
|
||||
Note that you can provide your own image if required, but for most cases, the default should be fine.
|
||||
* `dockerOptions` Docker options to be set when starting the container. It can be a list or a string.
|
||||
* `buildTarget` - The target platform to which the mtar can be deployed.
|
||||
* `extension` - The path to the extension descriptor file.
|
||||
* `mtaJarLocation` - The location of the SAP Multitarget Application Archive Builder jar file, including file name and extension. First, the location is retrieved from the environment variables using the environment variable `MTA_JAR_LOCATION`. If no environment variable is provided, the location is retrieved from the parameters, or the step configuration using the key `mtaJarLocation`. If SAP Multitarget Application Archive Builder is not found on one of the previous locations an AbortException is thrown.
|
||||
Note that the environment variable `MTA_JAR_LOCATION` has priority. In case that the script runs on multiple nodes, SAP Multitarget Application Archive Builder must be located on all the nodes, therefore the environment variable must be also configured on all the nodes.
|
||||
* `applicationName` - The name of the application which is being built. If the parameter has been provided and no `mta.yaml` exists, the `mta.yaml` will be automatically generated using this parameter and the information (`name` and `version`) from `package.json` before the actual build starts.
|
||||
|
||||
## Step configuration
|
||||
|
||||
The following parameters can also be specified as step parameters using the global configuration file:
|
||||
|
||||
* `dockerImage`
|
||||
* `buildTarget`
|
||||
* `extension`
|
||||
* `mtaJarLocation`
|
||||
* `applicationName`
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Side effects
|
||||
|
||||
@ -47,7 +21,6 @@ The following parameters can also be specified as step parameters using the glob
|
||||
## Exceptions
|
||||
|
||||
* `AbortException`:
|
||||
* If SAP Multitarget Application Archive Builder is not found.
|
||||
* If there is an invalid `buildTarget`.
|
||||
* If there is no key `ID` inside the `mta.yaml` file.
|
||||
|
||||
|
18
documentation/docs/steps/multicloudDeploy.md
Normal file
18
documentation/docs/steps/multicloudDeploy.md
Normal file
@ -0,0 +1,18 @@
|
||||
# ${docGenStepName}
|
||||
|
||||
## ${docGenDescription}
|
||||
|
||||
## ${docGenParameters}
|
||||
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Examples
|
||||
|
||||
```groovy
|
||||
multicloudDeploy(
|
||||
script: script,
|
||||
cfTargets: [[apiEndpoint: 'https://test.server.com', appName:'cfAppName', credentialsId: 'cfCredentialsId', manifest: 'cfManifest', org: 'cfOrg', space: 'cfSpace']],
|
||||
neoTargets: [[credentialsId: 'my-credentials-id', host: 'hana.example.org', account: 'trialuser1']],
|
||||
enableZeroDowntimeDeployment: 'true'
|
||||
)
|
||||
```
|
@ -1,12 +1,6 @@
|
||||
# neoDeploy
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
Deploys an Application to SAP Cloud Platform (SAP CP) using the SAP Cloud Platform Console Client (Neo Java Web SDK).
|
||||
|
||||
Before doing this, validates that SAP Cloud Platform Console Client is installed and the version is compatible.
|
||||
|
||||
Note that a version is formed by `major.minor.patch`, and a version is compatible to another version if the minor and patch versions are higher, but the major version is not, e.g. if 3.39.10 is the expected version, 3.39.11 and 3.40.1 would be compatible versions, but 4.0.1 would not be a compatible version.
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
@ -16,100 +10,13 @@ Note that a version is formed by `major.minor.patch`, and a version is compatibl
|
||||
|
||||

|
||||
|
||||
* **Neo Java Web SDK 3.39.10 or compatible version** - can be downloaded from [Maven Central](http://central.maven.org/maven2/com/sap/cloud/neo-java-web-sdk/). The Neo Java Web SDK needs to be extracted into the folder provided by `neoHome`. In case this parameters is not provided and there is no NEO_HOME parameter in the environment `<neoRoot>/tools` needs to be in the `PATH`. This step is also capable of triggering the neo deploy tool provided inside a docker image.
|
||||
* **Neo Java Web SDK 3.39.10 or compatible version** - can be downloaded from [Maven Central](http://central.maven.org/maven2/com/sap/cloud/neo-java-web-sdk/). This step is capable of triggering the neo deploy tool provided inside a docker image. We provide the docker image `ppiper/neo-cli`. `neo.sh` needs to be contained in the path, e.g. by adding a symbolic link to `/usr/local/bin`.
|
||||
|
||||
* **Java 8 or compatible version** - needed by the *Neo-Java-Web-SDK*
|
||||
* **Java 8 or compatible version** - needed by the *Neo-Java-Web-SDK*. The Java environment needs to be properly configured (JAVA_HOME set, java executable contained in the path).
|
||||
|
||||
## Parameters when using MTA deployment method (default - MTA)
|
||||
## ${docGenParameters}
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| -------------------|-----------|-------------------------------|-------------------------------------------------|
|
||||
| `script` | yes | | |
|
||||
| `neo` | no | | |
|
||||
| `deployMode` | yes | `'mta'` | `'mta'`, `'warParams'`, `'warPropertiesFile'` |
|
||||
| `neoHome` | no | | |
|
||||
| `source` | no | | |
|
||||
|
||||
The parameter `neo` is a map which contains the following parameters:
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| -------------------|-----------|-------------------------------|-------------------------------------------------|
|
||||
| `account` | no | | |
|
||||
| `credentialsId` | no | `'CI_CREDENTIALS_ID'` | |
|
||||
| `host` | no | | |
|
||||
|
||||
## Parameters when using WAR file deployment method with .properties file (WAR_PROPERTIESFILE)
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| -------------------|-----------|-------------------------------|-------------------------------------------------|
|
||||
| `script` | yes | | |
|
||||
| `neo` | no | | |
|
||||
| `deployMode` | yes | `'mta'` | `'mta'`, `'warParams'`, `'warPropertiesFile'` |
|
||||
| `neoHome` | no | | |
|
||||
| `source` | no | | |
|
||||
| `warAction` | yes | `'deploy'` | `'deploy'`, `'rolling-update'` |
|
||||
|
||||
The parameter `neo` is a map which contains the following parameters:
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| -------------------|-----------|-------------------------------|-------------------------------------------------|
|
||||
| `credentialsId` | no | `'CI_CREDENTIALS_ID'` | |
|
||||
| `propertiesFile` | yes | | |
|
||||
|
||||
## Parameters when using WAR file deployment method without .properties file - with parameters (WAR_PARAMS)
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| -------------------|-----------|-------------------------------|-------------------------------------------------|
|
||||
| `script` | yes | | |
|
||||
| `neo` | no | | |
|
||||
| `deployMode` | yes | `'mta'` | `'mta'`, `'warParams'`, `'warPropertiesFile'` |
|
||||
| `neoHome` | no | | |
|
||||
| `source` | no | | |
|
||||
| `warAction` | yes | `'deploy'` | `'deploy'`, `'rolling-update'` |
|
||||
|
||||
The parameter `neo` is a map which contains the following parameters:
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| -------------------|-----------|-------------------------------|-------------------------------------------------|
|
||||
| `account` | yes | | |
|
||||
| `application` | yes | | |
|
||||
| `credentialsId` | no | `'CI_CREDENTIALS_ID'` | |
|
||||
| `environment` | | | |
|
||||
| `host` | yes | | |
|
||||
| `runtime` | yes | | |
|
||||
| `runtimeVersion` | yes | | |
|
||||
| `size` | no | `'lite'` | `'lite'`, `'pro'`, `'prem'`, `'prem-plus'` |
|
||||
| `vmArguments` | | | |
|
||||
|
||||
* `script` - The common script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for retrieving e.g. configuration parameters.
|
||||
* `deployMode` - The deployment mode which should be used. Available options are `'mta'` (default), `'warParams'` (deploying WAR file and passing all the deployment parameters via the function call) and `'warPropertiesFile'` (deploying WAR file and putting all the deployment parameters in a .properties file)
|
||||
* `neoHome` - The path to the `neo-java-web-sdk` tool used for SAP CP deployment. If no parameter is provided, the path is retrieved from the environment variables using the environment variable `NEO_HOME`. If no parameter and no environment variable is provided, the path is retrieved from the step configuration using the step configuration key `neoHome`. If the previous configurations are not provided, the tool is expected on the `PATH`, and if it is not available on the `PATH` an AbortException is thrown.
|
||||
* `source` - The path to the archive for deployment to SAP CP. If not provided, `mtarFilePath` from the common pipeline environment is used instead.
|
||||
* `warAction` - Action mode when using WAR file mode. Available options are `deploy` (default) and `rolling-update` which performs update of an application without downtime in one go.
|
||||
|
||||
The parameters for `neo`:
|
||||
|
||||
* `account` - The SAP Cloud Platform account to deploy to.
|
||||
* `application` - Name of the application you want to manage, configure, or deploy
|
||||
* `credentialsId` - The Jenkins credentials containing user and password used for SAP CP deployment.
|
||||
* `environment` - Map of environment variables in the form of KEY: VALUE
|
||||
* `host` - The SAP Cloud Platform host to deploy to.
|
||||
* `propertiesFile` - The path to the .properties file in which all necessary deployment properties for the application are defined.
|
||||
* `runtime` - Name of SAP Cloud Platform application runtime
|
||||
* `runtimeVersion` - Version of SAP Cloud Platform application runtime
|
||||
* `size` - Compute unit (VM) size. Acceptable values: lite, pro, prem, prem-plus.
|
||||
* `vmArguments` - String of VM arguments passed to the JVM
|
||||
|
||||
The step is prepared for being executed in docker. The corresponding parameters can be applied. See step `dockerExecute` for details.
|
||||
|
||||
## Step configuration
|
||||
|
||||
The parameter `neo` including all can also be specified as a global parameter using the global configuration file.
|
||||
|
||||
The following parameters can also be specified as step parameters using the global configuration file:
|
||||
|
||||
* `dockerImage`
|
||||
* `neoHome`
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Side effects
|
||||
|
||||
|
@ -1,12 +1,22 @@
|
||||
# newmanExecute
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
## ${docGenDescription}
|
||||
|
||||
This script executes your [Postman](https://www.getpostman.com) tests from a collection via the [Newman](https://www.getpostman.com/docs/v6/postman/collection_runs/command_line_integration_with_newman) command line collection.
|
||||
## Prerequisites
|
||||
|
||||
## Prerequisites
|
||||
* prepared Postman with a test collection
|
||||
|
||||
- prepared Postman with a test collection
|
||||
## ${docGenParameters}
|
||||
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Side effects
|
||||
|
||||
Step uses `dockerExecute` inside.
|
||||
|
||||
## Exceptions
|
||||
|
||||
none
|
||||
|
||||
## Example
|
||||
|
||||
@ -22,51 +32,3 @@ This step should be used in combination with `testsPublishResults`:
|
||||
newmanExecute script: this, failOnError: false
|
||||
testsPublishResults script: this, junit: [pattern: '**/newman/TEST-*.xml']
|
||||
```
|
||||
|
||||
## Parameters
|
||||
|
||||
| name | mandatory | default | possible values |
|
||||
|------|-----------|---------|-----------------|
|
||||
| `dockerImage` | no | | |
|
||||
| `failOnError` | no | | `true`, `false` |
|
||||
| `gitBranch` | no | | |
|
||||
| `gitSshKeyCredentialsId` | no | | |
|
||||
| `newmanCollection` | no | | |
|
||||
| `newmanEnvironment` | no | | |
|
||||
| `newmanGlobals` | no | | |
|
||||
| `newmanRunCommand` | no | | |
|
||||
| `script` | yes | | |
|
||||
| `stashContent` | no | | |
|
||||
| `testRepository` | no | | |
|
||||
|
||||
- `dockerImage` - Docker image for code execution.
|
||||
- `failOnError` - Defines the behavior, in case tests fail.
|
||||
- `gitBranch` - see `testRepository`
|
||||
- `gitSshKeyCredentialsId` - see `testRepository`
|
||||
- `newmanCollection` - The test collection that should be executed. This could also be a file pattern.
|
||||
- `newmanEnvironment` - Specify an environment file path or URL. Environments provide a set of variables that one can use within collections. see also [Newman docs](https://github.com/postmanlabs/newman#newman-run-collection-file-source-options)
|
||||
- `newmanGlobals` - Specify the file path or URL for global variables. Global variables are similar to environment variables but have a lower precedence and can be overridden by environment variables having the same name. see also [Newman docs](https://github.com/postmanlabs/newman#newman-run-collection-file-source-options)
|
||||
- `newmanRunCommand` - The newman command that will be executed inside the docker container.
|
||||
- `script` - The common script environment of the Jenkinsfile running. Typically the reference to the script calling the pipeline step is provided with the this parameter, as in script: this. This allows the function to access the commonPipelineEnvironment for retrieving, for example, configuration parameters.
|
||||
- `stashContent` - If specific stashes should be considered for the tests, you can pass this via this parameter.
|
||||
- `testRepository` - In case the test implementation is stored in a different repository than the code itself, you can define the repository containing the tests using parameter `testRepository` and if required `gitBranch` (for a different branch than master) and `gitSshKeyCredentialsId` (for protected repositories). For protected repositories the `testRepository` needs to contain the ssh git url.
|
||||
|
||||
## Step configuration
|
||||
|
||||
We recommend to define values of step parameters via [config.yml file](../configuration.md).
|
||||
|
||||
In following sections the configuration is possible:
|
||||
|
||||
| parameter | general | step | stage |
|
||||
|-----------|---------|------|-------|
|
||||
| `dockerImage` | | X | X |
|
||||
| `failOnError` | | X | X |
|
||||
| `gitBranch` | | X | X |
|
||||
| `gitSshKeyCredentialsId` | | X | X |
|
||||
| `newmanCollection` | | X | X |
|
||||
| `newmanEnvironment` | | X | X |
|
||||
| `newmanGlobals` | | X | X |
|
||||
| `newmanRunCommand` | | X | X |
|
||||
| `script` | X | X | X |
|
||||
| `stashContent` | | X | X |
|
||||
| `testRepository` | | X | X |
|
||||
|
@ -1,16 +1,12 @@
|
||||
# npmExecute
|
||||
|
||||
## Description
|
||||
# ${docGenStepName}
|
||||
|
||||
Content here is generated from corresponding step, see `vars`.
|
||||
|
||||
## Parameters
|
||||
## ${docGenDescription}
|
||||
|
||||
Content here is generated from corresponding step, see `vars`.
|
||||
## ${docGenParameters}
|
||||
|
||||
## Step configuration
|
||||
|
||||
Content here is generated from corresponding step, see `vars`.
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Exceptions
|
||||
|
||||
|
@ -1,34 +1,14 @@
|
||||
# pipelineExecute
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
Loads a pipeline from a git repository. The idea is to set up a pipeline job in Jenkins that loads a minimal pipeline, which in turn loads the shared library and then uses this step to load the actual pipeline.
|
||||
|
||||
A centrally maintained pipeline script (Jenkinsfile) can be re-used by
|
||||
several projects using `pipelineExecute` as outlined in the example
|
||||
below.
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
none
|
||||
|
||||
## Parameters
|
||||
## ${docGenParameters}
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| -------------------|-----------|-----------------|-----------------|
|
||||
| `repoUrl` | yes | | |
|
||||
| `branch` | no | 'master' | |
|
||||
| `path` | no | 'Jenkinsfile' | |
|
||||
| `credentialsId` | no | An empty String | |
|
||||
|
||||
* `repoUrl` The url to the git repository of the pipeline to be loaded.
|
||||
* `branch` The branch of the git repository from which the pipeline should be checked out.
|
||||
* `path` The path to the Jenkinsfile, inside the repository, to be loaded.
|
||||
* `credentialsId` The Jenkins credentials containing user and password needed to access a private git repository.
|
||||
|
||||
## Step configuration
|
||||
|
||||
none
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Side effects
|
||||
|
||||
|
@ -1,25 +1,15 @@
|
||||
# pipelineRestartSteps
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
Support of restarting failed stages or steps in a pipeline is limited in Jenkins.
|
||||
|
||||
This has been documented in the [Jenkins Jira issue JENKINS-33846](https://issues.jenkins-ci.org/browse/JENKINS-33846).
|
||||
|
||||
For declarative pipelines there is a solution available which partially addresses this topic:
|
||||
https://jenkins.io/doc/book/pipeline/running-pipelines/#restart-from-a-stage.
|
||||
|
||||
Nonetheless, still features are missing, so it can't be used in all cases.
|
||||
The more complex Piper pipelines which share a state via [`commonPipelineEnvironment`](commonPipelineEnvironment.md) will for example not work with the standard _restart-from-stage_.
|
||||
|
||||
The step `pipelineRestartSteps` aims to address this gap and allows individual parts of a pipeline (e.g. a failed deployment) to be restarted.
|
||||
|
||||
This is done in a way that the pipeline waits for user input to restart the pipeline in case of a failure. In case this user input is not provided the pipeline stops after a timeout which can be configured.
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
none
|
||||
|
||||
## ${docGenParameters}
|
||||
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Example
|
||||
|
||||
Usage of pipeline step:
|
||||
@ -37,32 +27,6 @@ pipelineRestartSteps (script: this) {
|
||||
|
||||
In case you cannot use `node` inside this step, please choose the parameter `timeoutInSeconds` carefully!
|
||||
|
||||
## Parameters
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script|yes|||
|
||||
|sendMail|no|`true`||
|
||||
|timeoutInSeconds|no|`900`||
|
||||
|
||||
### Details
|
||||
|
||||
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for storing the measured duration.
|
||||
* If `sendMail: true` the step `mailSendNotification` will be triggered in case of an error
|
||||
* `timeoutInSeconds` defines the time period where the job waits for input. Default is 15 minutes. Once this time is passed the job enters state FAILED.
|
||||
|
||||
## Step configuration
|
||||
|
||||
We recommend to define values of step parameters via [config.yml file](../configuration.md).
|
||||
|
||||
In following sections the configuration is possible:
|
||||
|
||||
| parameter | general | step | stage |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script||||
|
||||
|sendMail|X|X|X|
|
||||
|timeoutInSeconds|X|X|X|
|
||||
|
||||
## Side effects
|
||||
|
||||
none
|
||||
|
@ -1,22 +1,12 @@
|
||||
# pipelineStashFiles
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
This step stashes files that are needed in other build steps (on other nodes).
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
none
|
||||
|
||||
## Parameters
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
| script | yes | | |
|
||||
| runCheckmarx | no | false | |
|
||||
| runOpaTests | no | false | |
|
||||
| stashIncludes | no | see details | |
|
||||
| stashExcludes | no | see details | |
|
||||
## ${docGenParameters}
|
||||
|
||||
Details:
|
||||
|
||||
@ -42,14 +32,7 @@ The step is stashing files before and after the build. This is due to the fact,
|
||||
* `stashIncludes: [buildDescriptor: '**/mybuild.yml']`
|
||||
* `stashExcludes: [tests: '**/NOTRELEVANT.*']`
|
||||
|
||||
## Step configuration
|
||||
|
||||
The following parameters can also be specified as step parameters using the global configuration file:
|
||||
|
||||
* runOpaTests
|
||||
* runCheckmarx
|
||||
* stashExcludes
|
||||
* stashIncludes
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Explanation of pipeline step
|
||||
|
||||
|
11
documentation/docs/steps/pipelineStashFilesAfterBuild.md
Normal file
11
documentation/docs/steps/pipelineStashFilesAfterBuild.md
Normal file
@ -0,0 +1,11 @@
|
||||
# ${docGenStepName}
|
||||
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
none
|
||||
|
||||
## ${docGenParameters}
|
||||
|
||||
## ${docGenConfiguration}
|
11
documentation/docs/steps/pipelineStashFilesBeforeBuild.md
Normal file
11
documentation/docs/steps/pipelineStashFilesBeforeBuild.md
Normal file
@ -0,0 +1,11 @@
|
||||
# ${docGenStepName}
|
||||
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
none
|
||||
|
||||
## ${docGenParameters}
|
||||
|
||||
## ${docGenConfiguration}
|
@ -1,17 +1,10 @@
|
||||
# prepareDefaultValues
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
## ${docGenDescription}
|
||||
|
||||
Loads the pipeline library default values from the file `resources/default_pipeline_environment.yml`.
|
||||
Afterwards the values can be loaded by the method: `ConfigurationLoader.defaultStepConfiguration`
|
||||
## ${docGenParameters}
|
||||
|
||||
## Parameters
|
||||
|
||||
None
|
||||
|
||||
## Step configuration
|
||||
|
||||
None
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Exceptions
|
||||
|
||||
|
@ -1,18 +1,6 @@
|
||||
# seleniumExecuteTests
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
Enables UI test execution with Selenium in a sidecar container.
|
||||
|
||||
The step executes a closure (see example below) connecting to a sidecar container with a Selenium Server.
|
||||
|
||||
When executing in a
|
||||
|
||||
* local Docker environment, please make sure to set Selenium host to **`selenium`** in your tests.
|
||||
* Kubernetes environment, plese make sure to set Seleniums host to **`localhost`** in your tests.
|
||||
|
||||
!!! note "Proxy Environments"
|
||||
    If you work in an environment containing a proxy, please make sure that `localhost`/`selenium` is added to your proxy exclusion list, e.g. via environment variable `NO_PROXY` & `no_proxy`. You can pass those via parameters `dockerEnvVars` and `sidecarEnvVars` directly to the containers if required.
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
@ -76,66 +64,9 @@ webdriverio
|
||||
});
|
||||
```
|
||||
|
||||
## Parameters
|
||||
## ${docGenParameters}
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script|yes|||
|
||||
|buildTool|no|`npm`|`maven`, `npm`|
|
||||
|containerPortMappings|no|`[selenium/standalone-chrome:[[containerPort:4444, hostPort:4444]]]`||
|
||||
|dockerEnvVars|no|||
|
||||
|dockerImage|no|buildTool=`maven`: `maven:3.5-jdk-8`<br />buildTool=`npm`: `node:8-stretch`<br />||
|
||||
|dockerName|no|buildTool=`maven`: `maven`<br />buildTool=`npm`: `npm`<br />||
|
||||
|dockerWorkspace|no|buildTool=`maven`: <br />buildTool=`npm`: `/home/node`<br />||
|
||||
|failOnError|no|`true`||
|
||||
|gitBranch|no|||
|
||||
|gitSshKeyCredentialsId|no|``||
|
||||
|sidecarEnvVars|no|||
|
||||
|sidecarImage|no|`selenium/standalone-chrome`||
|
||||
|sidecarName|no|`selenium`||
|
||||
|sidecarVolumeBind|no|`[/dev/shm:/dev/shm]`||
|
||||
|stashContent|no|<ul><li>`tests`</li></ul>||
|
||||
|testRepository|no|||
|
||||
|
||||
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for storing the measured duration.
|
||||
* `buildTool` defines the build tool to be used for the test execution.
|
||||
* `containerPortMappings`, see step [dockerExecute](dockerExecute.md)
|
||||
* `dockerEnvVars`, see step [dockerExecute](dockerExecute.md)
|
||||
* `dockerImage`, see step [dockerExecute](dockerExecute.md)
|
||||
* `dockerName`, see step [dockerExecute](dockerExecute.md)
|
||||
* `dockerWorkspace`, see step [dockerExecute](dockerExecute.md)
|
||||
* `failOnError` specifies if the step should fail in case the execution of the body of this step fails.
|
||||
* `sidecarEnvVars`, see step [dockerExecute](dockerExecute.md)
|
||||
* `sidecarImage`, see step [dockerExecute](dockerExecute.md)
|
||||
* `sidecarName`, see step [dockerExecute](dockerExecute.md)
|
||||
* `sidecarVolumeBind`, see step [dockerExecute](dockerExecute.md)
|
||||
* If specific stashes should be considered for the tests, you can pass this via parameter `stashContent`
|
||||
* In case the test implementation is stored in a different repository than the code itself, you can define the repository containing the tests using parameter `testRepository` and if required `gitBranch` (for a different branch than master) and `gitSshKeyCredentialsId` (for protected repositories). For protected repositories the testRepository needs to contain the ssh git url.
|
||||
|
||||
## Step configuration
|
||||
|
||||
We recommend to define values of step parameters via [config.yml file](../configuration.md).
|
||||
|
||||
In following sections the configuration is possible:
|
||||
|
||||
| parameter | general | step | stage |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script||||
|
||||
|buildTool||X|X|
|
||||
|containerPortMappings|X|X|X|
|
||||
|dockerEnvVars|X|X|X|
|
||||
|dockerImage|X|X|X|
|
||||
|dockerName|X|X|X|
|
||||
|dockerWorkspace|X|X|X|
|
||||
|failOnError|X|X|X|
|
||||
|gitBranch|X|X|X|
|
||||
|gitSshKeyCredentialsId|X|X|X|
|
||||
|sidecarEnvVars|X|X|X|
|
||||
|sidecarImage|X|X|X|
|
||||
|sidecarName|X|X|X|
|
||||
|sidecarVolumeBind|X|X|X|
|
||||
|stashContent|X|X|X|
|
||||
|testRepository|X|X|X|
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Side effects
|
||||
|
||||
|
@ -1,30 +1,14 @@
|
||||
# setupCommonPipelineEnvironment
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
Initializes the [`commonPipelineEnvironment`](commonPipelineEnvironment.md), which is used throughout the complete pipeline.
|
||||
|
||||
!!! tip
|
||||
This step needs to run at the beginning of a pipeline right after the SCM checkout.
|
||||
Then subsequent pipeline steps consume the information from `commonPipelineEnvironment`; it does not need to be passed to pipeline steps explicitly.
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
* A **configuration file** with properties (default location: `.pipeline/config.properties`). The property values are used as default values in many pipeline steps.
|
||||
* A **configuration file** with properties. The property values are used as default values in many pipeline steps.
|
||||
|
||||
## Parameters
|
||||
## ${docGenParameters}
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| ------------ |-----------|-------------------------------|-----------------|
|
||||
| `script` | yes | - | |
|
||||
| `configFile` | no | `.pipeline/config.properties` | |
|
||||
|
||||
* `script` - The reference to the pipeline script (Jenkinsfile). Normally `this` needs to be provided.
|
||||
* `configFile` - Property file defining project specific settings.
|
||||
|
||||
## Step configuration
|
||||
|
||||
none
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Side effects
|
||||
|
||||
|
@ -1,73 +1,33 @@
|
||||
# slackSendNotification
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
Sends notifications to the Slack channel about the build status.
|
||||
|
||||
Notification contains:
|
||||
|
||||
* Build status;
|
||||
* Repo Owner;
|
||||
* Repo Name;
|
||||
* Branch Name;
|
||||
* Jenkins Build Number;
|
||||
* Jenkins Build URL.
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Installed and configured [Jenkins Slack plugin](https://github.com/jenkinsci/slack-plugin).
|
||||
* Installed and configured [Jenkins Slack plugin](https://github.com/jenkinsci/slack-plugin).
|
||||
|
||||
## ${docGenParameters}
|
||||
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Example
|
||||
|
||||
Usage of pipeline step:
|
||||
|
||||
```groovy
|
||||
try {
|
||||
stage('..') {..}
|
||||
stage('..') {..}
|
||||
stage('..') {..}
|
||||
currentBuild.result = 'SUCCESS'
|
||||
} catch (Throwable err) {
|
||||
currentBuild.result = 'FAILURE'
|
||||
throw err
|
||||
} finally {
|
||||
stage('report') {
|
||||
slackSendNotification script: this
|
||||
pipeline {
|
||||
agent any
|
||||
stages {
|
||||
stage('Build') {
|
||||
steps {
|
||||
echo "do something"
|
||||
}
|
||||
}
|
||||
}
|
||||
post {
|
||||
always {
|
||||
slackSendNotification script: this
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Parameters
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script|yes|||
|
||||
|baseUrl|no|||
|
||||
|channel|no|||
|
||||
|color|no|`${buildStatus == 'SUCCESS'?'#008000':'#E60000'}`||
|
||||
|credentialsId|no|||
|
||||
|message|no|||
|
||||
|
||||
### Details
|
||||
|
||||
* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for storing the measured duration.
|
||||
* `baseUrl` allows overriding the Slack Plugin Integration Base Url specified in the global configuration.
|
||||
* `color` defines the message color.
|
||||
* If `channel` is defined another than the default channel will be used.
|
||||
* `credentialsId` defines the Jenkins credentialId which holds the Slack token
|
||||
* With parameter `message` a custom message can be defined which is sent into the Slack channel.
|
||||
|
||||
## Step configuration
|
||||
|
||||
We recommend to define values of step parameters via [config.yml file](../configuration.md).
|
||||
|
||||
In following sections the configuration is possible:
|
||||
|
||||
| parameter | general | step | stage |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
|script||||
|
||||
|baseUrl||X|X|
|
||||
|channel||X|X|
|
||||
|color||X|X|
|
||||
|credentialsId||X|X|
|
||||
|message||X|X|
|
||||
|
26
documentation/docs/steps/snykExecute.md
Normal file
26
documentation/docs/steps/snykExecute.md
Normal file
@ -0,0 +1,26 @@
|
||||
# ${docGenStepName}
|
||||
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
* **Snyk account** - have an account on snyk.io
|
||||
* **Snyk token** - have a Snyk user token
|
||||
|
||||
## ${docGenParameters}
|
||||
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Side effects
|
||||
|
||||
Step uses `dockerExecute` inside.
|
||||
|
||||
## Exceptions
|
||||
|
||||
none
|
||||
|
||||
## Example
|
||||
|
||||
```groovy
|
||||
snykExecute script: this, snykCredentialsId: 'mySnykToken'
|
||||
```
|
@ -1,8 +1,6 @@
|
||||
# testsPublishResults
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
This step can publish test results from various sources.
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
@ -28,29 +26,7 @@ testsPublishResults(
|
||||
)
|
||||
```
|
||||
|
||||
Available parameters:
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| ----------|-----------|---------|-----------------|
|
||||
| script | yes | | |
|
||||
| `failOnError` | no | `false` | `true`, `false` |
|
||||
| junit | no | `false` | true, false |
|
||||
| jacoco | no | `false` | true, false |
|
||||
| cobertura | no | `false` | true, false |
|
||||
| jmeter | no | `false` | true, false |
|
||||
|
||||
* `script` - The common script environment of the Jenkinsfile running.
|
||||
Typically the reference to the script calling the pipeline step is provided
|
||||
with the `this` parameter, as in `script: this`.
|
||||
This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md)
|
||||
for retrieving, for example, configuration parameters.
|
||||
* `failOnError` - If `failOnError` is set to `true` the step will fail the build if JUnit detected any failing tests.
|
||||
* `junit` - Publishes test results files in JUnit format with the [JUnit Plugin](https://plugins.jenkins.io/junit).
|
||||
* `jacoco` - Publishes code coverage with the [JaCoCo plugin](https://plugins.jenkins.io/jacoco) .
|
||||
* `cobertura` - Publishes code coverage with the [Cobertura plugin](https://plugins.jenkins.io/cobertura).
|
||||
* `jmeter` - Publishes performance test results with the [Performance plugin](https://plugins.jenkins.io/performance).
|
||||
|
||||
Each of the parameters `junit`, `jacoco`, `cobertura` and `jmeter` can be set to `true` or `false` but also to a map of parameters to hand in different settings for the tools.
|
||||
## ${docGenParameters}
|
||||
|
||||
### junit
|
||||
|
||||
@ -101,14 +77,7 @@ Each of the parameters `junit`, `jacoco`, `cobertura` and `jmeter` can be set to
|
||||
| archive | no | `false` | true, false |
|
||||
| allowEmptyResults | no | `true` | true, false |
|
||||
|
||||
## Step configuration
|
||||
|
||||
Following parameters can also be specified as step parameters using the global configuration file:
|
||||
|
||||
* `junit`
|
||||
* `jacoco`
|
||||
* `cobertura`
|
||||
* `jmeter`
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Side effects
|
||||
|
||||
|
@ -1,41 +0,0 @@
|
||||
# toolValidate
|
||||
|
||||
## Description
|
||||
|
||||
Checks the existence and compatibility of a tool, necessary for a successful pipeline execution.
|
||||
In case a violation is found, an exception is raised.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
none
|
||||
|
||||
## Parameters
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| -----------------|-----------|-----------------------------------|----------------------------|
|
||||
| `tool` | yes | | 'java', 'mta', 'neo' |
|
||||
| `home` | yes | | |
|
||||
|
||||
* `tool` The tool that is checked for existence and compatible version.
|
||||
* `home` The location in the file system where Jenkins can access the tool.
|
||||
|
||||
## Step configuration
|
||||
|
||||
none
|
||||
|
||||
## Side effects
|
||||
|
||||
none
|
||||
|
||||
## Exceptions
|
||||
|
||||
* `IllegalArgumentException`:
|
||||
* If at least one of the parameters `tool`, `home` is not provided.
|
||||
* `AbortException`:
|
||||
* If `tool` is not supported.
|
||||
|
||||
## Example
|
||||
|
||||
```groovy
|
||||
toolValidate tool: 'neo', home:'/path/to/neo-java-web-sdk'
|
||||
```
|
@ -1,52 +1,15 @@
|
||||
# transportRequestCreate
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
Creates
|
||||
|
||||
* a Transport Request for a Change Document on the Solution Manager (type `SOLMAN`) or
|
||||
* a Transport Request inside an ABAP system (type`CTS`)
|
||||
|
||||
The id of the transport request is available via [commonPipelineEnvironment.getTransportRequestId()](commonPipelineEnvironment.md).
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
* **[Change Management Client 2.0.0 or compatible version](http://central.maven.org/maven2/com/sap/devops/cmclient/dist.cli/)** - available for download on Maven Central.
|
||||
* Solution Manager version `ST720 SP08` or newer.
|
||||
|
||||
## Parameters
|
||||
## ${docGenParameters}
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| -----------------|-----------|--------------------------------------------------------|--------------------|
|
||||
| `script` | yes | | |
|
||||
| `changeDocumentId` | for `SOLMAN` | | |
|
||||
| `transportType` | for `CTS` | no | |
|
||||
| `targetSystem` | for `CTS` | no | |
|
||||
| `description` | for `CTS` | no | |
|
||||
| `changeManagement/credentialsId` | yes | | |
|
||||
| `changeManagement/endpoint` | yes | | |
|
||||
| `changeManagement/clientOpts` | no | | |
|
||||
| `changeManagement/git/from` | no | `origin/master` | |
|
||||
| `changeManagement/git/to` | no | `HEAD` | |
|
||||
| `changeManagement/changeDocumentLabel` | no | `ChangeDocument\s?:` | regex pattern |
|
||||
| `changeManagement/git/format` | no | `%b` | see `git log --help` |
|
||||
| `changeManagement/type` | no | `SOLMAN` | `SOLMAN`, `CTS` |
|
||||
| `developmentSystemId` | for `SOLMAN` | | |
|
||||
|
||||
* `script` - The common script environment of the Jenkinsfile running. Typically the reference to the script calling the pipeline step is provided with the `this` parameter, as in `script: this`. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for retrieving, for example, configuration parameters.
|
||||
* `changeDocumentId` - for `SOLMAN` only. The id of the change document to that the transport request is bound to. Typically this value is provided via commit message in the commit history.
|
||||
* `changeManagement/type` Where/how the transport request is created (via SAP Solution Manager, ABAP).
|
||||
* `changeManagement/credentialsId` - The credentials to connect to the service endpoint (Solution Manager, ABAP System).
|
||||
* `changeManagement/endpoint` - The service endpoint (Solution Manager, ABAP System).
|
||||
* `changeManagement/clientOpts`- Options forwarded to JVM used by the CM client, like `JAVA_OPTS`
|
||||
* `changeManagement/git/from` - The starting point for retrieving the change document id
|
||||
* `changeManagement/git/to` - The end point for retrieving the change document id
|
||||
* `changeManagement/changeDocumentLabel` - For type `SOLMAN` only. A pattern used for identifying lines holding the change document id.
|
||||
* `changeManagement/git/format` - Specifies what part of the commit is scanned. By default the body of the commit message is scanned.
|
||||
* `description` - for `CTS` only. The description of the transport request.
|
||||
* `targetSystem` - for `CTS` only. The system receiving the transport request.
|
||||
* `transportType` - for type `CTS` only. Typically `W` (workbench) or `C` (customizing).
|
||||
* `developmentSystemId`- for `SOLMAN` only. The logical system id for which the transport request is created. The format is `<SID>~<TYPE>(/<CLIENT>)?`. For ABAP Systems the `developmentSystemId` looks like `DEV~ABAP/100`. For non-ABAP systems the `developmentSystemId` looks like e.g. `L21~EXT_SRV` or `J01~JAVA`. In case the system type is not known (in the examples provided here: `EXT_SRV` or `JAVA`) the information can be retrieved from the Solution Manager instance.
|
||||
## Step configuration
|
||||
## ${docGenConfiguration}
|
||||
|
||||
The step is configured using a customer configuration file provided as
|
||||
resource in a custom shared library.
|
||||
|
@ -1,41 +1,17 @@
|
||||
# transportRequestRelease
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
Releases a Transport Request.
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
* **[Change Management Client 2.0.0 or compatible version](http://central.maven.org/maven2/com/sap/devops/cmclient/dist.cli/)** - available for download on Maven Central.
|
||||
|
||||
## Parameters
|
||||
## ${docGenParameters}
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| -----------------|-----------|--------------------------------------------------------|--------------------|
|
||||
| `script` | yes | | |
|
||||
| `changeDocumentId` | `SOLMAN` only | | |
|
||||
| `transportRequestId`| yes | | |
|
||||
| `changeManagement/changeDocumentLabel` | no | `ChangeDocument\s?:` | regex pattern |
|
||||
| `changeManagment/transportRequestLabel` | no | `TransportRequest\s?:` | regex pattern |
|
||||
| `changeManagement/credentialsId` | yes | | |
|
||||
| `changeManagement/endpoint` | yes | | |
|
||||
| `changeManagement/git/from` | no | `origin/master` | |
|
||||
| `changeManagement/git/to` | no | `HEAD` | |
|
||||
| `changeManagement/git/format` | no | `%b` | see `git log --help` |
|
||||
| `changeManagement/type` | no | `SOLMAN` | `SOLMAN`, `CTS` |
|
||||
|
||||
* `script` - The common script environment of the Jenkinsfile running. Typically the reference to the script calling the pipeline step is provided with the `this` parameter, as in `script: this`. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for retrieving, for example, configuration parameters.
|
||||
* `changeDocumentId` - for `SOLMAN` only. The id of the change document related to the transport request to release.
|
||||
* `transportRequestId` - The id of the transport request to release.
|
||||
* `changeManagement/changeDocumentLabel` - for `SOLMAN` only. A pattern used for identifying lines holding the change document id.
|
||||
* `changeManagment/transportRequestLabel` - A pattern used for identifying lines holding the transport request id.
|
||||
* `changeManagement/credentialsId` - The credentials to connect to the service endpoint (Solution Manager, ABAP System).
|
||||
* `changeManagement/endpoint` - The service endpoint (Solution Manager, ABAP System).
|
||||
* `changeManagement/git/from` - The starting point for retrieving the change document id and/or transport request id
|
||||
* `changeManagement/git/to` - The end point for retrieving the change document id and/or transport request id
|
||||
* `changeManagement/git/format` - Specifies what part of the commit is scanned. By default the body of the commit message is scanned.
|
||||
## ${docGenConfiguration}
|
||||
|
||||
|
||||
## Step configuration
|
||||
|
||||
The step is configured using a customer configuration file provided as
|
||||
resource in a custom shared library.
|
||||
|
@ -1,46 +1,14 @@
|
||||
# transportRequestUploadFile
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
Uploads a file to a Transport Request.
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
* **[Change Management Client 2.0.0 or compatible version](http://central.maven.org/maven2/com/sap/devops/cmclient/dist.cli/)** - available for download on Maven Central.
|
||||
|
||||
## Parameters
|
||||
## ${docGenParameters}
|
||||
|
||||
| parameter | mandatory | default | possible values |
|
||||
| -----------------|-----------|--------------------------------------------------------|--------------------|
|
||||
| `script` | yes | | |
|
||||
| `changeDocumentId` | `SOLMAN` only | | |
|
||||
| `transportRequestId`| yes | | |
|
||||
| `applicationId` | `SOLMAN` only | | |
|
||||
| `filePath` | yes | | |
|
||||
| `changeManagement/credentialsId` | yes | | |
|
||||
| `changeManagement/endpoint` | yes | | |
|
||||
| `changeManagement/git/from` | no | `origin/master` | |
|
||||
| `changeManagement/git/to` | no | `HEAD` | |
|
||||
| `changeManagement/changeDocumentLabel` | no | `ChangeDocument\s?:` | regex pattern |
|
||||
| `changeManagement/transportRequestLabel` | no | `TransportRequest\s?:` | regex pattern |
|
||||
| `changeManagement/git/format` | no | `%b` | see `git log --help` |
|
||||
| `changeManagement/type` | no | `SOLMAN` | `SOLMAN`, `CTS` |
|
||||
|
||||
* `script` - The common script environment of the Jenkinsfile running. Typically the reference to the script calling the pipeline step is provided with the `this` parameter, as in `script: this`. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for retrieving, for example, configuration parameters.
|
||||
* `changeDocumentId` - For type `SOLMAN` only. The id of the change document related to the transport request to release. Typically provided via commit history.
|
||||
* `transportRequestId` - The id of the transport request to release. Typically provided via commit history.
|
||||
* `applicationId` - For type `SOLMAN` only. The id of the application.
|
||||
* `filePath` - The path of the file to upload.
|
||||
* `changeManagement/credentialsId` - The credentials to connect to the service endpoint (Solution Manager, ABAP System).
|
||||
* `changeManagement/endpoint` - The service endpoint (Solution Manager, ABAP System).
|
||||
* `changeManagement/git/from` - The starting point for retrieving the change document id and/or transport request id
|
||||
* `changeManagement/git/to` - The end point for retrieving the change document id and/or transport request id
|
||||
* `changeManagement/changeDocumentLabel` - For type `SOLMAN` only. A pattern used for identifying lines holding the change document id.
|
||||
* `changeManagement/transportRequestLabel` - A pattern used for identifying lines holding the transport request id.
|
||||
* `changeManagement/type` Where/how the transport request is created (via SAP Solution Manager, ABAP).
|
||||
* `changeManagement/git/format` - Specifies what part of the commit is scanned. By default the body of the commit message is scanned.
|
||||
|
||||
## Step configuration
|
||||
## ${docGenConfiguration}
|
||||
|
||||
The step is configured using a customer configuration file provided as
|
||||
resource in a custom shared library.
|
||||
|
@ -1,19 +1,88 @@
|
||||
# uiVeri5ExecuteTests
|
||||
# ${docGenStepName}
|
||||
|
||||
## Description
|
||||
|
||||
Content here is generated from corresponding step, see `vars`.
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
## Parameters
|
||||
## ${docGenParameters}
|
||||
|
||||
Content here is generated from corresponding step, see `vars`.
|
||||
|
||||
## Step configuration
|
||||
|
||||
Content here is generated from corresponding step, see `vars`.
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Exceptions
|
||||
|
||||
If you see an error like `fatal: Not a git repository (or any parent up to mount point /home/jenkins)` it is likely that your test description cannot be found.<br />
|
||||
Please make sure to point parameter `testOptions` to your `conf.js` file like `testOptions: './path/to/my/tests/conf.js'`
|
||||
|
||||
## Examples
|
||||
|
||||
### Passing credentials from Jenkins
|
||||
|
||||
When running acceptance tests in a real environment, authentication will be enabled in most cases. UIVeri5 includes [features to automatically perform the login](https://github.com/SAP/ui5-uiveri5/blob/master/docs/config/authentication.md) with credentials in the `conf.js`. However, having credentials to the acceptance system stored in plain text is not an optimal solution.
|
||||
|
||||
Therefore, UIVeri5 allows templating to set parameters at runtime, as shown in the following example `conf.js`:
|
||||
|
||||
```js
|
||||
// Read environment variables
|
||||
const defaultParams = {
|
||||
url: process.env.TARGET_SERVER_URL,
|
||||
user: process.env.TEST_USER,
|
||||
pass: process.env.TEST_PASS
|
||||
};
|
||||
|
||||
// Resolve path to specs relative to the working directory
|
||||
const path = require('path');
|
||||
const specs = path.relative(process.cwd(), path.join(__dirname, '*.spec.js'));
|
||||
|
||||
// export UIVeri5 config
|
||||
exports.config = {
|
||||
profile: 'integration',
|
||||
baseUrl: '\${params.url}',
|
||||
specs: specs,
|
||||
params: defaultParams, // can be overridden via cli `--params.<key>=<value>`
|
||||
auth: {
|
||||
// set up authorization for CF XSUAA
|
||||
'sapcloud-form': {
|
||||
user: '\${params.user}',
|
||||
pass: '\${params.pass}',
|
||||
userFieldSelector: 'input[name="username"]',
|
||||
passFieldSelector: 'input[name="password"]',
|
||||
logonButtonSelector: 'input[type="submit"]',
|
||||
redirectUrl: /cp.portal\/site/
|
||||
}
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
While default values for `baseUrl`, `user` and `pass` are read from the environment, they can also be overridden when calling the CLI.
|
||||
|
||||
In a custom Pipeline, this is very simple: Just wrap the call to `uiVeri5ExecuteTests` in `withCredentials` (`TARGET_SERVER_URL` is read from `config.yml`):
|
||||
|
||||
```groovy
|
||||
withCredentials([usernamePassword(
|
||||
credentialsId: 'MY_ACCEPTANCE_CREDENTIALS',
|
||||
passwordVariable: 'password',
|
||||
usernameVariable: 'username'
|
||||
)]) {
|
||||
uiVeri5ExecuteTests script: this, testOptions: "./uiveri5/conf.js --params.user=\${username} --params.pass=\${password}"
|
||||
}
|
||||
```
|
||||
|
||||
In a Pipeline Template, a [Stage Exit](#) can be used to fetch the credentials and store them in the environment. As the environment is passed down to uiVeri5ExecuteTests, the variables will be present there. This is an example for the stage exit `.pipeline/extensions/Acceptance.groovy` where the `credentialsId` is read from the `config.yml`:
|
||||
|
||||
```groovy
|
||||
void call(Map params) {
|
||||
// read username and password from the credential store
|
||||
withCredentials([usernamePassword(
|
||||
credentialsId: params.config.acceptanceCredentialsId,
|
||||
passwordVariable: 'password',
|
||||
usernameVariable: 'username'
|
||||
)]) {
|
||||
// store the result in the environment variables for executeUIVeri5Test
|
||||
withEnv(["TEST_USER=\${username}", "TEST_PASS=\${password}"]) {
|
||||
//execute original stage as defined in the template
|
||||
params.originalStage()
|
||||
}
|
||||
}
|
||||
}
|
||||
return this
|
||||
```
|
||||
|
23
documentation/docs/steps/whitesourceExecuteScan.md
Normal file
23
documentation/docs/steps/whitesourceExecuteScan.md
Normal file
@ -0,0 +1,23 @@
|
||||
# ${docGenStepName}
|
||||
|
||||
## ${docGenDescription}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Your company has registered an account with WhiteSource and you have enabled the use of so called `User Keys` to manage
|
||||
access to your organization in WhiteSource via dedicated privileges. Scanning your products without adequate user level
|
||||
access protection imposed on the WhiteSource backend would simply allow access based on the organization token.
|
||||
|
||||
## ${docGenParameters}
|
||||
|
||||
## ${docGenConfiguration}
|
||||
|
||||
## Exceptions
|
||||
|
||||
None
|
||||
|
||||
## Examples
|
||||
|
||||
```groovy
|
||||
whitesourceExecuteScan script: this, scanType: 'pip', productName: 'My Whitesource Product', userTokenCredentialsId: 'companyAdminToken', orgAdminUserTokenCredentialsId: 'orgAdminToken', orgToken: 'myWhitesourceOrganizationToken'
|
||||
```
|
@ -32,12 +32,13 @@ nav:
|
||||
- seleniumExecuteTests: steps/seleniumExecuteTests.md
|
||||
- setupCommonPipelineEnvironment: steps/setupCommonPipelineEnvironment.md
|
||||
- slackSendNotification: steps/slackSendNotification.md
|
||||
- snykExecute: steps/snykExecute.md
|
||||
- testsPublishResults: steps/testsPublishResults.md
|
||||
- toolValidate: steps/toolValidate.md
|
||||
- transportRequestCreate: steps/transportRequestCreate.md
|
||||
- transportRequestRelease: steps/transportRequestRelease.md
|
||||
- transportRequestUploadFile: steps/transportRequestUploadFile.md
|
||||
- uiVeri5ExecuteTests: steps/uiVeri5ExecuteTests.md
|
||||
- whitesourceExecuteScan: steps/whitesourceExecuteScan.md
|
||||
- 'Scenarios':
|
||||
- 'Build and Deploy Hybrid Applications with Jenkins and SAP Solution Manager': scenarios/changeManagement.md
|
||||
- 'Build and Deploy SAP UI5 or SAP Fiori Applications on SAP Cloud Platform with Jenkins': scenarios/ui5-sap-cp/Readme.md
|
||||
@ -64,6 +65,5 @@ markdown_extensions:
|
||||
extra_css:
|
||||
- 'css/extra.css'
|
||||
edit_uri: edit/master/documentation/docs
|
||||
docs_dir: docs-tmp
|
||||
site_dir: docs-gen
|
||||
repo_url: https://github.com/SAP/jenkins-library
|
||||
|
@ -1,16 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
PRIVATE_KEY="cfg/id_rsa"
|
||||
|
||||
chmod 600 "${PRIVATE_KEY}"
|
||||
eval `ssh-agent -s`
|
||||
ssh-add "${PRIVATE_KEY}"
|
||||
mkdir ~/.ssh
|
||||
chmod 700 ~/.ssh
|
||||
ssh-keyscan github.com >> ~/.ssh/known_hosts
|
||||
git config user.name "Travis CI Publisher"
|
||||
git remote add docu "git@github.com:$TRAVIS_REPO_SLUG.git";
|
||||
git fetch docu gh-pages:gh-pages
|
||||
echo "Pushing to gh-pages of repository $TRAVIS_REPO_SLUG"
|
||||
cd documentation
|
||||
mkdocs gh-deploy -v --clean --remote-name docu
|
2
pom.xml
2
pom.xml
@ -10,7 +10,7 @@
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<groupId>com.sap.cp.jenkins</groupId>
|
||||
<artifactId>jenkins-library</artifactId>
|
||||
<version>0.9</version>
|
||||
<version>0.10</version>
|
||||
|
||||
<name>SAP CP Piper Library</name>
|
||||
<description>Shared library containing steps and utilities to set up continuous deployment processes for SAP technologies.</description>
|
||||
|
@ -13,9 +13,9 @@ ${error}
|
||||
***
|
||||
|
||||
Further information:
|
||||
* Documentation of library step ${stepName}: https://sap.github.io/jenkins-library/steps/${stepName}/
|
||||
* Source code of library step ${stepName}: https://github.com/SAP/jenkins-library/blob/master/vars/${stepName}.groovy
|
||||
* Library documentation: https://sap.github.io/jenkins-library/
|
||||
* Library repository: https://github.com/SAP/jenkins-library
|
||||
* Documentation of library step ${stepName}: ${libraryDocumentationUrl}steps/${stepName}/
|
||||
* Source code of library step ${stepName}: ${libraryRepositoryUrl}blob/master/vars/${stepName}.groovy
|
||||
* Library documentation: ${libraryDocumentationUrl}
|
||||
* Library repository: ${libraryRepositoryUrl}
|
||||
|
||||
----------------------------------------------------------
|
||||
|
@ -0,0 +1,41 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>${reportTitle}</title>
|
||||
<style type="text/css">${style}</style>
|
||||
</head>
|
||||
<body>
|
||||
<h1>${reportTitle}</h1>
|
||||
<h2>
|
||||
<span>
|
||||
<i>WhiteSource product name: ${whitesourceProductName}</i><br />
|
||||
<i>Filtered project names: ${whitesourceProjectNames?:''}</i>
|
||||
</span>
|
||||
</h2>
|
||||
<div>
|
||||
<h3> total number of vulnerabilities: ${totalVulnerabilities}<br />
|
||||
total number of high/critical vulnerabilities with CVSS score >= ${cvssSeverityLimit}: ${totalSevereVulnerabilities}
|
||||
</h3>
|
||||
</div>
|
||||
<p>Snapshot taken:${now}</p>
|
||||
<table>
|
||||
<tr>
|
||||
<th>Entry #</th>
|
||||
<th>Date</th>
|
||||
<th>CVE</th>
|
||||
<th>CVSS Score</th>
|
||||
<th>CVSS Version</th>
|
||||
<th>Project</th>
|
||||
<th>Library file name</th>
|
||||
<th>Library group ID</th>
|
||||
<th>Library artifact ID</th>
|
||||
<th>Library version</th>
|
||||
<th>Description</th>
|
||||
<th>Top fix</th>
|
||||
</tr>
|
||||
|
||||
${vulnerabilityTable}
|
||||
|
||||
</table>
|
||||
</body>
|
||||
</html>
|
@ -16,13 +16,37 @@ general:
|
||||
from: 'origin/master'
|
||||
to: 'HEAD'
|
||||
format: '%b'
|
||||
solman:
|
||||
docker:
|
||||
image: 'ppiper/cm-client'
|
||||
options: []
|
||||
envVars: {}
|
||||
pullImage: true
|
||||
cts:
|
||||
docker:
|
||||
image: 'ppiper/cm-client'
|
||||
options: []
|
||||
envVars: {}
|
||||
pullImage: true
|
||||
rfc:
|
||||
docker:
|
||||
image: 'rfc'
|
||||
options: []
|
||||
envVars: {}
|
||||
pullImage: true
|
||||
githubApiUrl: 'https://api.github.com'
|
||||
githubServerUrl: 'https://github.com'
|
||||
gitSshKeyCredentialsId: '' #needed to allow sshagent to run with local ssh key
|
||||
jenkinsKubernetes:
|
||||
jnlpAgent: 's4sdk/jenkins-agent-k8s:latest'
|
||||
securityContext:
|
||||
# Setting security context globally is currently not working with jaas
|
||||
# runAsUser: 1000
|
||||
# fsGroup: 1000
|
||||
manualConfirmation: true
|
||||
productiveBranch: 'master'
|
||||
whitesource:
|
||||
serviceUrl: 'https://saas.whitesourcesoftware.com/api'
|
||||
|
||||
#Steps Specific Configuration
|
||||
steps:
|
||||
@ -197,6 +221,13 @@ steps:
|
||||
languageRunner: 'js'
|
||||
runCommand: 'gauge run'
|
||||
testOptions: 'specs'
|
||||
handlePipelineStepErrors:
|
||||
echoDetails: true
|
||||
failOnError: true
|
||||
libraryDocumentationUrl: 'https://sap.github.io/jenkins-library/'
|
||||
libraryRepositoryUrl: 'https://github.com/SAP/jenkins-library/'
|
||||
mandatorySteps: []
|
||||
stepTimeouts: {}
|
||||
healthExecuteCheck:
|
||||
healthEndpoint: ''
|
||||
influxWriteData:
|
||||
@ -254,11 +285,77 @@ steps:
|
||||
- 'tests'
|
||||
npmExecute:
|
||||
dockerImage: 'node:8-stretch'
|
||||
whitesourceExecuteScan:
|
||||
createProductFromPipeline: true
|
||||
emailAddressesOfInitialProductAdmins: []
|
||||
buildDescriptorExcludeList: []
|
||||
parallelLimit: 15
|
||||
licensingVulnerabilities: true
|
||||
securityVulnerabilities: true
|
||||
cvssSeverityLimit: -1
|
||||
reporting: true
|
||||
vulnerabilityReportFileName: 'piper_whitesource_vulnerability_report'
|
||||
vulnerabilityReportTitle: 'WhiteSource Security Vulnerability Report'
|
||||
projectNames: []
|
||||
jreDownloadUrl: 'https://github.com/SAP/SapMachine/releases/download/sapmachine-11.0.2/sapmachine-jre-11.0.2_linux-x64_bin.tar.gz'
|
||||
agentFileName: 'wss-unified-agent.jar'
|
||||
agentDownloadUrl: 'https://github.com/whitesource/unified-agent-distribution/raw/master/standAlone/${config.agentFileName}'
|
||||
agentParameters: ''
|
||||
configFilePath: './wss-unified-agent.config'
|
||||
mta:
|
||||
stashContent:
|
||||
- 'buildDescriptor'
|
||||
- 'opensourceConfiguration'
|
||||
maven:
|
||||
buildDescriptorFile: './pom.xml'
|
||||
dockerImage: 'maven:3.5-jdk-8'
|
||||
dockerWorkspace: '/home/java'
|
||||
stashContent:
|
||||
- 'buildDescriptor'
|
||||
- 'opensourceConfiguration'
|
||||
npm:
|
||||
buildDescriptorFile: './package.json'
|
||||
dockerImage: 'node:8-stretch'
|
||||
dockerWorkspace: '/home/node'
|
||||
stashContent:
|
||||
- 'buildDescriptor'
|
||||
- 'opensourceConfiguration'
|
||||
pip:
|
||||
buildDescriptorFile: './setup.py'
|
||||
dockerImage: 'python:3.7.2-stretch'
|
||||
dockerWorkspace: '/home/python'
|
||||
stashContent:
|
||||
- 'buildDescriptor'
|
||||
- 'opensourceConfiguration'
|
||||
golang:
|
||||
buildDescriptorFile: './Gopkg.toml'
|
||||
dockerImage: 'golang:1.12-stretch'
|
||||
dockerWorkspace: '/home/dep'
|
||||
stashContent:
|
||||
- 'buildDescriptor'
|
||||
- 'opensourceConfiguration'
|
||||
additionalInstallCommand: >-
|
||||
curl --fail https://raw.githubusercontent.com/golang/dep/master/install.sh | sh
|
||||
&& mkdir -p \$GOPATH/src/${config.whitesource.projectName.substring(0, config.whitesource.projectName.lastIndexOf('/'))}
|
||||
&& ln -s \$(pwd) \$GOPATH/src/${config.whitesource.projectName}
|
||||
&& cd \$GOPATH/src/${config.whitesource.projectName} && dep ensure
|
||||
sbt:
|
||||
buildDescriptorFile: './build.sbt'
|
||||
dockerImage: 'hseeberger/scala-sbt:8u181_2.12.8_1.2.8'
|
||||
dockerWorkspace: '/home/scala'
|
||||
stashContent:
|
||||
- 'buildDescriptor'
|
||||
- 'opensourceConfiguration'
|
||||
verbose: false
|
||||
timeout: 0
|
||||
pipelineExecute:
|
||||
branch: 'master'
|
||||
path: 'Jenkinsfile'
|
||||
credentialsId: ''
|
||||
pipelineRestartSteps:
|
||||
sendMail: true
|
||||
timeoutInSeconds: 900
|
||||
pipelineStashFilesAfterBuild:
|
||||
runOpaTests: false
|
||||
stashIncludes:
|
||||
checkmarx: '**/*.js, **/*.scala, **/*.py, **/*.go, **/*.xml, **/*.html'
|
||||
classFiles: '**/target/classes/**/*.class, **/target/test-classes/**/*.class'
|
||||
@ -267,14 +364,14 @@ steps:
|
||||
checkmarx: '**/*.mockserver.js, node_modules/**/*.js'
|
||||
classFiles: ''
|
||||
sonar: ''
|
||||
noDefaultExludes: []
|
||||
pipelineStashFilesBeforeBuild:
|
||||
runCheckmarx: false
|
||||
stashIncludes:
|
||||
buildDescriptor: '**/pom.xml, **/.mvn/**, **/assembly.xml, **/.swagger-codegen-ignore, **/package.json, **/requirements.txt, **/setup.py, **/whitesource_config.py, **/mta*.y*ml, **/.npmrc, **/whitesource.*.json, **/whitesource-fs-agent.config, Dockerfile, **/VERSION, **/version.txt, **/build.sbt, **/sbtDescriptor.json, **/project/*'
|
||||
buildDescriptor: '**/pom.xml, **/.mvn/**, **/assembly.xml, **/.swagger-codegen-ignore, **/package.json, **/requirements.txt, **/setup.py, **/mta*.y*ml, **/.npmrc, Dockerfile, **/VERSION, **/version.txt, **/Gopkg.*, **/build.sbt, **/sbtDescriptor.json, **/project/*'
|
||||
deployDescriptor: '**/manifest*.y*ml, **/*.mtaext.y*ml, **/*.mtaext, **/xs-app.json, helm/**, *.y*ml'
|
||||
git: '**/gitmetadata/**'
|
||||
git: '.git/**'
|
||||
opa5: '**/*.*'
|
||||
opensourceConfiguration: '**/srcclr.yml, **/vulas-custom.properties, **/.nsprc, **/.retireignore, **/.retireignore.json, **/.snyk'
|
||||
opensourceConfiguration: '**/srcclr.yml, **/vulas-custom.properties, **/.nsprc, **/.retireignore, **/.retireignore.json, **/.snyk, **/wss-unified-agent.config, **/vendor/**/*'
|
||||
pipelineConfigAndTests: '.pipeline/**'
|
||||
securityDescriptor: '**/xs-security.json'
|
||||
tests: '**/pom.xml, **/*.json, **/*.xml, **/src/**, **/node_modules/**, **/specs/**, **/env/**, **/*.js, **/tests/**'
|
||||
@ -287,6 +384,8 @@ steps:
|
||||
pipelineConfigAndTests: ''
|
||||
securityDescriptor: ''
|
||||
tests: ''
|
||||
noDefaultExludes:
|
||||
- 'git'
|
||||
seleniumExecuteTests:
|
||||
buildTool: 'npm'
|
||||
containerPortMappings:
|
||||
@ -363,8 +462,14 @@ steps:
|
||||
failIfStatusIsNotInDevelopment: true
|
||||
transportRequestCreate:
|
||||
developmentSystemId: null
|
||||
verbose: false
|
||||
transportRequestUploadFile:
|
||||
acceptUnixStyleLineEndings: true
|
||||
codePage: 'UTF-8'
|
||||
failOnWarning: true
|
||||
verbose: false
|
||||
transportRequestRelease:
|
||||
verbose: false
|
||||
uiVeri5ExecuteTests:
|
||||
failOnError: false
|
||||
dockerEnvVars: {}
|
||||
|
60
resources/piper-os.css
Normal file
60
resources/piper-os.css
Normal file
@ -0,0 +1,60 @@
|
||||
body {
|
||||
font-family: Arial, Verdana;
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
div.code {
|
||||
font-family: "Courier New", "Lucida Console";
|
||||
}
|
||||
th {
|
||||
border-top: 1px solid #ddd;
|
||||
}
|
||||
th, td {
|
||||
padding: 12px;
|
||||
text-align: left;
|
||||
border-bottom: 1px solid #ddd;
|
||||
border-right: 1px solid #ddd;
|
||||
}
|
||||
tr:nth-child(even) {
|
||||
background-color: #f2f2f2;
|
||||
}
|
||||
.bold {
|
||||
font-weight: bold;
|
||||
}
|
||||
.nobullets {
|
||||
list-style-type:none;
|
||||
padding-left: 0;
|
||||
padding-bottom: 0;
|
||||
margin: 0;
|
||||
}
|
||||
.notok {
|
||||
background-color: #ffe5e5;
|
||||
padding: 5px
|
||||
}
|
||||
.warn {
|
||||
background-color: #ffff99;
|
||||
padding: 5px
|
||||
}
|
||||
.ok {
|
||||
background-color: #e1f5a9;
|
||||
padding: 5px
|
||||
}
|
||||
.green{
|
||||
color: olivedrab;
|
||||
}
|
||||
.red{
|
||||
color: orangered;
|
||||
}
|
||||
.risk-yellow{
|
||||
padding: 5px;
|
||||
color: rgba(255, 255, 0, 0.6);
|
||||
}
|
||||
.risk-grey{
|
||||
background-color: rgba(212, 212, 212, 0.7);
|
||||
padding: 5px;
|
||||
}
|
||||
.risk-black{
|
||||
background-color: rgba(0, 0, 0, 0.75);
|
||||
padding: 5px;
|
||||
}
|
5
src/com/sap/piper/CloudPlatform.groovy
Normal file
5
src/com/sap/piper/CloudPlatform.groovy
Normal file
@ -0,0 +1,5 @@
|
||||
package com.sap.piper
|
||||
|
||||
enum CloudPlatform {
|
||||
NEO, CLOUD_FOUNDRY
|
||||
}
|
@ -5,14 +5,17 @@ import com.cloudbees.groovy.cps.NonCPS
|
||||
@API
|
||||
class ConfigurationHelper implements Serializable {
|
||||
|
||||
def static SEPARATOR = '/'
|
||||
|
||||
static ConfigurationHelper newInstance(Script step, Map config = [:]) {
|
||||
new ConfigurationHelper(step, config)
|
||||
}
|
||||
|
||||
ConfigurationHelper loadStepDefaults() {
|
||||
ConfigurationHelper loadStepDefaults(Map compatibleParameters = [:]) {
|
||||
this.step.prepareDefaultValues()
|
||||
this.config = ConfigurationLoader.defaultGeneralConfiguration()
|
||||
mixin(ConfigurationLoader.defaultStepConfiguration(null, name))
|
||||
mixin(ConfigurationLoader.defaultGeneralConfiguration(), null, compatibleParameters)
|
||||
mixin(ConfigurationLoader.defaultStepConfiguration(null, name), null, compatibleParameters)
|
||||
}
|
||||
|
||||
private Map config
|
||||
@ -33,8 +36,8 @@ class ConfigurationHelper implements Serializable {
|
||||
}
|
||||
|
||||
ConfigurationHelper mixinGeneralConfig(commonPipelineEnvironment, Set filter = null, Map compatibleParameters = [:]){
|
||||
Map stepConfiguration = ConfigurationLoader.generalConfiguration([commonPipelineEnvironment: commonPipelineEnvironment])
|
||||
return mixin(stepConfiguration, filter, compatibleParameters)
|
||||
Map generalConfiguration = ConfigurationLoader.generalConfiguration([commonPipelineEnvironment: commonPipelineEnvironment])
|
||||
return mixin(generalConfiguration, filter, compatibleParameters)
|
||||
}
|
||||
|
||||
ConfigurationHelper mixinStageConfig(commonPipelineEnvironment, stageName, Set filter = null, Map compatibleParameters = [:]){
|
||||
@ -58,21 +61,24 @@ class ConfigurationHelper implements Serializable {
|
||||
return this
|
||||
}
|
||||
|
||||
private Map handleCompatibility(Map compatibleParameters, String paramStructure = '', Map configMap ) {
|
||||
private Map handleCompatibility(Map compatibleParameters, String paramStructure = '', Map configMap, Map newConfigMap = [:] ) {
|
||||
Map newConfig = [:]
|
||||
compatibleParameters.each {entry ->
|
||||
if (entry.getValue() instanceof Map) {
|
||||
paramStructure = (paramStructure ? paramStructure + '.' : '') + entry.getKey()
|
||||
newConfig[entry.getKey()] = handleCompatibility(entry.getValue(), paramStructure, configMap)
|
||||
def internalParamStructure = (paramStructure ? paramStructure + '.' : '') + entry.getKey()
|
||||
newConfig[entry.getKey()] = handleCompatibility(entry.getValue(), internalParamStructure, configMap, newConfig)
|
||||
} else {
|
||||
def configSubMap = configMap
|
||||
for(String key in paramStructure.tokenize('.')){
|
||||
configSubMap = configSubMap?.get(key)
|
||||
}
|
||||
if (configSubMap == null || (configSubMap != null && configSubMap[entry.getKey()] == null)) {
|
||||
newConfig[entry.getKey()] = configMap[entry.getValue()]
|
||||
def paramName = (paramStructure ? paramStructure + '.' : '') + entry.getKey()
|
||||
if (configMap[entry.getValue()] != null) {
|
||||
def value = configMap[entry.getValue()]
|
||||
if(null == value)
|
||||
value = newConfigMap[entry.getValue()]
|
||||
if (value != null) {
|
||||
newConfig[entry.getKey()] = value
|
||||
def paramName = (paramStructure ? paramStructure + '.' : '') + entry.getKey()
|
||||
this.step.echo ("[INFO] The parameter '${entry.getValue()}' is COMPATIBLE to the parameter '${paramName}'")
|
||||
}
|
||||
}
|
||||
@ -84,9 +90,16 @@ class ConfigurationHelper implements Serializable {
|
||||
Map dependingOn(dependentKey){
|
||||
return [
|
||||
mixin: {key ->
|
||||
def parts = tokenizeKey(key)
|
||||
def targetMap = config
|
||||
if(parts.size() > 1) {
|
||||
key = parts.last()
|
||||
parts.remove(key)
|
||||
targetMap = getConfigPropertyNested(config, (parts as Iterable).join(SEPARATOR))
|
||||
}
|
||||
def dependentValue = config[dependentKey]
|
||||
if(config[key] == null && dependentValue && config[dependentValue])
|
||||
config[key] = config[dependentValue][key]
|
||||
if(targetMap[key] == null && dependentValue && config[dependentValue])
|
||||
targetMap[key] = config[dependentValue][key]
|
||||
return this
|
||||
}
|
||||
]
|
||||
@ -123,26 +136,28 @@ class ConfigurationHelper implements Serializable {
|
||||
|
||||
/* private */ static getConfigPropertyNested(Map config, key) {
|
||||
|
||||
def separator = '/'
|
||||
List parts = tokenizeKey(key)
|
||||
|
||||
// reason for cast to CharSequence: String#tokenize(./.) causes a deprecation warning.
|
||||
List parts = (key in String) ? (key as CharSequence).tokenize(separator) : ([key] as List)
|
||||
if (config[parts.head()] != null) {
|
||||
|
||||
if(config[parts.head()] != null) {
|
||||
|
||||
if(config[parts.head()] in Map && ! parts.tail().isEmpty()) {
|
||||
return getConfigPropertyNested(config[parts.head()], (parts.tail() as Iterable).join(separator))
|
||||
if (config[parts.head()] in Map && !parts.tail().isEmpty()) {
|
||||
return getConfigPropertyNested(config[parts.head()], (parts.tail() as Iterable).join(SEPARATOR))
|
||||
}
|
||||
|
||||
if (config[parts.head()].class == String) {
|
||||
return (config[parts.head()] as String).trim()
|
||||
}
|
||||
}
|
||||
|
||||
return config[parts.head()]
|
||||
}
|
||||
|
||||
private void existsMandatoryProperty(key, errorMessage) {
|
||||
/* private */ static tokenizeKey(String key) {
|
||||
// reason for cast to CharSequence: String#tokenize(./.) causes a deprecation warning.
|
||||
List parts = (key in String) ? (key as CharSequence).tokenize(SEPARATOR) : ([key] as List)
|
||||
return parts
|
||||
}
|
||||
|
||||
private void existsMandatoryProperty(key, errorMessage) {
|
||||
|
||||
def paramValue = getConfigPropertyNested(config, key)
|
||||
|
||||
|
@ -26,7 +26,11 @@ class ConfigurationLoader implements Serializable {
|
||||
|
||||
@NonCPS
|
||||
static Map generalConfiguration(script){
|
||||
return script?.commonPipelineEnvironment?.configuration?.general ?: [:]
|
||||
try {
|
||||
return script?.commonPipelineEnvironment?.configuration?.general ?: [:]
|
||||
} catch (groovy.lang.MissingPropertyException mpe) {
|
||||
return [:]
|
||||
}
|
||||
}
|
||||
|
||||
@NonCPS
|
||||
@ -43,7 +47,12 @@ class ConfigurationLoader implements Serializable {
|
||||
private static Map loadConfiguration(script, String type, String entryName, ConfigurationType configType){
|
||||
switch (configType) {
|
||||
case ConfigurationType.CUSTOM_CONFIGURATION:
|
||||
return script?.commonPipelineEnvironment?.configuration?.get(type)?.get(entryName) ?: [:]
|
||||
try {
|
||||
return script?.commonPipelineEnvironment?.configuration?.get(type)?.get(entryName) ?: [:]
|
||||
} catch (groovy.lang.MissingPropertyException mpe) {
|
||||
return [:]
|
||||
}
|
||||
|
||||
case ConfigurationType.DEFAULT_CONFIGURATION:
|
||||
return DefaultValueCache.getInstance()?.getDefaultValues()?.get(type)?.get(entryName) ?: [:]
|
||||
default:
|
||||
|
34
src/com/sap/piper/DeploymentType.groovy
Normal file
34
src/com/sap/piper/DeploymentType.groovy
Normal file
@ -0,0 +1,34 @@
|
||||
package com.sap.piper
|
||||
|
||||
enum DeploymentType {
|
||||
|
||||
NEO_ROLLING_UPDATE('rolling-update'), CF_BLUE_GREEN('blue-green'), CF_STANDARD('standard'), NEO_DEPLOY('deploy')
|
||||
|
||||
private String value
|
||||
|
||||
public DeploymentType(String value){
|
||||
this.value = value
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(){
|
||||
return value
|
||||
}
|
||||
|
||||
static DeploymentType selectFor(CloudPlatform cloudPlatform, boolean enableZeroDowntimeDeployment) {
|
||||
|
||||
switch (cloudPlatform) {
|
||||
|
||||
case CloudPlatform.NEO:
|
||||
if (enableZeroDowntimeDeployment) return NEO_ROLLING_UPDATE
|
||||
return NEO_DEPLOY
|
||||
|
||||
case CloudPlatform.CLOUD_FOUNDRY:
|
||||
if (enableZeroDowntimeDeployment) return CF_BLUE_GREEN
|
||||
return CF_STANDARD
|
||||
|
||||
default:
|
||||
throw new RuntimeException("Unknown cloud platform: ${cloudPlatform}")
|
||||
}
|
||||
}
|
||||
}
|
128
src/com/sap/piper/DescriptorUtils.groovy
Normal file
128
src/com/sap/piper/DescriptorUtils.groovy
Normal file
@ -0,0 +1,128 @@
|
||||
package com.sap.piper
|
||||
|
||||
import com.cloudbees.groovy.cps.NonCPS
|
||||
import groovy.transform.Field
|
||||
|
||||
import java.util.regex.Matcher
|
||||
import java.util.regex.Pattern
|
||||
|
||||
@Field
|
||||
def name = Pattern.compile("(.*)name=['\"](.*?)['\"](.*)", Pattern.DOTALL)
|
||||
@Field
|
||||
def version = Pattern.compile("(.*)version=['\"](.*?)['\"](.*)", Pattern.DOTALL)
|
||||
@Field
|
||||
def method = Pattern.compile("(.*)\\(\\)", Pattern.DOTALL)
|
||||
|
||||
def getMavenGAV(file = 'pom.xml') {
|
||||
def result = [:]
|
||||
def descriptor = readMavenPom(file: file)
|
||||
def group = descriptor.getGroupId()
|
||||
def artifact = descriptor.getArtifactId()
|
||||
def version = descriptor.getVersion()
|
||||
result['packaging'] = descriptor.getPackaging()
|
||||
result['group'] = (null != group && group.length() > 0) ? group : sh(returnStdout: true, script: "mvn -f ${file} help:evaluate -Dexpression=project.groupId | grep -Ev '(^\\s*\\[|Download|Java\\w+:)'").trim()
|
||||
result['artifact'] = (null != artifact && artifact.length() > 0) ? artifact : sh(returnStdout: true, script: "mvn -f ${file} help:evaluate -Dexpression=project.artifactId | grep -Ev '(^\\s*\\[|Download|Java\\w+:)'").trim()
|
||||
result['version'] = (null != version && version.length() > 0) ? version : sh(returnStdout: true, script: "mvn -f ${file} help:evaluate -Dexpression=project.version | grep ^[0-9].*").trim()
|
||||
echo "loaded ${result} from ${file}"
|
||||
return result
|
||||
}
|
||||
|
||||
def getNpmGAV(file = 'package.json') {
|
||||
def result = [:]
|
||||
def descriptor = readJSON(file: file)
|
||||
|
||||
if (descriptor.name.startsWith('@')) {
|
||||
def packageNameArray = descriptor.name.split('/')
|
||||
if (packageNameArray.length != 2)
|
||||
error "Unable to parse package name '${descriptor.name}'"
|
||||
result['group'] = packageNameArray[0]
|
||||
result['artifact'] = packageNameArray[1]
|
||||
} else {
|
||||
result['group'] = ''
|
||||
result['artifact'] = descriptor.name
|
||||
}
|
||||
result['version'] = descriptor.version
|
||||
echo "loaded ${result} from ${file}"
|
||||
return result
|
||||
}
|
||||
|
||||
def getDlangGAV(file = 'dub.json') {
|
||||
def result = [:]
|
||||
def descriptor = readJSON(file: file)
|
||||
|
||||
result['group'] = 'com.sap.dlang'
|
||||
result['artifact'] = descriptor.name
|
||||
result['version'] = descriptor.version
|
||||
result['packaging'] = 'tar.gz'
|
||||
echo "loaded ${result} from ${file}"
|
||||
return result
|
||||
}
|
||||
|
||||
def getSbtGAV(file = 'sbtDescriptor.json') {
|
||||
def result = [:]
|
||||
def descriptor = readJSON(file: file)
|
||||
|
||||
result['group'] = descriptor.group
|
||||
result['artifact'] = descriptor.artifactId
|
||||
result['version'] = descriptor.version
|
||||
result['packaging'] = descriptor.packaging
|
||||
echo "loaded ${result} from ${file}"
|
||||
return result
|
||||
}
|
||||
|
||||
def getPipGAV(file = 'setup.py') {
|
||||
def result = [:]
|
||||
def descriptor = readFile(file: file)
|
||||
|
||||
result['group'] = ''
|
||||
result['packaging'] = ''
|
||||
result['artifact'] = matches(name, descriptor)
|
||||
result['version'] = matches(version, descriptor)
|
||||
|
||||
if (result['version'] == '' || matches(method, result['version'])) {
|
||||
file = file.replace('setup.py', 'version.txt')
|
||||
result['version'] = getVersionFromFile(file)
|
||||
}
|
||||
|
||||
echo "loaded ${result} from ${file}"
|
||||
return result
|
||||
}
|
||||
|
||||
def getGoGAV(file = 'Gopkg.toml', URI repoUrl) {
|
||||
def name = "${repoUrl.getHost()}${repoUrl.getPath().replaceAll(/\.git/, '')}"
|
||||
def path = file.substring(0, file.lastIndexOf('/') + 1)
|
||||
def module = path?.replaceAll(/\./, '')?.replaceAll('/', '')
|
||||
def result = [:]
|
||||
|
||||
result['group'] = ''
|
||||
result['packaging'] = ''
|
||||
result['artifact'] = "${name}${module?'.':''}${module?:''}".toString()
|
||||
file = path + 'version.txt'
|
||||
result['version'] = getVersionFromFile(file)
|
||||
|
||||
if (!result['version']) {
|
||||
file = path + 'VERSION'
|
||||
result['version'] = getVersionFromFile(file)
|
||||
}
|
||||
|
||||
echo "loaded ${result} from ${file}"
|
||||
return result
|
||||
}
|
||||
|
||||
private getVersionFromFile(file) {
|
||||
try {
|
||||
def versionString = readFile(file: file)
|
||||
if (versionString) {
|
||||
return versionString.trim()
|
||||
}
|
||||
} catch (java.nio.file.NoSuchFileException e) {
|
||||
echo "Failed to load version string from file ${file} due to ${e}"
|
||||
}
|
||||
return ''
|
||||
}
|
||||
|
||||
@NonCPS
|
||||
private def matches(regex, input) {
|
||||
def m = new Matcher(regex, input)
|
||||
return m.matches() ? m.group(2) : ''
|
||||
}
|
@ -1,21 +0,0 @@
|
||||
package com.sap.piper
|
||||
|
||||
import hudson.AbortException
|
||||
|
||||
|
||||
class EnvironmentUtils implements Serializable {
|
||||
|
||||
static boolean isEnvironmentVariable(script, variable) {
|
||||
return !getEnvironmentVariable(script, variable).isEmpty()
|
||||
}
|
||||
|
||||
static String getEnvironmentVariable(script, variable) {
|
||||
try {
|
||||
def envVar = script.sh returnStdout: true, script: """#!/bin/bash --login
|
||||
echo \$$variable"""
|
||||
return envVar.trim()
|
||||
} catch(AbortException e) {
|
||||
throw new AbortException("There was an error requesting the environment variable '$variable'. Reason: $e.message.")
|
||||
}
|
||||
}
|
||||
}
|
@ -1,91 +0,0 @@
|
||||
package com.sap.piper
|
||||
|
||||
import hudson.AbortException
|
||||
|
||||
class FileUtils implements Serializable {
|
||||
|
||||
static boolean directoryOrFileExists(script, dirOrFile) {
|
||||
if (!dirOrFile) throw new IllegalArgumentException("The parameter 'dirOrFile' can not be null or empty.")
|
||||
def returnStatus = script.sh returnStatus: true, script: """
|
||||
set +x
|
||||
if [ -d $dirOrFile ]; then
|
||||
echo \"$dirOrFile exists.\"
|
||||
exit 0
|
||||
elif [ -f $dirOrFile ]; then
|
||||
echo \"$dirOrFile exists.\"
|
||||
exit 0
|
||||
else
|
||||
echo \"$dirOrFile does not exist.\"
|
||||
exit 1
|
||||
fi
|
||||
"""
|
||||
return returnStatus == 0
|
||||
}
|
||||
|
||||
static boolean isDirectory(script, dir) {
|
||||
if (!dir) throw new IllegalArgumentException("The parameter 'dir' can not be null or empty.")
|
||||
def returnStatus = script.sh returnStatus: true, script: """
|
||||
set +x
|
||||
if [ -d $dir ]; then
|
||||
echo \"$dir is a directory.\"
|
||||
exit 0
|
||||
else
|
||||
echo \"$dir is not a directory.\"
|
||||
exit 1
|
||||
fi
|
||||
"""
|
||||
return returnStatus == 0
|
||||
}
|
||||
|
||||
static boolean isDirectoryEmpty(script, dir) {
|
||||
if (!dir) throw new IllegalArgumentException("The parameter 'dir' can not be null or empty.")
|
||||
def returnStatus = script.sh returnStatus: true, script: """
|
||||
set +x
|
||||
if [ -z "\$(ls -A $dir)" ]; then
|
||||
echo "$dir is empty."
|
||||
exit 1
|
||||
else
|
||||
echo "$dir is not empty."
|
||||
exit 0
|
||||
fi
|
||||
"""
|
||||
return returnStatus == 1
|
||||
}
|
||||
|
||||
static boolean isFile(script, filePath) {
|
||||
if (!filePath) throw new IllegalArgumentException("The parameter 'filePath' can not be null or empty.")
|
||||
def returnStatus = script.sh returnStatus: true, script: """
|
||||
set +x
|
||||
if [ -f $filePath ]; then
|
||||
echo \"$filePath is a file.\"
|
||||
exit 0
|
||||
else
|
||||
echo \"$filePath is not a file.\"
|
||||
exit 1
|
||||
fi
|
||||
"""
|
||||
return returnStatus == 0
|
||||
}
|
||||
|
||||
static validateDirectoryOrFileExists(script, dirOrFile) {
|
||||
if (!dirOrFile) throw new IllegalArgumentException("The parameter 'dirOrFile' can not be null or empty.")
|
||||
if (!directoryOrFileExists(script, dirOrFile)) throw new AbortException("Validation failed. '$dirOrFile' does not exist.")
|
||||
}
|
||||
|
||||
static validateDirectory(script, dir) {
|
||||
if (!dir) throw new IllegalArgumentException("The parameter 'dir' can not be null or empty.")
|
||||
validateDirectoryOrFileExists(script, dir)
|
||||
if (!isDirectory(script, dir)) throw new AbortException("Validation failed. '$dir' is not a directory.")
|
||||
}
|
||||
|
||||
static validateDirectoryIsNotEmpty(script, dir) {
|
||||
validateDirectory(script, dir)
|
||||
if (isDirectoryEmpty(script, dir)) throw new AbortException("Validation failed. '$dir' is empty.")
|
||||
}
|
||||
|
||||
static validateFile(script, filePath) {
|
||||
if (!filePath) throw new IllegalArgumentException("The parameter 'filePath' can not be null or empty.")
|
||||
validateDirectoryOrFileExists(script, filePath)
|
||||
if (!isFile(script, filePath)) throw new AbortException("Validation failed. '$filePath' is not a file.")
|
||||
}
|
||||
}
|
@ -3,6 +3,11 @@ package com.sap.piper
|
||||
import com.cloudbees.groovy.cps.NonCPS
|
||||
|
||||
@NonCPS
|
||||
String getPrettyJsonString(object) {
|
||||
String groovyObjectToPrettyJsonString(object) {
|
||||
return groovy.json.JsonOutput.prettyPrint(groovy.json.JsonOutput.toJson(object))
|
||||
}
|
||||
|
||||
@NonCPS
|
||||
def jsonStringToGroovyObject(text) {
|
||||
return new groovy.json.JsonSlurperClassic().parseText(text)
|
||||
}
|
||||
|
@ -7,23 +7,30 @@ import groovy.text.SimpleTemplateEngine
|
||||
import java.nio.charset.StandardCharsets
|
||||
import java.security.MessageDigest
|
||||
|
||||
@NonCPS
|
||||
def getMandatoryParameter(Map map, paramName, defaultValue = null) {
|
||||
|
||||
def paramValue = map[paramName]
|
||||
|
||||
if (paramValue == null)
|
||||
paramValue = defaultValue
|
||||
|
||||
if (paramValue == null)
|
||||
throw new Exception("ERROR - NO VALUE AVAILABLE FOR ${paramName}")
|
||||
return paramValue
|
||||
def stash(name, include = '**/*.*', exclude = '', useDefaultExcludes = true) {
|
||||
echo "Stash content: ${name} (include: ${include}, exclude: ${exclude}, useDefaultExcludes: ${useDefaultExcludes})"
|
||||
|
||||
Map stashParams = [
|
||||
name: name,
|
||||
includes: include,
|
||||
excludes: exclude
|
||||
]
|
||||
//only set the optional parameter if default excludes should not be applied
|
||||
if (!useDefaultExcludes) {
|
||||
stashParams.useDefaultExcludes = useDefaultExcludes
|
||||
}
|
||||
steps.stash stashParams
|
||||
}
|
||||
|
||||
def stash(name, include = '**/*.*', exclude = '') {
|
||||
echo "Stash content: ${name} (include: ${include}, exclude: ${exclude})"
|
||||
steps.stash name: name, includes: include, excludes: exclude
|
||||
@NonCPS
|
||||
def runClosures(Map closures) {
|
||||
|
||||
def closuresToRun = closures.values().asList()
|
||||
Collections.shuffle(closuresToRun) // Shuffle the list so no one tries to rely on the order of execution
|
||||
for (int i = 0; i < closuresToRun.size(); i++) {
|
||||
(closuresToRun[i] as Closure).run()
|
||||
}
|
||||
}
|
||||
|
||||
def stashList(script, List stashes) {
|
||||
@ -46,15 +53,16 @@ def stashList(script, List stashes) {
|
||||
}
|
||||
}
|
||||
|
||||
def stashWithMessage(name, msg, include = '**/*.*', exclude = '') {
|
||||
def stashWithMessage(name, msg, include = '**/*.*', exclude = '', useDefaultExcludes = true) {
|
||||
try {
|
||||
stash(name, include, exclude)
|
||||
stash(name, include, exclude, useDefaultExcludes)
|
||||
} catch (e) {
|
||||
echo msg + name + " (${e.getMessage()})"
|
||||
}
|
||||
}
|
||||
|
||||
def unstash(name, msg = "Unstash failed:") {
|
||||
|
||||
def unstashedContent = []
|
||||
try {
|
||||
echo "Unstash content: ${name}"
|
||||
@ -70,7 +78,9 @@ def unstashAll(stashContent) {
|
||||
def unstashedContent = []
|
||||
if (stashContent) {
|
||||
for (i = 0; i < stashContent.size(); i++) {
|
||||
unstashedContent += unstash(stashContent[i])
|
||||
if(stashContent[i]) {
|
||||
unstashedContent += unstash(stashContent[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
return unstashedContent
|
||||
|
@ -1,52 +0,0 @@
|
||||
package com.sap.piper
|
||||
|
||||
import hudson.AbortException
|
||||
|
||||
|
||||
class Version implements Serializable {
|
||||
|
||||
final def major
|
||||
final def minor
|
||||
final def patch
|
||||
|
||||
Version(major, minor, patch = -1) {
|
||||
if (major < 0) throw new IllegalArgumentException("The parameter 'major' can not have a value less than 0.")
|
||||
if (minor < 0) throw new IllegalArgumentException("The parameter 'minor' can not have a value less than 0.")
|
||||
this.major = major
|
||||
this.minor = minor
|
||||
this.patch = patch
|
||||
}
|
||||
|
||||
Version(text) {
|
||||
if (!text) throw new IllegalArgumentException("The parameter 'text' can not be null or empty.")
|
||||
def group = text =~ /(\d+[.]\d+[.]\d+)/
|
||||
if (!group) throw new AbortException("The version '$text' has an unexpected format. The expected format is <major.minor.patch>.")
|
||||
def i = group[0].size()-1
|
||||
def versionNumbers = group[0][i].split("\\.")
|
||||
major = versionNumbers[0].toInteger()
|
||||
minor = versionNumbers[1].toInteger()
|
||||
patch = versionNumbers[2].toInteger()
|
||||
}
|
||||
|
||||
@Override
|
||||
boolean equals(version) {
|
||||
if (!version) throw new IllegalArgumentException("The parameter 'version' can not be null.")
|
||||
return major == version.major && minor == version.minor && patch == version.patch
|
||||
}
|
||||
|
||||
boolean isHigher(version) {
|
||||
if (!version) throw new IllegalArgumentException("The parameter 'version' can not be null.")
|
||||
return major > version.major || major == version.major && ( minor > version.minor || minor == version.minor && patch > version.patch)
|
||||
}
|
||||
|
||||
boolean isCompatibleVersion(version) {
|
||||
if (!version) throw new IllegalArgumentException("The parameter 'version' can not be null.")
|
||||
return this == version || isHigher(version) && major == version.major
|
||||
}
|
||||
|
||||
@Override
|
||||
String toString() {
|
||||
return patch != -1 ? "$major.$minor.$patch".toString() : "$major.$minor".toString()
|
||||
}
|
||||
}
|
||||
|
@ -1,73 +0,0 @@
|
||||
package com.sap.piper
|
||||
|
||||
import hudson.AbortException
|
||||
|
||||
|
||||
class VersionUtils implements Serializable {
|
||||
|
||||
def static getVersion(script, name, executable, versionOption) {
|
||||
|
||||
return new Version(getVersionDesc(script, name, executable, versionOption))
|
||||
}
|
||||
|
||||
def static getVersionDesc(script, name, executable, versionOption) {
|
||||
|
||||
def toolVersion
|
||||
try {
|
||||
toolVersion = script.sh returnStdout: true, script: """#!/bin/bash
|
||||
$executable $versionOption"""
|
||||
} catch(AbortException e) {
|
||||
throw new AbortException("The verification of $name failed. Please check '$executable'. $e.message.")
|
||||
}
|
||||
|
||||
return toolVersion
|
||||
}
|
||||
|
||||
def static verifyVersion(script, name, executable, String version, versionOption) {
|
||||
|
||||
script.echo "Verifying $name version $version or compatible version."
|
||||
|
||||
Version installedVersion = getVersion(script, name, executable, versionOption)
|
||||
|
||||
if (!installedVersion.isCompatibleVersion(new Version(version))) {
|
||||
throw new AbortException("The installed version of $name is ${installedVersion.toString()}. Please install version $version or a compatible version.")
|
||||
}
|
||||
script.echo "Verification success. $name version ${installedVersion.toString()} is installed."
|
||||
}
|
||||
|
||||
def static verifyVersion(script, name, String versionDesc, String versionExpected) {
|
||||
|
||||
script.echo "Verifying $name version $versionExpected or compatible version."
|
||||
|
||||
Version versionAvailable = new Version(versionDesc)
|
||||
|
||||
if (!versionAvailable.isCompatibleVersion(new Version(versionExpected))) {
|
||||
throw new AbortException("The installed version of $name is ${versionAvailable.toString()}. Please install version $versionExpected or a compatible version.")
|
||||
}
|
||||
script.echo "Verification success. $name version ${versionAvailable.toString()} is installed."
|
||||
}
|
||||
|
||||
|
||||
def static verifyVersion(script, name, executable, Map versions, versionOption) {
|
||||
|
||||
def versionDesc = getVersionDesc(script, name, executable, versionOption)
|
||||
|
||||
verifyVersion(script, name, versionDesc, versions)
|
||||
}
|
||||
|
||||
def static verifyVersion(script, name, String versionDesc, Map versions) {
|
||||
|
||||
for (def entry : versions) {
|
||||
if (versionDesc.contains(entry.getKey())) {
|
||||
def installedVersion = new Version(versionDesc)
|
||||
def expectedVersion = entry.getValue()
|
||||
script.echo "Verifying $name version $expectedVersion or compatible version."
|
||||
if (!installedVersion.isCompatibleVersion(new Version(expectedVersion))) {
|
||||
throw new AbortException("The installed version of $name is ${installedVersion.toString()}. Please install version $expectedVersion or a compatible version.")
|
||||
}
|
||||
script.echo "Verification success. $name version ${installedVersion.toString()} is installed."
|
||||
}
|
||||
}
|
||||
script.echo "Verification success."
|
||||
}
|
||||
}
|
129
src/com/sap/piper/WhitesourceConfigurationHelper.groovy
Normal file
129
src/com/sap/piper/WhitesourceConfigurationHelper.groovy
Normal file
@ -0,0 +1,129 @@
|
||||
package com.sap.piper
|
||||
|
||||
import com.cloudbees.groovy.cps.NonCPS
|
||||
|
||||
class WhitesourceConfigurationHelper implements Serializable {
|
||||
|
||||
static def extendUAConfigurationFile(script, utils, config, path) {
|
||||
def mapping = []
|
||||
def parsingClosure = { fileReadPath -> return script.readProperties (file: fileReadPath) }
|
||||
def serializationClosure = { configuration -> serializeUAConfig(configuration) }
|
||||
def inputFile = config.whitesource.configFilePath.replaceFirst('\\./', '')
|
||||
def suffix = utils.generateSha1(config.whitesource.configFilePath)
|
||||
def targetFile = "${inputFile}.${suffix}"
|
||||
if(config.whitesource.productName.startsWith('DIST - ')) {
|
||||
mapping += [
|
||||
[name: 'checkPolicies', value: false, force: true],
|
||||
[name: 'forceCheckAllDependencies', value: false, force: true]
|
||||
]
|
||||
} else {
|
||||
mapping += [
|
||||
[name: 'checkPolicies', value: true, force: true],
|
||||
[name: 'forceCheckAllDependencies', value: true, force: true]
|
||||
]
|
||||
}
|
||||
if(config.verbose)
|
||||
mapping += [name: 'log.level', value: 'debug']
|
||||
|
||||
mapping += [
|
||||
[name: 'apiKey', value: config.whitesource.orgToken, force: true],
|
||||
[name: 'productName', value: config.whitesource.productName, force: true],
|
||||
[name: 'productVersion', value: config.whitesource.productVersion?:'', force: true],
|
||||
[name: 'projectName', value: config.whitesource.projectName, force: true],
|
||||
[name: 'projectVersion', value: config.whitesource.productVersion?:'', force: true],
|
||||
[name: 'productToken', value: config.whitesource.productToken, omitIfPresent: 'projectToken', force: true],
|
||||
[name: 'userKey', value: config.whitesource.userKey, force: true],
|
||||
[name: 'forceUpdate', value: true, force: true],
|
||||
[name: 'offline', value: false, force: true],
|
||||
[name: 'ignoreSourceFiles', value: true, force: true],
|
||||
[name: 'resolveAllDependencies', value: false, force: true],
|
||||
[name: 'failErrorLevel', value: 'ALL', force: true],
|
||||
[name: 'case.sensitive.glob', value: false],
|
||||
[name: 'followSymbolicLinks', value: true]
|
||||
]
|
||||
|
||||
switch (config.scanType) {
|
||||
case 'pip':
|
||||
mapping += [
|
||||
[name: 'python.resolveDependencies', value: true, force: true],
|
||||
[name: 'python.ignoreSourceFiles', value: true, force: true],
|
||||
[name: 'python.ignorePipInstallErrors', value: false],
|
||||
[name: 'python.installVirtualenv', value: true],
|
||||
[name: 'python.resolveHierarchyTree', value: true],
|
||||
[name: 'python.requirementsFileIncludes', value: 'requirements.txt'],
|
||||
[name: 'python.resolveSetupPyFiles', value: true],
|
||||
[name: 'python.runPipenvPreStep', value: true],
|
||||
[name: 'python.pipenvDevDependencies', value: true],
|
||||
[name: 'python.IgnorePipenvInstallErrors', value: false],
|
||||
[name: 'includes', value: '**/*.py **/*.txt'],
|
||||
[name: 'excludes', value: '**/*sources.jar **/*javadoc.jar']
|
||||
]
|
||||
break
|
||||
case 'sbt':
|
||||
mapping += [
|
||||
[name: 'sbt.resolveDependencies', value: true, force: true],
|
||||
[name: 'sbt.ignoreSourceFiles', value: true, force: true],
|
||||
[name: 'sbt.aggregateModules', value: false, force: true],
|
||||
[name: 'sbt.runPreStep', value: true],
|
||||
[name: 'includes', value: '**/*.jar'],
|
||||
[name: 'excludes', value: '**/*sources.jar **/*javadoc.jar']
|
||||
]
|
||||
break
|
||||
case 'golang':
|
||||
mapping += [
|
||||
[name: 'go.resolveDependencies', value: true, force: true],
|
||||
[name: 'go.ignoreSourceFiles', value: true, force: true],
|
||||
[name: 'go.collectDependenciesAtRuntime', value: false],
|
||||
[name: 'go.dependencyManager', value: 'dep'],
|
||||
[name: 'includes', value: '**/*.lock'],
|
||||
[name: 'excludes', value: '**/*sources.jar **/*javadoc.jar']
|
||||
]
|
||||
break
|
||||
default:
|
||||
script.echo "[Warning][Whitesource] Configuration for scanType: '${config.scanType}' is not yet hardened, please do a quality assessment of your scan results."
|
||||
}
|
||||
|
||||
rewriteConfiguration(script, utils, config, mapping, suffix, path, inputFile, targetFile, parsingClosure, serializationClosure)
|
||||
}
|
||||
|
||||
static private def rewriteConfiguration(script, utils, config, mapping, suffix, path, inputFile, targetFile, parsingClosure, serializationClosure) {
|
||||
def inputFilePath = "${path}${inputFile}"
|
||||
def outputFilePath = "${path}${targetFile}"
|
||||
def moduleSpecificFile = parsingClosure(inputFilePath)
|
||||
if (!moduleSpecificFile && inputFilePath != config.whitesource.configFilePath)
|
||||
moduleSpecificFile = parsingClosure(config.whitesource.configFilePath)
|
||||
if (!moduleSpecificFile)
|
||||
moduleSpecificFile = [:]
|
||||
|
||||
mapping.each {
|
||||
entry ->
|
||||
def dependentValue = entry.omitIfPresent ? moduleSpecificFile[entry.omitIfPresent] : null
|
||||
if ((entry.omitIfPresent && !dependentValue || !entry.omitIfPresent) && (entry.force || moduleSpecificFile[entry.name] == null) && entry.value != 'null')
|
||||
moduleSpecificFile[entry.name] = entry.value.toString()
|
||||
}
|
||||
|
||||
def output = serializationClosure(moduleSpecificFile)
|
||||
|
||||
if(config.verbose)
|
||||
script.echo "Writing config file ${outputFilePath} with content:\n${output}"
|
||||
script.writeFile file: outputFilePath, text: output
|
||||
if(config.stashContent && config.stashContent.size() > 0) {
|
||||
def stashName = "modified whitesource config ${suffix}".toString()
|
||||
utils.stashWithMessage (
|
||||
stashName,
|
||||
"Stashing modified Whitesource configuration",
|
||||
outputFilePath.replaceFirst('\\./', '')
|
||||
)
|
||||
config.stashContent += [stashName]
|
||||
}
|
||||
config.whitesource.configFilePath = outputFilePath
|
||||
}
|
||||
|
||||
@NonCPS
|
||||
static private def serializeUAConfig(configuration) {
|
||||
Properties p = new Properties()
|
||||
p.putAll(configuration)
|
||||
|
||||
new StringWriter().with{ w -> p.store(w, null); w }.toString()
|
||||
}
|
||||
}
|
41
src/com/sap/piper/analytics/InfluxData.groovy
Normal file
41
src/com/sap/piper/analytics/InfluxData.groovy
Normal file
@ -0,0 +1,41 @@
|
||||
package com.sap.piper.analytics
|
||||
|
||||
import com.cloudbees.groovy.cps.NonCPS
|
||||
|
||||
class InfluxData implements Serializable{
|
||||
|
||||
// each Map in influxCustomDataMap represents a measurement in Influx.
|
||||
// Additional measurements can be added as a new Map entry of influxCustomDataMap
|
||||
protected Map fields = [jenkins_custom_data: [:], pipeline_data: [:], step_data: [:]]
|
||||
// each Map in influxCustomDataMapTags represents tags for certain measurement in Influx.
|
||||
// Tags are required in Influx for easier querying data
|
||||
protected Map tags = [jenkins_custom_data: [:], pipeline_data: [:], step_data: [:]]
|
||||
|
||||
public Map getFields(){ return fields }
|
||||
public Map getTags(){ return tags }
|
||||
|
||||
protected static InfluxData instance
|
||||
|
||||
@NonCPS
|
||||
public static InfluxData getInstance(){
|
||||
if(!instance) instance = new InfluxData()
|
||||
return instance
|
||||
}
|
||||
|
||||
public static void addField(String measurement, String key, value) {
|
||||
add(getInstance().getFields(), measurement, key, value)
|
||||
}
|
||||
|
||||
public static void addTag(String measurement, String key, value) {
|
||||
add(getInstance().getTags(), measurement, key, value)
|
||||
}
|
||||
|
||||
protected static void add(Map dataMap, String measurement, String field, value) {
|
||||
if (!dataMap[measurement]) dataMap[measurement] = [:]
|
||||
dataMap[measurement][field] = value
|
||||
}
|
||||
|
||||
public static void reset(){
|
||||
instance = null
|
||||
}
|
||||
}
|
@ -1,7 +1,8 @@
|
||||
package com.sap.piper.analytics
|
||||
|
||||
import com.cloudbees.groovy.cps.NonCPS
|
||||
import org.jenkinsci.plugins.workflow.steps.MissingContextVariableException
|
||||
|
||||
import org.jenkinsci.plugins.workflow.steps.FlowInterruptedException
|
||||
|
||||
class Telemetry implements Serializable{
|
||||
|
||||
@ -45,53 +46,39 @@ class Telemetry implements Serializable{
|
||||
}
|
||||
|
||||
protected static void piperOsDefaultReporting(Script steps, Map payload) {
|
||||
def swaEndpoint = 'https://webanalytics.cfapps.eu10.hana.ondemand.com/tracker/log'
|
||||
Map swaPayload = [
|
||||
'idsite': '827e8025-1e21-ae84-c3a3-3f62b70b0130',
|
||||
'url': 'https://github.com/SAP/jenkins-library',
|
||||
'action_name': payload.actionName,
|
||||
'event_type': payload.eventType,
|
||||
'custom3': payload.step, // custom3 = step name (passed as parameter step)
|
||||
'custom4': payload.jobUrlSha1, // custom4 = job url hashed (calculated)
|
||||
'custom5': payload.buildUrlSha1, // custom5 = build url hashed (calculated)
|
||||
'custom10': payload.stageName // custom10 = stage name
|
||||
]
|
||||
// step related parameters
|
||||
for(def key : [1, 2, 3, 4, 5]){ // custom11 - custom15 = step related parameter 1 - 5 (passed as parameter stepParam1 - stepParam5)
|
||||
if (payload["stepParam${key}"] != null) swaPayload.put("custom1${key}", payload["stepParam${key}"])
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
def swaCustom = [:]
|
||||
|
||||
/* SWA custom parameters:
|
||||
custom3 = step name (passed as parameter step)
|
||||
custom4 = job url hashed (calculated)
|
||||
custom5 = build url hashed (calculated)
|
||||
custom10 = stage name
|
||||
custom11 = step related parameter 1 (passed as parameter stepParam1)
|
||||
custom12 = step related parameter 2 (passed as parameter stepParam2)
|
||||
custom13 = step related parameter 3 (passed as parameter stepParam3)
|
||||
custom14 = step related parameter 4 (passed as parameter stepParam4)
|
||||
custom15 = step related parameter 5 (passed as parameter stepParam5)
|
||||
*/
|
||||
|
||||
def swaUrl = 'https://webanalytics.cfapps.eu10.hana.ondemand.com/tracker/log'
|
||||
def idsite = '827e8025-1e21-ae84-c3a3-3f62b70b0130'
|
||||
def url = 'https://github.com/SAP/jenkins-library'
|
||||
|
||||
swaCustom.custom3 = payload.step
|
||||
swaCustom.custom4 = payload.jobUrlSha1
|
||||
swaCustom.custom5 = payload.buildUrlSha1
|
||||
swaCustom.custom10 = payload.stageName
|
||||
swaCustom.custom11 = payload.stepParam1
|
||||
swaCustom.custom12 = payload.stepParam2
|
||||
swaCustom.custom13 = payload.stepParam3
|
||||
swaCustom.custom14 = payload.stepParam4
|
||||
swaCustom.custom15 = payload.stepParam5
|
||||
|
||||
def options = []
|
||||
options.push("-G")
|
||||
options.push("-v \"${swaUrl}\"")
|
||||
options.push("--data-urlencode \"action_name=${payload.actionName}\"")
|
||||
options.push("--data-urlencode \"idsite=${idsite}\"")
|
||||
options.push("--data-urlencode \"url=${url}\"")
|
||||
options.push("--data-urlencode \"event_type=${payload.eventType}\"")
|
||||
for(def key : ['custom3', 'custom4', 'custom5', 'custom10', 'custom11', 'custom12', 'custom13', 'custom14', 'custom15']){
|
||||
if (swaCustom[key] != null) options.push("--data-urlencode \"${key}=${swaCustom[key]}\"")
|
||||
steps.timeout(
|
||||
time: 10,
|
||||
unit: 'SECONDS'
|
||||
){
|
||||
steps.httpRequest(url: "${swaEndpoint}?${getPayloadString(swaPayload)}", timeout: 5, quiet: true)
|
||||
}
|
||||
options.push("--connect-timeout 5")
|
||||
options.push("--max-time 20")
|
||||
|
||||
steps.sh(returnStatus: true, script: "#!/bin/sh +x\ncurl ${options.join(' ')} > /dev/null 2>&1 || echo '[${payload.step}] Telemetry Report to SWA failed!'")
|
||||
|
||||
} catch (MissingContextVariableException noNode) {
|
||||
steps.echo "[${payload.step}] Telemetry Report to SWA skipped, no node available!"
|
||||
} catch (FlowInterruptedException ignore){
|
||||
// telemetry reporting timed out. This should not break anything though.
|
||||
steps.echo "[${payload.step}] Telemetry Report with listener failed: timeout"
|
||||
}
|
||||
}
|
||||
|
||||
@NonCPS
|
||||
private static String getPayloadString(Map payload){
|
||||
return payload
|
||||
.collect { entry -> return "${entry.key}=${URLEncoder.encode(entry.value.toString(), "UTF-8")}" }
|
||||
.join('&')
|
||||
}
|
||||
}
|
||||
|
@ -1,5 +1,5 @@
|
||||
package com.sap.piper.cm;
|
||||
|
||||
public enum BackendType {
|
||||
SOLMAN, CTS, NONE
|
||||
SOLMAN, CTS, RFC, NONE
|
||||
}
|
||||
|
@ -2,6 +2,7 @@ package com.sap.piper.cm
|
||||
|
||||
import com.sap.piper.GitUtils
|
||||
|
||||
import groovy.json.JsonSlurper
|
||||
import hudson.AbortException
|
||||
|
||||
|
||||
@ -62,8 +63,8 @@ public class ChangeManagement implements Serializable {
|
||||
return items[0]
|
||||
}
|
||||
|
||||
boolean isChangeInDevelopment(String changeId, String endpoint, String credentialsId, String clientOpts = '') {
|
||||
int rc = executeWithCredentials(BackendType.SOLMAN, endpoint, credentialsId, 'is-change-in-development', ['-cID', "'${changeId}'", '--return-code'],
|
||||
boolean isChangeInDevelopment(Map docker, String changeId, String endpoint, String credentialsId, String clientOpts = '') {
|
||||
int rc = executeWithCredentials(BackendType.SOLMAN, docker, endpoint, credentialsId, 'is-change-in-development', ['-cID', "'${changeId}'", '--return-code'],
|
||||
false,
|
||||
clientOpts) as int
|
||||
|
||||
@ -76,9 +77,9 @@ public class ChangeManagement implements Serializable {
|
||||
}
|
||||
}
|
||||
|
||||
String createTransportRequestCTS(String transportType, String targetSystemId, String description, String endpoint, String credentialsId, String clientOpts = '') {
|
||||
String createTransportRequestCTS(Map docker, String transportType, String targetSystemId, String description, String endpoint, String credentialsId, String clientOpts = '') {
|
||||
try {
|
||||
def transportRequest = executeWithCredentials(BackendType.CTS, endpoint, credentialsId, 'create-transport',
|
||||
def transportRequest = executeWithCredentials(BackendType.CTS, docker, endpoint, credentialsId, 'create-transport',
|
||||
['-tt', transportType, '-ts', targetSystemId, '-d', "\"${description}\""],
|
||||
true,
|
||||
clientOpts)
|
||||
@ -88,10 +89,10 @@ public class ChangeManagement implements Serializable {
|
||||
}
|
||||
}
|
||||
|
||||
String createTransportRequestSOLMAN(String changeId, String developmentSystemId, String endpoint, String credentialsId, String clientOpts = '') {
|
||||
String createTransportRequestSOLMAN(Map docker, String changeId, String developmentSystemId, String endpoint, String credentialsId, String clientOpts = '') {
|
||||
|
||||
try {
|
||||
def transportRequest = executeWithCredentials(BackendType.SOLMAN, endpoint, credentialsId, 'create-transport', ['-cID', changeId, '-dID', developmentSystemId],
|
||||
def transportRequest = executeWithCredentials(BackendType.SOLMAN, docker, endpoint, credentialsId, 'create-transport', ['-cID', changeId, '-dID', developmentSystemId],
|
||||
true,
|
||||
clientOpts)
|
||||
return (transportRequest as String)?.trim()
|
||||
@ -100,86 +101,317 @@ public class ChangeManagement implements Serializable {
|
||||
}
|
||||
}
|
||||
|
||||
void uploadFileToTransportRequest(BackendType type, String changeId, String transportRequestId, String applicationId, String filePath, String endpoint, String credentialsId, String cmclientOpts = '') {
|
||||
String createTransportRequestRFC(
|
||||
Map docker,
|
||||
String endpoint,
|
||||
String developmentInstance,
|
||||
String developmentClient,
|
||||
String credentialsId,
|
||||
String description,
|
||||
boolean verbose) {
|
||||
|
||||
def args = null
|
||||
def command = 'cts createTransportRequest'
|
||||
def args = [
|
||||
TRANSPORT_DESCRIPTION: description,
|
||||
ABAP_DEVELOPMENT_INSTANCE: developmentInstance,
|
||||
ABAP_DEVELOPMENT_CLIENT: developmentClient,
|
||||
VERBOSE: verbose,
|
||||
]
|
||||
|
||||
if(type == BackendType.SOLMAN) {
|
||||
args = ['-cID', changeId,
|
||||
'-tID', transportRequestId,
|
||||
applicationId, "\"$filePath\""]
|
||||
} else if (type == BackendType.CTS) {
|
||||
args = ['-tID', transportRequestId,
|
||||
"\"$filePath\""]
|
||||
} else {
|
||||
throw new IllegalArgumentException("Invalid backend type: ${type}")
|
||||
try {
|
||||
|
||||
def transportRequestId = executeWithCredentials(
|
||||
BackendType.RFC,
|
||||
docker,
|
||||
endpoint,
|
||||
credentialsId,
|
||||
command,
|
||||
args,
|
||||
true)
|
||||
|
||||
return new JsonSlurper().parseText(transportRequestId).REQUESTID
|
||||
|
||||
} catch(AbortException ex) {
|
||||
throw new ChangeManagementException(
|
||||
"Cannot create transport request: ${ex.getMessage()}", ex)
|
||||
}
|
||||
|
||||
int rc = executeWithCredentials(type,
|
||||
endpoint,
|
||||
credentialsId,
|
||||
'upload-file-to-transport',
|
||||
args,
|
||||
false,
|
||||
cmclientOpts) as int
|
||||
|
||||
if(rc == 0) {
|
||||
return
|
||||
} else {
|
||||
throw new ChangeManagementException("Cannot upload file '$filePath' for change document '$changeId' with transport request '$transportRequestId'. Return code from cmclient: $rc.")
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
def executeWithCredentials(BackendType type, String endpoint, String credentialsId, String command, List<String> args, boolean returnStdout = false, String clientOpts = '') {
|
||||
void uploadFileToTransportRequestSOLMAN(
|
||||
Map docker,
|
||||
String changeId,
|
||||
String transportRequestId,
|
||||
String applicationId,
|
||||
String filePath,
|
||||
String endpoint,
|
||||
String credentialsId,
|
||||
String cmclientOpts = '') {
|
||||
|
||||
def args = [
|
||||
'-cID', changeId,
|
||||
'-tID', transportRequestId,
|
||||
applicationId, "\"$filePath\""
|
||||
]
|
||||
|
||||
int rc = executeWithCredentials(
|
||||
BackendType.SOLMAN,
|
||||
docker,
|
||||
endpoint,
|
||||
credentialsId,
|
||||
'upload-file-to-transport',
|
||||
args,
|
||||
false,
|
||||
cmclientOpts) as int
|
||||
|
||||
if(rc != 0) {
|
||||
throw new ChangeManagementException(
|
||||
"Cannot upload file into transport request. Return code from cm client: $rc.")
|
||||
}
|
||||
}
|
||||
|
||||
void uploadFileToTransportRequestCTS(
|
||||
Map docker,
|
||||
String transportRequestId,
|
||||
String filePath,
|
||||
String endpoint,
|
||||
String credentialsId,
|
||||
String cmclientOpts = '') {
|
||||
|
||||
def args = [
|
||||
'-tID', transportRequestId,
|
||||
"\"$filePath\""
|
||||
]
|
||||
|
||||
int rc = executeWithCredentials(
|
||||
BackendType.CTS,
|
||||
docker,
|
||||
endpoint,
|
||||
credentialsId,
|
||||
'upload-file-to-transport',
|
||||
args,
|
||||
false,
|
||||
cmclientOpts) as int
|
||||
|
||||
if(rc != 0) {
|
||||
throw new ChangeManagementException(
|
||||
"Cannot upload file into transport request. Return code from cm client: $rc.")
|
||||
}
|
||||
}
|
||||
|
||||
void uploadFileToTransportRequestRFC(
|
||||
Map docker,
|
||||
String transportRequestId,
|
||||
String applicationName,
|
||||
String filePath,
|
||||
String endpoint,
|
||||
String credentialsId,
|
||||
String developmentInstance,
|
||||
String developmentClient,
|
||||
String applicationDescription,
|
||||
String abapPackage,
|
||||
String codePage,
|
||||
boolean acceptUnixStyleEndOfLine,
|
||||
boolean failOnWarning,
|
||||
boolean verbose) {
|
||||
|
||||
def args = [
|
||||
ABAP_DEVELOPMENT_INSTANCE: developmentInstance,
|
||||
ABAP_DEVELOPMENT_CLIENT: developmentClient,
|
||||
ABAP_APPLICATION_NAME: applicationName,
|
||||
ABAP_APPLICATION_DESC: applicationDescription,
|
||||
ABAP_PACKAGE: abapPackage,
|
||||
ZIP_FILE_URL: filePath,
|
||||
CODE_PAGE: codePage,
|
||||
ABAP_ACCEPT_UNIX_STYLE_EOL: acceptUnixStyleEndOfLine ? 'X' : '-',
|
||||
FAIL_UPLOAD_ON_WARNING: Boolean.toString(failOnWarning),
|
||||
VERBOSE: Boolean.toString(verbose),
|
||||
]
|
||||
|
||||
int rc = executeWithCredentials(
|
||||
BackendType.RFC,
|
||||
docker,
|
||||
endpoint,
|
||||
credentialsId,
|
||||
"cts uploadToABAP:${transportRequestId}",
|
||||
args,
|
||||
false) as int
|
||||
|
||||
if(rc != 0) {
|
||||
throw new ChangeManagementException(
|
||||
"Cannot upload file into transport request. Return code from rfc client: $rc.")
|
||||
}
|
||||
}
|
||||
|
||||
def executeWithCredentials(
|
||||
BackendType type,
|
||||
Map docker,
|
||||
String endpoint,
|
||||
String credentialsId,
|
||||
String command,
|
||||
def args,
|
||||
boolean returnStdout = false,
|
||||
String clientOpts = '') {
|
||||
|
||||
def script = this.script
|
||||
|
||||
docker = docker ?: [:]
|
||||
|
||||
script.withCredentials([script.usernamePassword(
|
||||
credentialsId: credentialsId,
|
||||
passwordVariable: 'password',
|
||||
usernameVariable: 'username')]) {
|
||||
def cmScript = getCMCommandLine(type, endpoint, script.username, script.password,
|
||||
command, args,
|
||||
clientOpts)
|
||||
|
||||
Map shArgs = [:]
|
||||
|
||||
if(returnStdout)
|
||||
shArgs.put('returnStdout', true)
|
||||
else
|
||||
shArgs.put('returnStatus', true)
|
||||
|
||||
shArgs.put('script', cmScript)
|
||||
Map dockerEnvVars = docker.envVars ?: [:]
|
||||
|
||||
// user and password are masked by withCredentials
|
||||
script.echo """[INFO] Executing command line: "${cmScript}"."""
|
||||
return script.sh(shArgs)
|
||||
def result = 1
|
||||
|
||||
switch(type) {
|
||||
|
||||
case BackendType.RFC:
|
||||
|
||||
if(! (args in Map)) {
|
||||
throw new IllegalArgumentException("args expected as Map for backend types ${[BackendType.RFC]}")
|
||||
}
|
||||
|
||||
shArgs.script = command
|
||||
|
||||
args = args.plus([
|
||||
ABAP_DEVELOPMENT_SERVER: endpoint,
|
||||
ABAP_DEVELOPMENT_USER: script.username,
|
||||
ABAP_DEVELOPMENT_PASSWORD: script.password,
|
||||
])
|
||||
|
||||
dockerEnvVars += args
|
||||
|
||||
break
|
||||
|
||||
case BackendType.SOLMAN:
|
||||
case BackendType.CTS:
|
||||
|
||||
if(! (args in Collection))
|
||||
throw new IllegalArgumentException("args expected as Collection for backend types ${[BackendType.SOLMAN, BackendType.CTS]}")
|
||||
|
||||
shArgs.script = getCMCommandLine(type, endpoint, script.username, script.password,
|
||||
command, args,
|
||||
clientOpts)
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
// user and password are masked by withCredentials
|
||||
script.echo """[INFO] Executing command line: "${shArgs.script}"."""
|
||||
|
||||
script.dockerExecute(
|
||||
script: script,
|
||||
dockerImage: docker.image,
|
||||
dockerOptions: docker.options,
|
||||
dockerEnvVars: dockerEnvVars,
|
||||
dockerPullImage: docker.pullImage) {
|
||||
|
||||
result = script.sh(shArgs)
|
||||
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
}
|
||||
|
||||
void releaseTransportRequest(BackendType type,String changeId, String transportRequestId, String endpoint, String credentialsId, String clientOpts = '') {
|
||||
void releaseTransportRequestSOLMAN(
|
||||
Map docker,
|
||||
String changeId,
|
||||
String transportRequestId,
|
||||
String endpoint,
|
||||
String credentialsId,
|
||||
String clientOpts = '') {
|
||||
|
||||
def cmd
|
||||
List args = []
|
||||
def cmd = 'release-transport'
|
||||
def args = [
|
||||
'-cID',
|
||||
changeId,
|
||||
'-tID',
|
||||
transportRequestId,
|
||||
]
|
||||
|
||||
if(type == BackendType.SOLMAN) {
|
||||
cmd = 'release-transport'
|
||||
args << '-cID'
|
||||
args << changeId
|
||||
} else if(type == BackendType.CTS) {
|
||||
cmd = 'export-transport'
|
||||
} else {
|
||||
throw new IllegalStateException("Invalid backend type: '${type}'")
|
||||
}
|
||||
int rc = executeWithCredentials(
|
||||
BackendType.SOLMAN,
|
||||
docker,
|
||||
endpoint,
|
||||
credentialsId,
|
||||
cmd,
|
||||
args,
|
||||
false,
|
||||
clientOpts) as int
|
||||
|
||||
args << '-tID'
|
||||
args << transportRequestId
|
||||
|
||||
int rc = executeWithCredentials(type, endpoint, credentialsId, cmd, args, false, clientOpts) as int
|
||||
if(rc == 0) {
|
||||
return
|
||||
} else {
|
||||
if(rc != 0) {
|
||||
throw new ChangeManagementException("Cannot release Transport Request '$transportRequestId'. Return code from cmclient: $rc.")
|
||||
}
|
||||
}
|
||||
|
||||
void releaseTransportRequestCTS(
|
||||
Map docker,
|
||||
String transportRequestId,
|
||||
String endpoint,
|
||||
String credentialsId,
|
||||
String clientOpts = '') {
|
||||
|
||||
def cmd = 'export-transport'
|
||||
def args = [
|
||||
'-tID',
|
||||
transportRequestId,
|
||||
]
|
||||
|
||||
int rc = executeWithCredentials(
|
||||
BackendType.CTS,
|
||||
docker,
|
||||
endpoint,
|
||||
credentialsId,
|
||||
cmd,
|
||||
args,
|
||||
false) as int
|
||||
|
||||
if(rc != 0) {
|
||||
throw new ChangeManagementException("Cannot release Transport Request '$transportRequestId'. Return code from cmclient: $rc.")
|
||||
}
|
||||
}
|
||||
|
||||
void releaseTransportRequestRFC(
|
||||
Map docker,
|
||||
String transportRequestId,
|
||||
String endpoint,
|
||||
String developmentInstance,
|
||||
String developmentClient,
|
||||
String credentialsId,
|
||||
boolean verbose) {
|
||||
|
||||
def cmd = "cts releaseTransport:${transportRequestId}"
|
||||
def args = [
|
||||
ABAP_DEVELOPMENT_INSTANCE: developmentInstance,
|
||||
ABAP_DEVELOPMENT_CLIENT: developmentClient,
|
||||
VERBOSE: verbose,
|
||||
]
|
||||
|
||||
int rc = executeWithCredentials(
|
||||
BackendType.RFC,
|
||||
docker,
|
||||
endpoint,
|
||||
credentialsId,
|
||||
cmd,
|
||||
args,
|
||||
false) as int
|
||||
|
||||
if(rc != 0) {
|
||||
throw new ChangeManagementException("Cannot release Transport Request '$transportRequestId'. Return code from rfcclient: $rc.")
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
String getCMCommandLine(BackendType type,
|
||||
String endpoint,
|
||||
String username,
|
||||
|
@ -15,7 +15,7 @@ public class StepHelpers {
|
||||
|
||||
}
|
||||
|
||||
transportRequestId = script.commonPipelineEnvironment.getTransportRequestId()
|
||||
transportRequestId = script.commonPipelineEnvironment.getValue('transportRequestId')
|
||||
|
||||
if(transportRequestId?.trim()) {
|
||||
script.echo "[INFO] Transport request id '${transportRequestId}' retrieved from common pipeline environment."
|
||||
@ -33,7 +33,7 @@ public class StepHelpers {
|
||||
configuration.changeManagement.git.format
|
||||
)
|
||||
|
||||
script.commonPipelineEnvironment.setTransportRequestId(transportRequestId)
|
||||
script.commonPipelineEnvironment.setValue('transportRequestId', "${transportRequestId}")
|
||||
script.echo "[INFO] Transport request id '${transportRequestId}' retrieved from commit history"
|
||||
|
||||
} catch(ChangeManagementException ex) {
|
||||
|
@ -0,0 +1,108 @@
|
||||
package com.sap.piper.integration
|
||||
|
||||
import com.cloudbees.groovy.cps.NonCPS
|
||||
import com.sap.piper.JsonUtils
|
||||
|
||||
class WhitesourceOrgAdminRepository implements Serializable {
|
||||
|
||||
final Script script
|
||||
final internalWhitesource
|
||||
final Map config
|
||||
|
||||
WhitesourceOrgAdminRepository(Script script, Map config) {
|
||||
this.script = script
|
||||
this.config = config
|
||||
if(!this.config.whitesource?.serviceUrl && !this.config.whitesourceAccessor)
|
||||
script.error "Parameter 'whitesource.serviceUrl' must be provided as part of the configuration."
|
||||
if(this.config.whitesourceAccessor instanceof String) {
|
||||
def clazz = this.class.classLoader.loadClass(this.config.whitesourceAccessor)
|
||||
this.internalWhitesource = clazz?.newInstance(this.script, this.config)
|
||||
}
|
||||
}
|
||||
|
||||
def fetchProductMetaInfo() {
|
||||
def requestBody = [
|
||||
requestType: "getOrganizationProductVitals",
|
||||
orgToken: config.whitesource.orgToken
|
||||
]
|
||||
def parsedResponse = issueHttpRequest(requestBody)
|
||||
|
||||
findProductMeta(parsedResponse)
|
||||
}
|
||||
|
||||
def findProductMeta(parsedResponse) {
|
||||
def foundMetaProduct = null
|
||||
for (product in parsedResponse.productVitals) {
|
||||
if (product.name == config.whitesource.productName) {
|
||||
foundMetaProduct = product
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return foundMetaProduct
|
||||
}
|
||||
|
||||
def createProduct() {
|
||||
def requestBody = [
|
||||
requestType: "createProduct",
|
||||
orgToken: config.whitesource.orgToken,
|
||||
productName: config.whitesource.productName
|
||||
]
|
||||
def parsedResponse = issueHttpRequest(requestBody)
|
||||
def metaInfo = parsedResponse
|
||||
|
||||
def groups = []
|
||||
def users = []
|
||||
config.whitesource.emailAddressesOfInitialProductAdmins.each {
|
||||
email -> users.add(["email": email])
|
||||
}
|
||||
|
||||
requestBody = [
|
||||
"requestType" : "setProductAssignments",
|
||||
"productToken" : metaInfo.productToken,
|
||||
"productMembership" : ["userAssignments":[], "groupAssignments":groups],
|
||||
"productAdmins" : ["userAssignments":users],
|
||||
"alertsEmailReceivers" : ["userAssignments":[]]
|
||||
]
|
||||
issueHttpRequest(requestBody)
|
||||
|
||||
return metaInfo
|
||||
}
|
||||
|
||||
def issueHttpRequest(requestBody) {
|
||||
def response = internalWhitesource ? internalWhitesource.httpWhitesource(requestBody) : httpWhitesource(requestBody)
|
||||
def parsedResponse = new JsonUtils().jsonStringToGroovyObject(response.content)
|
||||
if(parsedResponse?.errorCode){
|
||||
script.error "[WhiteSource] Request failed with error message '${parsedResponse.errorMessage}' (${parsedResponse.errorCode})."
|
||||
}
|
||||
return parsedResponse
|
||||
}
|
||||
|
||||
@NonCPS
|
||||
protected def httpWhitesource(requestBody) {
|
||||
requestBody["userKey"] = config.whitesource.orgAdminUserKey
|
||||
def serializedBody = new JsonUtils().groovyObjectToPrettyJsonString(requestBody)
|
||||
def params = [
|
||||
url : config.whitesource.serviceUrl,
|
||||
httpMode : 'POST',
|
||||
acceptType : 'APPLICATION_JSON',
|
||||
contentType: 'APPLICATION_JSON',
|
||||
requestBody: serializedBody,
|
||||
quiet : !config.verbose,
|
||||
timeout : config.whitesource.timeout
|
||||
]
|
||||
|
||||
if (script.env.HTTP_PROXY)
|
||||
params["httpProxy"] = script.env.HTTP_PROXY
|
||||
|
||||
if (config.verbose)
|
||||
script.echo "Sending http request with parameters ${params}"
|
||||
|
||||
def response = script.httpRequest(params)
|
||||
|
||||
if (config.verbose)
|
||||
script.echo "Received response ${response}"
|
||||
|
||||
return response
|
||||
}
|
||||
}
|
206
src/com/sap/piper/integration/WhitesourceRepository.groovy
Normal file
206
src/com/sap/piper/integration/WhitesourceRepository.groovy
Normal file
@ -0,0 +1,206 @@
|
||||
package com.sap.piper.integration
|
||||
|
||||
import com.cloudbees.groovy.cps.NonCPS
|
||||
import com.sap.piper.JsonUtils
|
||||
|
||||
class WhitesourceRepository implements Serializable {
|
||||
|
||||
final Script script
|
||||
final Map config
|
||||
|
||||
WhitesourceRepository(Script script, Map config) {
|
||||
this.script = script
|
||||
this.config = config
|
||||
|
||||
if(!config?.whitesource?.serviceUrl)
|
||||
script.error "Parameter 'whitesource.serviceUrl' must be provided as part of the configuration."
|
||||
}
|
||||
|
||||
List fetchVulnerabilities(whitesourceProjectsMetaInformation) {
|
||||
def fetchedVulnerabilities = []
|
||||
if (config.whitesource.projectNames) {
|
||||
for (int i = 0; i < whitesourceProjectsMetaInformation.size(); i++) {
|
||||
fetchSecurityAlertsPerItem(whitesourceProjectsMetaInformation[i].token, "getProjectAlertsByType", fetchedVulnerabilities)
|
||||
}
|
||||
} else {
|
||||
fetchSecurityAlertsPerItem(config.whitesource.productToken, "getProductAlertsByType", fetchedVulnerabilities)
|
||||
}
|
||||
|
||||
sortVulnerabilitiesByScore(fetchedVulnerabilities)
|
||||
|
||||
return fetchedVulnerabilities
|
||||
}
|
||||
|
||||
private fetchSecurityAlertsPerItem(token, type, List<Object> fetchedVulnerabilities) {
|
||||
def requestBody = [
|
||||
requestType : type,
|
||||
alertType : "SECURITY_VULNERABILITY",
|
||||
projectToken: token
|
||||
]
|
||||
|
||||
def response = fetchWhitesourceResource(requestBody)
|
||||
fetchedVulnerabilities.addAll(response.alerts)
|
||||
}
|
||||
|
||||
protected def fetchWhitesourceResource(Map requestBody) {
|
||||
final def response = httpWhitesource(requestBody)
|
||||
def parsedResponse = new JsonUtils().jsonStringToGroovyObject(response.content)
|
||||
|
||||
if(parsedResponse?.errorCode){
|
||||
script.error "[WhiteSource] Request failed with error message '${parsedResponse.errorMessage}' (${parsedResponse.errorCode})."
|
||||
}
|
||||
|
||||
return parsedResponse
|
||||
}
|
||||
|
||||
@NonCPS
|
||||
void sortLibrariesAlphabeticallyGAV(List libraries) {
|
||||
script.echo "found a total of ${libraries.size()} dependencies (direct and indirect)"
|
||||
libraries.sort { o1, o2 ->
|
||||
String groupID1 = o1.groupId
|
||||
String groupID2 = o2.groupId
|
||||
def comparisionResult = groupID1 <=> groupID2;
|
||||
|
||||
if (comparisionResult != 0) {
|
||||
comparisionResult
|
||||
} else {
|
||||
String artifactID1 = o1.artifactId
|
||||
String artifactID2 = o2.artifactId
|
||||
|
||||
artifactID1 <=> artifactID2
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@NonCPS
|
||||
void sortVulnerabilitiesByScore(List vulnerabilities) {
|
||||
script.echo "${vulnerabilities.size() > 0 ? 'WARNING: ' : ''}found a total of ${vulnerabilities.size()} vulnerabilities"
|
||||
vulnerabilities.sort { o1, o2 ->
|
||||
def cvss3score1 = o1.vulnerability.cvss3_score == 0 ? o1.vulnerability.score : o1.vulnerability.cvss3_score
|
||||
def cvss3score2 = o2.vulnerability.cvss3_score == 0 ? o2.vulnerability.score : o2.vulnerability.cvss3_score
|
||||
|
||||
def comparisionResult = cvss3score1 <=> cvss3score2
|
||||
|
||||
if (comparisionResult != 0) {
|
||||
-comparisionResult
|
||||
} else {
|
||||
def score1 = o1.vulnerability.score
|
||||
def score2 = o2.vulnerability.score
|
||||
|
||||
-(score1 <=> score2)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
List fetchProjectsMetaInfo() {
|
||||
def projectsMetaInfo = []
|
||||
if(config.whitesource.projectNames){
|
||||
def requestBody = [
|
||||
requestType: "getProductProjectVitals",
|
||||
productToken: config.whitesource.productToken
|
||||
]
|
||||
def response = fetchWhitesourceResource(requestBody)
|
||||
|
||||
if(response?.projectVitals) {
|
||||
projectsMetaInfo.addAll(findProjectsMeta(response.projectVitals))
|
||||
} else {
|
||||
script.error "[WhiteSource] Could not fetch any projects for product '${config.whitesource.productName}' from backend, response was ${response}"
|
||||
}
|
||||
}
|
||||
return projectsMetaInfo
|
||||
}
|
||||
|
||||
List findProjectsMeta(projectVitals) {
|
||||
def matchedProjects = []
|
||||
for (int i = 0; i < config.whitesource.projectNames?.size(); i++) {
|
||||
def requestedProjectName = config.whitesource.projectNames[i].trim()
|
||||
def matchedProjectInfo = null
|
||||
|
||||
for (int j = 0; j < projectVitals.size(); j++) {
|
||||
def projectResponse = projectVitals[j]
|
||||
if (projectResponse.name == requestedProjectName) {
|
||||
matchedProjectInfo = projectResponse
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (matchedProjectInfo != null) {
|
||||
matchedProjects.add(matchedProjectInfo)
|
||||
} else {
|
||||
script.error "[WhiteSource] Could not fetch/find requested project '${requestedProjectName}' for product '${config.whitesource.productName}'"
|
||||
}
|
||||
}
|
||||
|
||||
return matchedProjects
|
||||
}
|
||||
|
||||
void fetchReportForProduct(reportName) {
|
||||
def headers = [[name: 'Cache-Control', value: 'no-cache, no-store, must-revalidate'], [name: 'Pragma', value: 'no-cache']]
|
||||
def requestContent = [
|
||||
requestType: "getProductRiskReport",
|
||||
productToken: config.whitesource.productToken
|
||||
]
|
||||
|
||||
//fetchFileFromWhiteSource(reportName, requestContent)
|
||||
httpWhitesource(requestContent, 'APPLICATION_OCTETSTREAM', headers, reportName)
|
||||
}
|
||||
|
||||
def fetchProductLicenseAlerts() {
|
||||
def requestContent = [
|
||||
requestType: "getProductAlertsByType",
|
||||
alertType: "REJECTED_BY_POLICY_RESOURCE",
|
||||
productToken: config.whitesource.productToken
|
||||
]
|
||||
def parsedResponse = fetchWhitesourceResource(requestContent)
|
||||
|
||||
return parsedResponse
|
||||
}
|
||||
|
||||
def fetchProjectLicenseAlerts(String projectToken) {
|
||||
def requestContent = [
|
||||
requestType: "getProjectAlertsByType",
|
||||
alertType: "REJECTED_BY_POLICY_RESOURCE",
|
||||
projectToken: projectToken
|
||||
]
|
||||
def parsedResponse = fetchWhitesourceResource(requestContent)
|
||||
|
||||
return parsedResponse
|
||||
}
|
||||
|
||||
@NonCPS
|
||||
protected def httpWhitesource(requestBody, acceptType = 'APPLICATION_JSON', customHeaders = null, outputFile = null) {
|
||||
handleAdditionalRequestParameters(requestBody)
|
||||
def serializedBody = new JsonUtils().groovyObjectToPrettyJsonString(requestBody)
|
||||
def params = [
|
||||
url : config.whitesource.serviceUrl,
|
||||
httpMode : 'POST',
|
||||
acceptType : acceptType,
|
||||
contentType: 'APPLICATION_JSON',
|
||||
requestBody: serializedBody,
|
||||
quiet : !config.verbose,
|
||||
timeout : config.whitesource.timeout
|
||||
]
|
||||
|
||||
if(customHeaders) params["customHeaders"] = customHeaders
|
||||
|
||||
if (outputFile) params["outputFile"] = outputFile
|
||||
|
||||
if (script.env.HTTP_PROXY) params["httpProxy"] = script.env.HTTP_PROXY
|
||||
|
||||
if(config.verbose)
|
||||
script.echo "Sending http request with parameters ${params}"
|
||||
|
||||
def response = script.httpRequest(params)
|
||||
|
||||
if(config.verbose)
|
||||
script.echo "Received response ${response}"
|
||||
|
||||
return response
|
||||
}
|
||||
|
||||
@NonCPS
|
||||
protected void handleAdditionalRequestParameters(params) {
|
||||
if(config.whitesource.userKey)
|
||||
params["userKey"] = config.whitesource.userKey
|
||||
}
|
||||
}
|
@ -1,98 +0,0 @@
|
||||
package com.sap.piper.tools
|
||||
|
||||
import com.sap.piper.VersionUtils
|
||||
import com.sap.piper.EnvironmentUtils
|
||||
import com.sap.piper.FileUtils
|
||||
|
||||
import hudson.AbortException
|
||||
|
||||
|
||||
class JavaArchiveDescriptor implements Serializable {
|
||||
|
||||
final name
|
||||
final environmentKey
|
||||
final stepConfigurationKey
|
||||
final version
|
||||
final versionOption
|
||||
final javaTool
|
||||
final javaOptions
|
||||
|
||||
JavaArchiveDescriptor(name, environmentKey, stepConfigurationKey, version, versionOption, javaTool, javaOptions = '') {
|
||||
this.name = name
|
||||
this.environmentKey = environmentKey
|
||||
this.stepConfigurationKey = stepConfigurationKey
|
||||
this.version = version
|
||||
this.versionOption = versionOption
|
||||
this.javaTool = javaTool
|
||||
this.javaOptions = javaOptions
|
||||
}
|
||||
|
||||
def getFile(script, configuration, log = true) {
|
||||
|
||||
def javaArchiveFile
|
||||
if (EnvironmentUtils.isEnvironmentVariable(script, environmentKey)) {
|
||||
javaArchiveFile = EnvironmentUtils.getEnvironmentVariable(script, environmentKey)
|
||||
if (log) script.echo "$name file '$javaArchiveFile' retrieved from environment."
|
||||
if (!isJavaArchiveFile(javaArchiveFile)) {
|
||||
//script.error "The value '$javaArchiveFile' of the environment variable '$environmentKey' has an unexpected format."
|
||||
javaArchiveFile += '/mta.jar' // Compatibility code
|
||||
}
|
||||
}
|
||||
else if (configuration.containsKey(stepConfigurationKey)) {
|
||||
javaArchiveFile = configuration.get(stepConfigurationKey)
|
||||
if (log) script.echo "$name file '$javaArchiveFile' retrieved from configuration."
|
||||
if (!isJavaArchiveFile(javaArchiveFile)) {
|
||||
//script.error "The value '$javaArchiveFile' of the configuration key '$stepConfigurationKey' has an unexpected format."
|
||||
javaArchiveFile += '/mta.jar' // Compatibility code
|
||||
}
|
||||
} else {
|
||||
throw new AbortException(getMessage())
|
||||
}
|
||||
return javaArchiveFile
|
||||
}
|
||||
|
||||
def isJavaArchiveFile(String javaArchiveFile) {
|
||||
def group = javaArchiveFile =~ /(.+[\/\\])(.+[.]jar)/
|
||||
if (!group.matches() || group[0].size() == 0) group = javaArchiveFile =~ /(.+[.]jar)/
|
||||
if (!group.matches() || group[0].size() == 0) return false
|
||||
return true
|
||||
}
|
||||
|
||||
def getCall(script, configuration, log = true) {
|
||||
|
||||
def javaArchiveFile = getFile(script, configuration, log)
|
||||
if (log) script.echo "Using $name '$javaArchiveFile'."
|
||||
def javaExecutable = javaTool.getToolExecutable(script, configuration, false)
|
||||
def javaCall = "$javaExecutable -jar"
|
||||
if (javaOptions) javaCall += " $javaOptions"
|
||||
return "$javaCall $javaArchiveFile"
|
||||
}
|
||||
|
||||
def verify(script, configuration) {
|
||||
|
||||
verifyFile(script, configuration)
|
||||
verifyVersion(script, configuration)
|
||||
}
|
||||
|
||||
def verifyFile(script, configuration) {
|
||||
|
||||
def javaArchiveFile = getFile(script, configuration, false)
|
||||
script.echo "Verifying $name '$javaArchiveFile'."
|
||||
FileUtils.validateFile(script, javaArchiveFile)
|
||||
script.echo "Verification success. $name '$javaArchiveFile' exists."
|
||||
}
|
||||
|
||||
def verifyVersion(script, configuration) {
|
||||
|
||||
def javaArchiveCall = getCall(script, configuration, false)
|
||||
VersionUtils.verifyVersion(script, name, javaArchiveCall, version, versionOption)
|
||||
}
|
||||
|
||||
def getMessage() {
|
||||
def configOptions = "Please, configure $name. $name can be set "
|
||||
if (environmentKey) configOptions += "using the environment variable '$environmentKey'"
|
||||
if (environmentKey && stepConfigurationKey) configOptions += ", or "
|
||||
if (stepConfigurationKey) configOptions += "using the configuration key '$stepConfigurationKey'."
|
||||
return configOptions
|
||||
}
|
||||
}
|
@ -1,135 +0,0 @@
|
||||
package com.sap.piper.tools
|
||||
|
||||
import com.sap.piper.VersionUtils
|
||||
import com.sap.piper.EnvironmentUtils
|
||||
import com.sap.piper.FileUtils
|
||||
import com.sap.piper.Version
|
||||
|
||||
import hudson.AbortException
|
||||
|
||||
|
||||
class ToolDescriptor implements Serializable {
|
||||
|
||||
final name
|
||||
final environmentKey
|
||||
final stepConfigurationKey
|
||||
final executablePath
|
||||
final executableName
|
||||
final singleVersion
|
||||
final multipleVersions
|
||||
final versionOption
|
||||
|
||||
ToolDescriptor(name, environmentKey, stepConfigurationKey, executablePath, executableName, String singleVersion, versionOption) {
|
||||
this.name = name
|
||||
this.environmentKey = environmentKey
|
||||
this.stepConfigurationKey = stepConfigurationKey
|
||||
this.executablePath = executablePath
|
||||
this.executableName = executableName
|
||||
this.singleVersion = singleVersion
|
||||
this.multipleVersions = [:]
|
||||
this.versionOption = versionOption
|
||||
}
|
||||
|
||||
ToolDescriptor(name, environmentKey, stepConfigurationKey, executablePath, executableName, Map multipleVersions, versionOption) {
|
||||
this.name = name
|
||||
this.environmentKey = environmentKey
|
||||
this.stepConfigurationKey = stepConfigurationKey
|
||||
this.executablePath = executablePath
|
||||
this.executableName = executableName
|
||||
this.singleVersion = ''
|
||||
this.multipleVersions = multipleVersions
|
||||
this.versionOption = versionOption
|
||||
}
|
||||
|
||||
def getToolLocation(script, configuration, log = true) {
|
||||
|
||||
def toolLocation
|
||||
if (EnvironmentUtils.isEnvironmentVariable(script, environmentKey)) {
|
||||
toolLocation = EnvironmentUtils.getEnvironmentVariable(script, environmentKey)
|
||||
if (log) script.echo "$name home '$toolLocation' retrieved from environment."
|
||||
}
|
||||
else if (configuration.containsKey(stepConfigurationKey)) {
|
||||
toolLocation = configuration.get(stepConfigurationKey)
|
||||
if (log) script.echo "$name home '$toolLocation' retrieved from configuration."
|
||||
} else if (isOnPath(script, configuration)){
|
||||
toolLocation = ''
|
||||
if (log) script.echo "$name is on PATH."
|
||||
} else {
|
||||
throw new AbortException(getMessage())
|
||||
}
|
||||
return toolLocation
|
||||
}
|
||||
|
||||
def getTool(script, configuration, log = true) {
|
||||
|
||||
def toolLocation = getToolLocation(script, configuration, log)
|
||||
|
||||
if (toolLocation) {
|
||||
return "$toolLocation$executablePath$executableName"
|
||||
} else {
|
||||
return executableName
|
||||
}
|
||||
}
|
||||
|
||||
def getToolExecutable(script, configuration, log = true) {
|
||||
def executable = getTool(script, configuration, log)
|
||||
if (log) script.echo "Using $name '$executable'."
|
||||
return executable
|
||||
}
|
||||
|
||||
def verify(script, configuration) {
|
||||
|
||||
verifyToolLocation(script, configuration)
|
||||
verifyToolExecutable(script, configuration)
|
||||
verifyVersion(script, configuration)
|
||||
}
|
||||
|
||||
def verifyToolLocation(script, configuration) {
|
||||
|
||||
def toolLocation = getToolLocation(script, configuration)
|
||||
if (toolLocation) {
|
||||
script.echo "Verifying $name location '$toolLocation'."
|
||||
FileUtils.validateDirectoryIsNotEmpty(script, toolLocation)
|
||||
script.echo "Verification success. $name location '$toolLocation' exists."
|
||||
}
|
||||
}
|
||||
|
||||
def verifyToolExecutable(script, configuration) {
|
||||
|
||||
def home = getToolLocation(script, configuration, false)
|
||||
def tool = getTool(script, configuration, false)
|
||||
if (home) {
|
||||
script.echo "Verifying $name '$tool'."
|
||||
FileUtils.validateFile(script, tool)
|
||||
script.echo "Verification success. $name '$tool' exists."
|
||||
}
|
||||
}
|
||||
|
||||
def verifyVersion(script, configuration) {
|
||||
|
||||
def executable = getToolExecutable(script, configuration, false)
|
||||
def versionDesc = VersionUtils.getVersionDesc(script, name, executable, versionOption)
|
||||
if (singleVersion) VersionUtils.verifyVersion(script, name, versionDesc, singleVersion)
|
||||
if (multipleVersions) VersionUtils.verifyVersion(script, name, versionDesc, multipleVersions)
|
||||
}
|
||||
|
||||
def getMessage() {
|
||||
def configOptions = "Please, configure $name home. $name home can be set "
|
||||
if (environmentKey) configOptions += "using the environment variable '$environmentKey', or "
|
||||
if (stepConfigurationKey) configOptions += "using the configuration key '$stepConfigurationKey', or "
|
||||
configOptions += "on PATH."
|
||||
return configOptions
|
||||
}
|
||||
|
||||
def isOnPath(script, configuration) {
|
||||
|
||||
def exitStatus
|
||||
try {
|
||||
exitStatus = script.sh returnStatus: true, script: """set +x
|
||||
which $executableName"""
|
||||
} catch(AbortException e) {
|
||||
throw new AbortException("The verification of $name failed, while checking if it was on PATH. Reason: $e.message.")
|
||||
}
|
||||
return exitStatus == 0
|
||||
}
|
||||
}
|
@ -1,7 +1,6 @@
|
||||
package com.sap.piper.tools.neo
|
||||
|
||||
import com.sap.piper.BashUtils
|
||||
import com.sap.piper.ConfigurationHelper
|
||||
import com.sap.piper.StepAssertions
|
||||
|
||||
class NeoCommandHelper {
|
||||
@ -9,25 +8,23 @@ class NeoCommandHelper {
|
||||
private Script step
|
||||
private DeployMode deployMode
|
||||
private Map deploymentConfiguration
|
||||
private String pathToNeoExecutable
|
||||
private String user
|
||||
private String password
|
||||
private String source
|
||||
|
||||
//Warning: Commands generated with this class can contain passwords and should only be used within the step withCredentials
|
||||
NeoCommandHelper(Script step, DeployMode deployMode, Map deploymentConfiguration, String pathToNeoExecutable,
|
||||
NeoCommandHelper(Script step, DeployMode deployMode, Map deploymentConfiguration,
|
||||
String user, String password, String source) {
|
||||
this.step = step
|
||||
this.deployMode = deployMode
|
||||
this.deploymentConfiguration = deploymentConfiguration
|
||||
this.pathToNeoExecutable = pathToNeoExecutable
|
||||
this.user = user
|
||||
this.password = password
|
||||
this.source = source
|
||||
}
|
||||
|
||||
private String prolog() {
|
||||
return "\"${pathToNeoExecutable}\""
|
||||
return 'neo.sh'
|
||||
}
|
||||
|
||||
String statusCommand() {
|
||||
@ -60,20 +57,11 @@ class NeoCommandHelper {
|
||||
"/acc/${properties.account}/app/${properties.application}/dashboard"
|
||||
}
|
||||
|
||||
ConfigurationHelper configurationHelper = ConfigurationHelper.newInstance(step, deploymentConfiguration)
|
||||
|
||||
configurationHelper
|
||||
.withMandatoryProperty('host')
|
||||
.withMandatoryProperty('account')
|
||||
|
||||
if (deployMode == DeployMode.MTA) {
|
||||
return "https://account.${deploymentConfiguration.host}/cockpit#" +
|
||||
"/acc/${deploymentConfiguration.account}/mtas"
|
||||
}
|
||||
|
||||
configurationHelper
|
||||
.withMandatoryProperty('application')
|
||||
|
||||
return "https://account.${deploymentConfiguration.host}/cockpit#" +
|
||||
"/acc/${deploymentConfiguration.account}/app/${deploymentConfiguration.application}/dashboard"
|
||||
}
|
||||
@ -84,17 +72,9 @@ class NeoCommandHelper {
|
||||
return "${properties.host}/${properties.account}/${properties.application}"
|
||||
}
|
||||
|
||||
ConfigurationHelper configurationHelper = ConfigurationHelper.newInstance(step, deploymentConfiguration)
|
||||
configurationHelper
|
||||
.withMandatoryProperty('host')
|
||||
.withMandatoryProperty('account')
|
||||
|
||||
|
||||
String resource = "${deploymentConfiguration.host}/${deploymentConfiguration.account}"
|
||||
|
||||
if (deployMode == DeployMode.WAR_PARAMS) {
|
||||
configurationHelper
|
||||
.withMandatoryProperty('application')
|
||||
|
||||
resource += "/${deploymentConfiguration.application}"
|
||||
}
|
||||
@ -115,17 +95,10 @@ class NeoCommandHelper {
|
||||
return "${deploymentConfiguration.propertiesFile} ${usernamePassword}"
|
||||
}
|
||||
|
||||
ConfigurationHelper configurationHelper = ConfigurationHelper.newInstance(step, deploymentConfiguration)
|
||||
configurationHelper
|
||||
.withMandatoryProperty('host')
|
||||
.withMandatoryProperty('account')
|
||||
|
||||
String targetArgs = "--host ${BashUtils.quoteAndEscape(deploymentConfiguration.host)}"
|
||||
targetArgs += " --account ${BashUtils.quoteAndEscape(deploymentConfiguration.account)}"
|
||||
|
||||
if (deployMode == DeployMode.WAR_PARAMS) {
|
||||
configurationHelper
|
||||
.withMandatoryProperty('application')
|
||||
|
||||
targetArgs += " --application ${BashUtils.quoteAndEscape(deploymentConfiguration.application)}"
|
||||
}
|
||||
@ -138,13 +111,8 @@ class NeoCommandHelper {
|
||||
return ""
|
||||
}
|
||||
|
||||
ConfigurationHelper configurationHelper = ConfigurationHelper.newInstance(step, deploymentConfiguration)
|
||||
|
||||
String args = ""
|
||||
configurationHelper.withMandatoryProperty('runtime')
|
||||
args += " --runtime ${BashUtils.quoteAndEscape(deploymentConfiguration.runtime)}"
|
||||
|
||||
configurationHelper.withMandatoryProperty('runtimeVersion')
|
||||
args += " --runtime-version ${BashUtils.quoteAndEscape(deploymentConfiguration.runtimeVersion)}"
|
||||
|
||||
if (deploymentConfiguration.size) {
|
||||
|
@ -78,7 +78,7 @@ class ArtifactSetVersionTest extends BasePiperTest {
|
||||
return closure()
|
||||
})
|
||||
|
||||
shellRule.setReturnValue("date --universal +'%Y%m%d%H%M%S'", '20180101010203')
|
||||
shellRule.setReturnValue("date --utc +'%Y%m%d%H%M%S'", '20180101010203')
|
||||
shellRule.setReturnValue('git diff --quiet HEAD', 0)
|
||||
|
||||
helper.registerAllowedMethod('fileExists', [String.class], {true})
|
||||
|
@ -4,7 +4,7 @@ import org.junit.Test
|
||||
import org.junit.rules.ExpectedException
|
||||
import org.junit.rules.RuleChain
|
||||
|
||||
import com.sap.piper.GitUtils
|
||||
import com.sap.piper.cm.BackendType
|
||||
import com.sap.piper.cm.ChangeManagement
|
||||
import com.sap.piper.cm.ChangeManagementException
|
||||
|
||||
@ -52,6 +52,12 @@ class CheckChangeInDevelopmentTest extends BasePiperTest {
|
||||
failIfStatusIsNotInDevelopment: true)
|
||||
|
||||
assert cmUtilReceivedParams == [
|
||||
docker: [
|
||||
image: 'ppiper/cm-client',
|
||||
options:[],
|
||||
envVars:[:],
|
||||
pullImage:true,
|
||||
],
|
||||
changeId: '001',
|
||||
endpoint: 'https://example.org/cm',
|
||||
credentialsId: 'CM',
|
||||
@ -155,6 +161,23 @@ class CheckChangeInDevelopmentTest extends BasePiperTest {
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void stageConfigIsNotConsideredWithParamKeysTest() {
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration = [stages:[foo:[changeDocumentId:'12345']]]
|
||||
ChangeManagement cm = getChangeManagementUtils(true, '')
|
||||
|
||||
thrown.expect(IllegalArgumentException)
|
||||
thrown.expectMessage('No changeDocumentId provided.')
|
||||
|
||||
stepRule.step.checkChangeInDevelopment(
|
||||
script: nullScript,
|
||||
cmUtils: cm,
|
||||
changeManagement: [type: BackendType.SOLMAN,
|
||||
endpoint: 'https://example.org/cm'],
|
||||
stageName: 'foo')
|
||||
}
|
||||
|
||||
private ChangeManagement getChangeManagementUtils(boolean inDevelopment, String changeDocumentId = '001') {
|
||||
|
||||
return new ChangeManagement(nullScript, null) {
|
||||
@ -167,7 +190,8 @@ class CheckChangeInDevelopmentTest extends BasePiperTest {
|
||||
return changeDocumentId
|
||||
}
|
||||
|
||||
boolean isChangeInDevelopment(String changeId, String endpoint, String credentialsId, String cmclientOpts) {
|
||||
boolean isChangeInDevelopment(Map docker, String changeId, String endpoint, String credentialsId, String cmclientOpts) {
|
||||
cmUtilReceivedParams.docker = docker
|
||||
cmUtilReceivedParams.changeId = changeId
|
||||
cmUtilReceivedParams.endpoint = endpoint
|
||||
cmUtilReceivedParams.credentialsId = credentialsId
|
||||
|
@ -96,7 +96,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
|
||||
stageName: 'acceptance',
|
||||
])
|
||||
// asserts
|
||||
assertThat(loggingRule.log, containsString('[cloudFoundryDeploy] General parameters: deployTool=, deployType=standard, cfApiEndpoint=https://api.cf.eu10.hana.ondemand.com, cfOrg=testOrg, cfSpace=testSpace, cfCredentialsId=myCreds, deployUser=testUser'))
|
||||
assertThat(loggingRule.log, containsString('[cloudFoundryDeploy] General parameters: deployTool=, deployType=standard, cfApiEndpoint=https://api.cf.eu10.hana.ondemand.com, cfOrg=testOrg, cfSpace=testSpace, cfCredentialsId=myCreds'))
|
||||
}
|
||||
|
||||
@Test
|
||||
@ -125,7 +125,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
|
||||
stageName: 'acceptance'
|
||||
])
|
||||
// asserts
|
||||
assertThat(loggingRule.log, containsString('[cloudFoundryDeploy] General parameters: deployTool=notAvailable, deployType=standard, cfApiEndpoint=https://api.cf.eu10.hana.ondemand.com, cfOrg=testOrg, cfSpace=testSpace, cfCredentialsId=myCreds, deployUser=testUser'))
|
||||
assertThat(loggingRule.log, containsString('[cloudFoundryDeploy] General parameters: deployTool=notAvailable, deployType=standard, cfApiEndpoint=https://api.cf.eu10.hana.ondemand.com, cfOrg=testOrg, cfSpace=testSpace, cfCredentialsId=myCreds'))
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -20,6 +20,7 @@ import org.junit.rules.RuleChain
|
||||
import groovy.io.FileType
|
||||
import hudson.AbortException
|
||||
import util.BasePiperTest
|
||||
import util.JenkinsReadYamlRule
|
||||
import util.JenkinsStepRule
|
||||
import util.Rules
|
||||
|
||||
@ -30,6 +31,7 @@ public class CommonStepsTest extends BasePiperTest{
|
||||
|
||||
@Rule
|
||||
public RuleChain ruleChain = Rules.getCommonRules(this)
|
||||
.around(new JenkinsReadYamlRule(this))
|
||||
|
||||
/*
|
||||
* With that test we ensure the very first action inside a method body of a call method
|
||||
@ -49,8 +51,7 @@ public class CommonStepsTest extends BasePiperTest{
|
||||
'pipelineExecute',
|
||||
'piperPipeline',
|
||||
'prepareDefaultValues',
|
||||
'setupCommonPipelineEnvironment',
|
||||
'toolValidate',
|
||||
'setupCommonPipelineEnvironment'
|
||||
]
|
||||
|
||||
List steps = getSteps().stream()
|
||||
@ -101,7 +102,6 @@ public class CommonStepsTest extends BasePiperTest{
|
||||
}
|
||||
|
||||
private static fieldRelatedWhitelist = [
|
||||
'toolValidate', // step is intended to be configured by other steps
|
||||
'durationMeasure', // only expects parameters via signature
|
||||
'prepareDefaultValues', // special step (infrastructure)
|
||||
'piperPipeline', // special step (infrastructure)
|
||||
|
@ -7,6 +7,7 @@ import org.junit.rules.ExpectedException
|
||||
import org.junit.rules.RuleChain
|
||||
|
||||
|
||||
import groovy.json.JsonSlurper
|
||||
import util.BasePiperTest
|
||||
import util.JenkinsDockerExecuteRule
|
||||
import util.JenkinsLoggingRule
|
||||
@ -54,7 +55,8 @@ class DockerExecuteOnKubernetesTest extends BasePiperTest {
|
||||
def portList = []
|
||||
def containerCommands = []
|
||||
def pullImageMap = [:]
|
||||
|
||||
def namespace
|
||||
def securityContext
|
||||
|
||||
@Before
|
||||
void init() {
|
||||
@ -71,21 +73,25 @@ class DockerExecuteOnKubernetesTest extends BasePiperTest {
|
||||
helper.registerAllowedMethod('podTemplate', [Map.class, Closure.class], { Map options, Closure body ->
|
||||
podName = options.name
|
||||
podLabel = options.label
|
||||
options.containers.each { option ->
|
||||
containersList.add(option.name)
|
||||
imageList.add(option.image.toString())
|
||||
envList.add(option.envVars)
|
||||
portList.add(option.ports)
|
||||
if (option.command) {
|
||||
containerCommands.add(option.command)
|
||||
namespace = options.namespace
|
||||
def podSpec = new JsonSlurper().parseText(options.yaml) // this yaml is actually json
|
||||
def containers = podSpec.spec.containers
|
||||
securityContext = podSpec.spec.securityContext
|
||||
|
||||
containers.each { container ->
|
||||
containersList.add(container.name)
|
||||
imageList.add(container.image.toString())
|
||||
envList.add(container.env)
|
||||
if(container.ports) {
|
||||
portList.add(container.ports)
|
||||
}
|
||||
pullImageMap.put(option.image.toString(), option.alwaysPullImage)
|
||||
if (container.command) {
|
||||
containerCommands.add(container.command)
|
||||
}
|
||||
pullImageMap.put(container.image.toString(), container.imagePullPolicy == "Always")
|
||||
}
|
||||
body()
|
||||
})
|
||||
helper.registerAllowedMethod('node', [String.class, Closure.class], { String nodeName, Closure body -> body() })
|
||||
helper.registerAllowedMethod('envVar', [Map.class], { Map option -> return option })
|
||||
helper.registerAllowedMethod('containerTemplate', [Map.class], { Map option -> return option })
|
||||
}
|
||||
|
||||
@Test
|
||||
@ -234,7 +240,7 @@ class DockerExecuteOnKubernetesTest extends BasePiperTest {
|
||||
],
|
||||
containerName: 'mavenexecute',
|
||||
containerPortMappings: [
|
||||
'selenium/standalone-chrome': [[containerPort: 4444, hostPort: 4444]]
|
||||
'selenium/standalone-chrome': [[containerPort: 4444]]
|
||||
],
|
||||
containerWorkspaces: [
|
||||
'selenium/standalone-chrome': ''
|
||||
@ -257,10 +263,9 @@ class DockerExecuteOnKubernetesTest extends BasePiperTest {
|
||||
hasItem('maven:3.5-jdk-8-alpine'),
|
||||
hasItem('selenium/standalone-chrome'),
|
||||
))
|
||||
assertThat(portList, hasItem(hasItem([name: 'selenium0', containerPort: 4444, hostPort: 4444])))
|
||||
assertThat(portMapping, hasItem([name: 'selenium0', containerPort: 4444, hostPort: 4444]))
|
||||
assertThat(portList, hasItem([[name: 'selenium0', containerPort: 4444]]))
|
||||
assertThat(containerCommands.size(), is(1))
|
||||
assertThat(envList, hasItem(hasItem(allOf(hasEntry('key', 'customEnvKey'), hasEntry ('value','customEnvValue')))))
|
||||
assertThat(envList, hasItem(hasItem(allOf(hasEntry('name', 'customEnvKey'), hasEntry ('value','customEnvValue')))))
|
||||
}
|
||||
|
||||
@Test
|
||||
@ -286,7 +291,7 @@ class DockerExecuteOnKubernetesTest extends BasePiperTest {
|
||||
) {
|
||||
//nothing to exeute
|
||||
}
|
||||
assertThat(containerCommands, hasItem('/busybox/tail -f /dev/null'))
|
||||
assertThat(containerCommands, hasItem(['/bin/sh', '-c', '/busybox/tail -f /dev/null']))
|
||||
}
|
||||
|
||||
@Test
|
||||
@ -334,6 +339,36 @@ class DockerExecuteOnKubernetesTest extends BasePiperTest {
|
||||
assertTrue(bodyExecuted)
|
||||
}
|
||||
|
||||
@Test
|
||||
void testDockerExecuteOnKubernetesWithCustomNamespace() {
|
||||
def expectedNamespace = "sandbox"
|
||||
nullScript.commonPipelineEnvironment.configuration = [general: [jenkinsKubernetes: [namespace: expectedNamespace]]]
|
||||
|
||||
stepRule.step.dockerExecuteOnKubernetes(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
dockerImage: 'maven:3.5-jdk-8-alpine',
|
||||
) { bodyExecuted = true }
|
||||
assertTrue(bodyExecuted)
|
||||
assertThat(namespace, is(equalTo(expectedNamespace)))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testDockerExecuteOnKubernetesWithSecurityContext() {
|
||||
def expectedSecurityContext = [ runAsUser: 1000, fsGroup: 1000 ]
|
||||
nullScript.commonPipelineEnvironment.configuration = [general: [jenkinsKubernetes: [
|
||||
securityContext: expectedSecurityContext]]]
|
||||
|
||||
stepRule.step.dockerExecuteOnKubernetes(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
dockerImage: 'maven:3.5-jdk-8-alpine',
|
||||
) { bodyExecuted = true }
|
||||
assertTrue(bodyExecuted)
|
||||
assertThat(securityContext, is(equalTo(expectedSecurityContext)))
|
||||
}
|
||||
|
||||
|
||||
private container(options, body) {
|
||||
containerName = options.name
|
||||
containerShell = options.shell
|
||||
|
@ -9,6 +9,7 @@ import org.junit.rules.RuleChain
|
||||
import util.BasePiperTest
|
||||
import util.JenkinsLoggingRule
|
||||
import util.JenkinsReadYamlRule
|
||||
import util.JenkinsShellCallRule
|
||||
import util.JenkinsStepRule
|
||||
import util.PluginMock
|
||||
import util.Rules
|
||||
@ -23,6 +24,7 @@ class DockerExecuteTest extends BasePiperTest {
|
||||
private DockerMock docker
|
||||
private JenkinsLoggingRule loggingRule = new JenkinsLoggingRule(this)
|
||||
private JenkinsStepRule stepRule = new JenkinsStepRule(this)
|
||||
private JenkinsShellCallRule shellRule = new JenkinsShellCallRule(this)
|
||||
|
||||
@Rule
|
||||
public RuleChain ruleChain = Rules
|
||||
@ -30,8 +32,8 @@ class DockerExecuteTest extends BasePiperTest {
|
||||
.around(new JenkinsReadYamlRule(this))
|
||||
.around(loggingRule)
|
||||
.around(stepRule)
|
||||
.around(shellRule)
|
||||
|
||||
int dockerPsReturnValue = 0
|
||||
def bodyExecuted
|
||||
def containerName
|
||||
|
||||
@ -41,7 +43,7 @@ class DockerExecuteTest extends BasePiperTest {
|
||||
docker = new DockerMock()
|
||||
JenkinsUtils.metaClass.static.isPluginActive = {def s -> new PluginMock(s).isActive()}
|
||||
binding.setVariable('docker', docker)
|
||||
helper.registerAllowedMethod('sh', [Map.class], {return dockerPsReturnValue})
|
||||
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, "docker .*", 0)
|
||||
}
|
||||
|
||||
@Test
|
||||
@ -201,7 +203,7 @@ class DockerExecuteTest extends BasePiperTest {
|
||||
|
||||
@Test
|
||||
void testDockerNotInstalledResultsInLocalExecution() throws Exception {
|
||||
dockerPsReturnValue = 1
|
||||
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, "docker .*", 1)
|
||||
stepRule.step.dockerExecute(script: nullScript,
|
||||
dockerOptions: '-it') {
|
||||
bodyExecuted = true
|
||||
@ -241,6 +243,18 @@ class DockerExecuteTest extends BasePiperTest {
|
||||
))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSidecarHealthCheck(){
|
||||
stepRule.step.dockerExecute(
|
||||
script: nullScript,
|
||||
dockerImage: 'maven:3.5-jdk-8-alpine',
|
||||
sidecarImage: 'selenium/standalone-chrome',
|
||||
sidecarName: 'testAlias',
|
||||
sidecarReadyCommand: "isReady.sh"
|
||||
) {}
|
||||
assertThat(shellRule.shell, hasItem("docker exec uniqueId isReady.sh"))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSidecarKubernetes(){
|
||||
boolean dockerExecuteOnKubernetesCalled = false
|
||||
@ -275,6 +289,33 @@ class DockerExecuteTest extends BasePiperTest {
|
||||
assertThat(dockerExecuteOnKubernetesCalled, is(true))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSidecarKubernetesHealthCheck(){
|
||||
binding.setVariable('env', [ON_K8S: 'true'])
|
||||
|
||||
helper.registerAllowedMethod('dockerExecuteOnKubernetes', [Map.class, Closure.class], { params, body ->
|
||||
body()
|
||||
})
|
||||
|
||||
def containerCalled = false
|
||||
helper.registerAllowedMethod('container', [Map.class, Closure.class], { params, body ->
|
||||
containerCalled = true
|
||||
assertThat(params.name, is('testAlias'))
|
||||
body()
|
||||
})
|
||||
|
||||
stepRule.step.dockerExecute(
|
||||
script: nullScript,
|
||||
dockerImage: 'maven:3.5-jdk-8-alpine',
|
||||
sidecarImage: 'selenium/standalone-chrome',
|
||||
sidecarName: 'testAlias',
|
||||
sidecarReadyCommand: "isReady.sh"
|
||||
) {}
|
||||
|
||||
assertThat(containerCalled, is(true))
|
||||
assertThat(shellRule.shell, hasItem("isReady.sh"))
|
||||
}
|
||||
|
||||
private class DockerMock {
|
||||
private String imageName
|
||||
private boolean imagePulled = false
|
||||
|
@ -1,10 +1,16 @@
|
||||
#!groovy
|
||||
|
||||
import com.sap.piper.analytics.InfluxData
|
||||
|
||||
import org.junit.Rule
|
||||
import org.junit.Test
|
||||
import util.BasePiperTest
|
||||
|
||||
import static org.junit.Assert.assertTrue
|
||||
import static org.hamcrest.Matchers.hasKey
|
||||
import static org.hamcrest.Matchers.is
|
||||
import static org.hamcrest.Matchers.not
|
||||
|
||||
import static org.junit.Assert.assertThat
|
||||
import org.junit.rules.RuleChain
|
||||
|
||||
import util.Rules
|
||||
@ -27,8 +33,12 @@ class DurationMeasureTest extends BasePiperTest {
|
||||
stepRule.step.durationMeasure(script: nullScript, measurementName: 'test') {
|
||||
bodyExecuted = true
|
||||
}
|
||||
assertTrue(nullScript.commonPipelineEnvironment.getPipelineMeasurement('test') != null)
|
||||
assertTrue(bodyExecuted)
|
||||
// doesnt work
|
||||
//assertThat(InfluxData.getInstance().getFields(), hasEntry('pipeline_data', hasEntry('test', is(anything()))))
|
||||
assertThat(InfluxData.getInstance().getFields(), hasKey('pipeline_data'))
|
||||
assertThat(InfluxData.getInstance().getFields().pipeline_data, hasKey('test'))
|
||||
assertThat(InfluxData.getInstance().getFields().pipeline_data.test, is(not(null)))
|
||||
assertThat(bodyExecuted, is(true))
|
||||
assertJobStatusSuccess()
|
||||
}
|
||||
}
|
||||
|
@ -72,14 +72,6 @@ class FioriOnCloudPlatformPipelineTest extends BasePiperTest {
|
||||
// needed since we have dockerExecute inside mtaBuild
|
||||
JenkinsUtils.metaClass.static.isPluginActive = {def s -> false}
|
||||
|
||||
//
|
||||
// Things we validate:
|
||||
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*echo \\$JAVA_HOME.*', '/opt/sap/java')
|
||||
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*echo \\$MTA_JAR_LOCATION.*', '/opt/sap')
|
||||
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*echo \\$NEO_HOME.*', '/opt/sap/neo')
|
||||
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, ".*bin/java -version.*", '1.8.0') // the java version
|
||||
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, ".*bin/java -jar .*mta.jar", '1.36.0') // the mta version
|
||||
|
||||
//
|
||||
// there is a check for the mta.yaml file and for the deployable test.mtar file
|
||||
helper.registerAllowedMethod('fileExists', [String],{
|
||||
@ -93,6 +85,8 @@ class FioriOnCloudPlatformPipelineTest extends BasePiperTest {
|
||||
it == 'test.mtar'
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod("deleteDir",[], null)
|
||||
|
||||
//
|
||||
// the properties below we read out of the yaml file
|
||||
readYamlRule.registerYaml('mta.yaml', ('''
|
||||
@ -105,6 +99,8 @@ class FioriOnCloudPlatformPipelineTest extends BasePiperTest {
|
||||
// to be able to extend the path we have to have some initial value.
|
||||
binding.setVariable('PATH', '/usr/bin')
|
||||
|
||||
binding.setVariable('scm', null)
|
||||
|
||||
helper.registerAllowedMethod('pwd', [], { return "./" })
|
||||
}
|
||||
|
||||
@ -115,9 +111,11 @@ class FioriOnCloudPlatformPipelineTest extends BasePiperTest {
|
||||
.commonPipelineEnvironment
|
||||
.configuration = [steps:
|
||||
[neoDeploy:
|
||||
[ host: 'hana.example.com',
|
||||
account: 'myTestAccount',
|
||||
]
|
||||
[neo:
|
||||
[ host: 'hana.example.com',
|
||||
account: 'myTestAccount',
|
||||
]
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
@ -126,7 +124,7 @@ class FioriOnCloudPlatformPipelineTest extends BasePiperTest {
|
||||
//
|
||||
// the mta build call:
|
||||
assertThat(shellRule.shell, hasItem(
|
||||
allOf( containsString('java -jar /opt/sap/mta.jar'),
|
||||
allOf( containsString('java -jar /opt/sap/mta/lib/mta.jar'), // default mtaJarLocation
|
||||
containsString('--mtar test.mtar'),
|
||||
containsString('--build-target=NEO'),
|
||||
containsString('build'))))
|
||||
@ -139,7 +137,7 @@ class FioriOnCloudPlatformPipelineTest extends BasePiperTest {
|
||||
// the neo deploy call:
|
||||
Assert.assertThat(shellRule.shell,
|
||||
new CommandLineMatcher()
|
||||
.hasProlog("\"/opt/sap/neo/tools/neo.sh\" deploy-mta")
|
||||
.hasProlog("neo.sh deploy-mta")
|
||||
.hasSingleQuotedOption('host', 'hana\\.example\\.com')
|
||||
.hasSingleQuotedOption('account', 'myTestAccount')
|
||||
.hasSingleQuotedOption('password', 'terceSpot')
|
||||
|
@ -1,4 +1,6 @@
|
||||
#!groovy
|
||||
import hudson.AbortException
|
||||
|
||||
import static org.hamcrest.Matchers.is
|
||||
import static org.hamcrest.Matchers.not
|
||||
import static org.hamcrest.Matchers.containsString
|
||||
@ -11,6 +13,7 @@ import static org.junit.Assert.assertThat
|
||||
|
||||
import util.BasePiperTest
|
||||
import util.JenkinsLoggingRule
|
||||
import util.JenkinsReadYamlRule
|
||||
import util.JenkinsStepRule
|
||||
import util.Rules
|
||||
|
||||
@ -22,6 +25,7 @@ class HandlePipelineStepErrorsTest extends BasePiperTest {
|
||||
@Rule
|
||||
public RuleChain rules = Rules
|
||||
.getCommonRules(this)
|
||||
.around(new JenkinsReadYamlRule(this))
|
||||
.around(loggingRule)
|
||||
.around(stepRule)
|
||||
.around(thrown)
|
||||
@ -79,4 +83,82 @@ class HandlePipelineStepErrorsTest extends BasePiperTest {
|
||||
assertThat(loggingRule.log, containsString('[something:anything]'))
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testHandleErrorsIgnoreFailure() {
|
||||
def errorOccured = false
|
||||
try {
|
||||
stepRule.step.handlePipelineStepErrors([
|
||||
stepName: 'test',
|
||||
stepParameters: [jenkinsUtilsStub: jenkinsUtils, script: nullScript],
|
||||
failOnError: false
|
||||
]) {
|
||||
throw new AbortException('TestError')
|
||||
}
|
||||
} catch (err) {
|
||||
errorOccured = true
|
||||
}
|
||||
assertThat(errorOccured, is(false))
|
||||
assertThat(nullScript.currentBuild.result, is('UNSTABLE'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testHandleErrorsIgnoreFailureBlacklist() {
|
||||
def errorOccured = false
|
||||
|
||||
//define blacklist in defaults
|
||||
helper.registerAllowedMethod("readYaml", [Map], { Map m ->
|
||||
return [steps: [handlePipelineStepErrors: [mandatorySteps: ['step1', 'test']]]]
|
||||
})
|
||||
|
||||
try {
|
||||
stepRule.step.handlePipelineStepErrors([
|
||||
stepName: 'test',
|
||||
stepParameters: [jenkinsUtilsStub: jenkinsUtils, script: nullScript],
|
||||
failOnError: false
|
||||
]) {
|
||||
throw new AbortException('TestError')
|
||||
}
|
||||
} catch (err) {
|
||||
errorOccured = true
|
||||
}
|
||||
assertThat(errorOccured, is(true))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testHandleErrorsIgnoreFailureNoScript() {
|
||||
def errorOccured = false
|
||||
try {
|
||||
stepRule.step.handlePipelineStepErrors([
|
||||
stepName: 'test',
|
||||
stepParameters: [jenkinsUtilsStub: jenkinsUtils],
|
||||
failOnError: false
|
||||
]) {
|
||||
throw new AbortException('TestError')
|
||||
}
|
||||
} catch (err) {
|
||||
errorOccured = true
|
||||
}
|
||||
assertThat(errorOccured, is(false))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testHandleErrorsTimeout() {
|
||||
def timeout = 0
|
||||
helper.registerAllowedMethod('timeout', [Map.class, Closure.class], {m, body ->
|
||||
timeout = m.time
|
||||
throw new org.jenkinsci.plugins.workflow.steps.FlowInterruptedException(hudson.model.Result.ABORTED, new jenkins.model.CauseOfInterruption.UserInterruption('Test'))
|
||||
})
|
||||
|
||||
stepRule.step.handlePipelineStepErrors([
|
||||
stepName: 'test',
|
||||
stepParameters: [jenkinsUtilsStub: jenkinsUtils, script: nullScript],
|
||||
failOnError: false,
|
||||
stepTimeouts: [test: 10]
|
||||
]) {
|
||||
//do something
|
||||
}
|
||||
assertThat(timeout, is(10))
|
||||
assertThat(nullScript.currentBuild.result, is('UNSTABLE'))
|
||||
}
|
||||
}
|
@ -1,5 +1,7 @@
|
||||
#!groovy
|
||||
import com.sap.piper.DefaultValueCache
|
||||
import com.sap.piper.analytics.InfluxData
|
||||
|
||||
import org.junit.Before
|
||||
import org.junit.Rule
|
||||
import org.junit.Test
|
||||
@ -147,9 +149,9 @@ class InfluxWriteDataTest extends BasePiperTest {
|
||||
void testInfluxCustomDataFromCPE() {
|
||||
nullScript.commonPipelineEnvironment.reset()
|
||||
nullScript.commonPipelineEnvironment.setArtifactVersion('1.2.3')
|
||||
nullScript.commonPipelineEnvironment.setInfluxCustomDataTagsEntry('tag1', 'testTag1')
|
||||
nullScript.commonPipelineEnvironment.setInfluxCustomDataMapEntry('test_data', 'key1', 'keyValue1')
|
||||
nullScript.commonPipelineEnvironment.setInfluxCustomDataMapTagsEntry('test_data', 'tag1', 'tagValue1')
|
||||
InfluxData.addTag('jenkins_custom_data', 'tag1', 'testTag1')
|
||||
InfluxData.addField('test_data', 'key1', 'keyValue1')
|
||||
InfluxData.addTag('test_data', 'tag1', 'tagValue1')
|
||||
stepRule.step.influxWriteData(
|
||||
//juStabUtils: utils,
|
||||
script: nullScript,
|
||||
|
@ -17,9 +17,6 @@ import util.Rules
|
||||
|
||||
public class MtaBuildTest extends BasePiperTest {
|
||||
|
||||
def toolMtaValidateCalled = false
|
||||
def toolJavaValidateCalled = false
|
||||
|
||||
private ExpectedException thrown = new ExpectedException()
|
||||
private JenkinsLoggingRule loggingRule = new JenkinsLoggingRule(this)
|
||||
private JenkinsShellCallRule shellRule = new JenkinsShellCallRule(this)
|
||||
@ -89,9 +86,6 @@ public class MtaBuildTest extends BasePiperTest {
|
||||
stepRule.step.mtaBuild(script: nullScript, mtaJarLocation: '/mylocation/mta/mta.jar', buildTarget: 'NEO')
|
||||
|
||||
assert shellRule.shell.find { c -> c.contains('-jar /mylocation/mta/mta.jar --mtar')}
|
||||
|
||||
assert loggingRule.log.contains("SAP Multitarget Application Archive Builder file '/mylocation/mta/mta.jar' retrieved from configuration.")
|
||||
assert loggingRule.log.contains("Using SAP Multitarget Application Archive Builder '/mylocation/mta/mta.jar'.")
|
||||
}
|
||||
|
||||
|
||||
@ -130,19 +124,6 @@ public class MtaBuildTest extends BasePiperTest {
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void mtaJarLocationFromEnvironmentTest() {
|
||||
|
||||
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*\\$MTA_JAR_LOCATION.*', '/env/mta/mta.jar')
|
||||
|
||||
stepRule.step.mtaBuild(script: nullScript, buildTarget: 'NEO')
|
||||
|
||||
assert shellRule.shell.find { c -> c.contains("-jar /env/mta/mta.jar --mtar")}
|
||||
assert loggingRule.log.contains("SAP Multitarget Application Archive Builder file '/env/mta/mta.jar' retrieved from environment.")
|
||||
assert loggingRule.log.contains("Using SAP Multitarget Application Archive Builder '/env/mta/mta.jar'.")
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void mtaJarLocationFromCustomStepConfigurationTest() {
|
||||
|
||||
@ -151,9 +132,7 @@ public class MtaBuildTest extends BasePiperTest {
|
||||
stepRule.step.mtaBuild(script: nullScript,
|
||||
buildTarget: 'NEO')
|
||||
|
||||
assert shellRule.shell.find(){ c -> c.contains("-jar /config/mta/mta.jar --mtar")}
|
||||
assert loggingRule.log.contains("SAP Multitarget Application Archive Builder file '/config/mta/mta.jar' retrieved from configuration.")
|
||||
assert loggingRule.log.contains("Using SAP Multitarget Application Archive Builder '/config/mta/mta.jar'.")
|
||||
assert shellRule.shell.find(){ c -> c.contains('java -jar /config/mta/mta.jar --mtar')}
|
||||
}
|
||||
|
||||
|
||||
@ -163,9 +142,7 @@ public class MtaBuildTest extends BasePiperTest {
|
||||
stepRule.step.mtaBuild(script: nullScript,
|
||||
buildTarget: 'NEO')
|
||||
|
||||
assert shellRule.shell.find(){ c -> c.contains("-jar /opt/sap/mta/lib/mta.jar --mtar")}
|
||||
assert loggingRule.log.contains("SAP Multitarget Application Archive Builder file '/opt/sap/mta/lib/mta.jar' retrieved from configuration.")
|
||||
assert loggingRule.log.contains("Using SAP Multitarget Application Archive Builder '/opt/sap/mta/lib/mta.jar'.")
|
||||
assert shellRule.shell.find(){ c -> c.contains('java -jar /opt/sap/mta/lib/mta.jar --mtar')}
|
||||
}
|
||||
|
||||
|
||||
|
256
test/groovy/MulticloudDeployTest.groovy
Normal file
256
test/groovy/MulticloudDeployTest.groovy
Normal file
@ -0,0 +1,256 @@
|
||||
import com.sap.piper.JenkinsUtils
|
||||
import com.sap.piper.Utils
|
||||
|
||||
import hudson.AbortException
|
||||
|
||||
import org.junit.Assert
|
||||
import org.junit.Before
|
||||
import org.junit.Rule
|
||||
import org.junit.Test
|
||||
import org.junit.rules.ExpectedException
|
||||
import org.junit.rules.RuleChain
|
||||
|
||||
import util.*
|
||||
|
||||
|
||||
class MulticloudDeployTest extends BasePiperTest {
|
||||
|
||||
private ExpectedException thrown = new ExpectedException().none()
|
||||
private JenkinsStepRule stepRule = new JenkinsStepRule(this)
|
||||
private JenkinsMockStepRule neoDeployRule = new JenkinsMockStepRule(this, 'neoDeploy')
|
||||
private JenkinsMockStepRule cloudFoundryDeployRule = new JenkinsMockStepRule(this, 'cloudFoundryDeploy')
|
||||
private JenkinsReadMavenPomRule readMavenPomRule = new JenkinsReadMavenPomRule(this, 'test/resources/deploy')
|
||||
|
||||
private Map neo1 = [:]
|
||||
private Map neo2 = [:]
|
||||
private Map cloudFoundry1 = [:]
|
||||
private Map cloudFoundry2 = [:]
|
||||
|
||||
@Rule
|
||||
public RuleChain ruleChain = Rules
|
||||
.getCommonRules(this)
|
||||
.around(new JenkinsReadYamlRule(this))
|
||||
.around(thrown)
|
||||
.around(stepRule)
|
||||
.around(neoDeployRule)
|
||||
.around(cloudFoundryDeployRule)
|
||||
.around(readMavenPomRule)
|
||||
|
||||
private Map neoDeployParameters = [:]
|
||||
private Map cloudFoundryDeployParameters = [:]
|
||||
|
||||
@Before
|
||||
void init() {
|
||||
|
||||
neo1 = [
|
||||
host: 'test.deploy.host1.com',
|
||||
account: 'trialuser1',
|
||||
credentialsId: 'credentialsId1'
|
||||
]
|
||||
|
||||
neo2 = [
|
||||
host: 'test.deploy.host2.com',
|
||||
account: 'trialuser2',
|
||||
credentialsId: 'credentialsId2'
|
||||
]
|
||||
|
||||
cloudFoundry1 = [
|
||||
appName:'testAppName1',
|
||||
manifest: 'test.yml',
|
||||
org: 'testOrg1',
|
||||
space: 'testSpace1',
|
||||
credentialsId: 'cfCredentialsId1'
|
||||
]
|
||||
|
||||
cloudFoundry2 = [
|
||||
appName:'testAppName2',
|
||||
manifest: 'test.yml',
|
||||
org: 'testOrg2',
|
||||
space: 'testSpace2',
|
||||
credentialsId: 'cfCredentialsId2'
|
||||
]
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration = [
|
||||
general: [
|
||||
neoTargets: [
|
||||
neo1, neo2
|
||||
],
|
||||
cfTargets: [
|
||||
cloudFoundry1, cloudFoundry2
|
||||
]
|
||||
],
|
||||
stages: [
|
||||
acceptance: [
|
||||
org: 'testOrg',
|
||||
space: 'testSpace',
|
||||
deployUser: 'testUser'
|
||||
]
|
||||
],
|
||||
steps: [
|
||||
cloudFoundryDeploy: [
|
||||
deployTool: 'cf_native',
|
||||
deployType: 'blue-green',
|
||||
keepOldInstance: true,
|
||||
cf_native: [
|
||||
dockerImage: 's4sdk/docker-cf-cli',
|
||||
dockerWorkspace: '/home/piper'
|
||||
]
|
||||
]
|
||||
]
|
||||
]
|
||||
}
|
||||
|
||||
@Test
|
||||
void errorNoTargetsDefined() {
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration.general.neoTargets = []
|
||||
nullScript.commonPipelineEnvironment.configuration.general.cfTargets = []
|
||||
|
||||
thrown.expect(Exception)
|
||||
thrown.expectMessage('Deployment skipped because no targets defined!')
|
||||
|
||||
stepRule.step.multicloudDeploy(
|
||||
script: nullScript,
|
||||
stage: 'test'
|
||||
)
|
||||
}
|
||||
|
||||
@Test
|
||||
void errorNoSourceForNeoDeploymentTest() {
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration.general.neoTargets = [neo1]
|
||||
nullScript.commonPipelineEnvironment.configuration.general.cfTargets = []
|
||||
|
||||
thrown.expect(Exception)
|
||||
thrown.expectMessage('ERROR - NO VALUE AVAILABLE FOR source')
|
||||
|
||||
stepRule.step.multicloudDeploy(
|
||||
script: nullScript,
|
||||
stage: 'test'
|
||||
)
|
||||
}
|
||||
|
||||
@Test
|
||||
void neoDeploymentTest() {
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration.general.neoTargets = [neo1]
|
||||
nullScript.commonPipelineEnvironment.configuration.general.cfTargets = []
|
||||
|
||||
stepRule.step.multicloudDeploy(
|
||||
script: nullScript,
|
||||
stage: 'test',
|
||||
source: 'file.mtar'
|
||||
)
|
||||
|
||||
assert neoDeployRule.hasParameter('script', nullScript)
|
||||
assert neoDeployRule.hasParameter('warAction', 'deploy')
|
||||
assert neoDeployRule.hasParameter('source', 'file.mtar')
|
||||
assert neoDeployRule.hasParameter('neo', neo1)
|
||||
}
|
||||
|
||||
@Test
|
||||
void neoRollingUpdateTest() {
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration.general.neoTargets = []
|
||||
nullScript.commonPipelineEnvironment.configuration.general.cfTargets = []
|
||||
|
||||
def neoParam = [
|
||||
host: 'test.param.deploy.host.com',
|
||||
account: 'trialparamNeoUser',
|
||||
credentialsId: 'paramNeoCredentialsId'
|
||||
]
|
||||
|
||||
stepRule.step.multicloudDeploy(
|
||||
script: nullScript,
|
||||
stage: 'test',
|
||||
neoTargets: [neoParam],
|
||||
source: 'file.mtar',
|
||||
enableZeroDowntimeDeployment: true
|
||||
)
|
||||
|
||||
assert neoDeployRule.hasParameter('script', nullScript)
|
||||
assert neoDeployRule.hasParameter('warAction', 'rolling-update')
|
||||
assert neoDeployRule.hasParameter('source', 'file.mtar')
|
||||
assert neoDeployRule.hasParameter('neo', neoParam)
|
||||
}
|
||||
|
||||
@Test
|
||||
void cfDeploymentTest() {
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration.general.neoTargets = []
|
||||
nullScript.commonPipelineEnvironment.configuration.general.cfTargets = []
|
||||
|
||||
def cloudFoundry = [
|
||||
appName:'paramTestAppName',
|
||||
manifest: 'test.yml',
|
||||
org: 'paramTestOrg',
|
||||
space: 'paramTestSpace',
|
||||
credentialsId: 'paramCfCredentialsId'
|
||||
]
|
||||
|
||||
stepRule.step.multicloudDeploy([
|
||||
script: nullScript,
|
||||
stage: 'acceptance',
|
||||
cfTargets: [cloudFoundry]
|
||||
])
|
||||
|
||||
assert cloudFoundryDeployRule.hasParameter('script', nullScript)
|
||||
assert cloudFoundryDeployRule.hasParameter('deployType', 'standard')
|
||||
assert cloudFoundryDeployRule.hasParameter('cloudFoundry', cloudFoundry)
|
||||
assert cloudFoundryDeployRule.hasParameter('mtaPath', nullScript.commonPipelineEnvironment.mtarFilePath)
|
||||
assert cloudFoundryDeployRule.hasParameter('deployTool', 'cf_native')
|
||||
}
|
||||
|
||||
@Test
|
||||
void cfBlueGreenDeploymentTest() {
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration.general.neoTargets = []
|
||||
nullScript.commonPipelineEnvironment.configuration.general.cfTargets = [cloudFoundry1]
|
||||
|
||||
stepRule.step.multicloudDeploy([
|
||||
script: nullScript,
|
||||
stage: 'acceptance',
|
||||
enableZeroDowntimeDeployment: true
|
||||
])
|
||||
|
||||
assert cloudFoundryDeployRule.hasParameter('script', nullScript)
|
||||
assert cloudFoundryDeployRule.hasParameter('deployType', 'blue-green')
|
||||
assert cloudFoundryDeployRule.hasParameter('cloudFoundry', cloudFoundry1)
|
||||
assert cloudFoundryDeployRule.hasParameter('mtaPath', nullScript.commonPipelineEnvironment.mtarFilePath)
|
||||
assert cloudFoundryDeployRule.hasParameter('deployTool', 'cf_native')
|
||||
}
|
||||
|
||||
@Test
|
||||
void multicloudDeploymentTest() {
|
||||
|
||||
stepRule.step.multicloudDeploy([
|
||||
script: nullScript,
|
||||
stage: 'acceptance',
|
||||
enableZeroDowntimeDeployment: true,
|
||||
source: 'file.mtar'
|
||||
])
|
||||
|
||||
assert neoDeployRule.hasParameter('script', nullScript)
|
||||
assert neoDeployRule.hasParameter('warAction', 'rolling-update')
|
||||
assert neoDeployRule.hasParameter('source', 'file.mtar')
|
||||
assert neoDeployRule.hasParameter('neo', neo1)
|
||||
|
||||
assert neoDeployRule.hasParameter('script', nullScript)
|
||||
assert neoDeployRule.hasParameter('warAction', 'rolling-update')
|
||||
assert neoDeployRule.hasParameter('source', 'file.mtar')
|
||||
assert neoDeployRule.hasParameter('neo', neo2)
|
||||
|
||||
assert cloudFoundryDeployRule.hasParameter('script', nullScript)
|
||||
assert cloudFoundryDeployRule.hasParameter('deployType', 'blue-green')
|
||||
assert cloudFoundryDeployRule.hasParameter('cloudFoundry', cloudFoundry1)
|
||||
assert cloudFoundryDeployRule.hasParameter('mtaPath', nullScript.commonPipelineEnvironment.mtarFilePath)
|
||||
assert cloudFoundryDeployRule.hasParameter('deployTool', 'cf_native')
|
||||
|
||||
assert cloudFoundryDeployRule.hasParameter('script', nullScript)
|
||||
assert cloudFoundryDeployRule.hasParameter('deployType', 'blue-green')
|
||||
assert cloudFoundryDeployRule.hasParameter('cloudFoundry', cloudFoundry2)
|
||||
assert cloudFoundryDeployRule.hasParameter('mtaPath', nullScript.commonPipelineEnvironment.mtarFilePath)
|
||||
assert cloudFoundryDeployRule.hasParameter('deployTool', 'cf_native')
|
||||
}
|
||||
|
||||
}
|
@ -1,15 +1,20 @@
|
||||
import com.sap.piper.Utils
|
||||
import hudson.AbortException
|
||||
|
||||
import static org.hamcrest.Matchers.allOf
|
||||
import static org.hamcrest.Matchers.containsString
|
||||
import static org.hamcrest.Matchers.not
|
||||
|
||||
import org.hamcrest.Matchers
|
||||
import org.hamcrest.BaseMatcher
|
||||
import org.hamcrest.Description
|
||||
import org.jenkinsci.plugins.credentialsbinding.impl.CredentialNotFoundException
|
||||
import org.junit.Assert
|
||||
import org.junit.Before
|
||||
import org.junit.BeforeClass
|
||||
import org.junit.ClassRule
|
||||
import org.junit.Rule
|
||||
import org.junit.Test
|
||||
import org.junit.rules.ExpectedException
|
||||
import org.junit.rules.RuleChain
|
||||
import org.junit.rules.TemporaryFolder
|
||||
import util.BasePiperTest
|
||||
import util.CommandLineMatcher
|
||||
import util.JenkinsCredentialsRule
|
||||
@ -21,27 +26,24 @@ import util.JenkinsShellCallRule
|
||||
import util.JenkinsShellCallRule.Type
|
||||
import util.JenkinsStepRule
|
||||
import util.JenkinsWithEnvRule
|
||||
import util.JenkinsFileExistsRule
|
||||
import util.Rules
|
||||
|
||||
class NeoDeployTest extends BasePiperTest {
|
||||
|
||||
def toolJavaValidateCalled = false
|
||||
|
||||
@ClassRule
|
||||
public static TemporaryFolder tmp = new TemporaryFolder()
|
||||
|
||||
private ExpectedException thrown = new ExpectedException().none()
|
||||
private JenkinsLoggingRule loggingRule = new JenkinsLoggingRule(this)
|
||||
private JenkinsShellCallRule shellRule = new JenkinsShellCallRule(this)
|
||||
private JenkinsStepRule stepRule = new JenkinsStepRule(this)
|
||||
private JenkinsLockRule lockRule = new JenkinsLockRule(this)
|
||||
private JenkinsFileExistsRule fileExistsRule = new JenkinsFileExistsRule(this, ['warArchive.war', 'archive.mtar', 'war.properties'])
|
||||
|
||||
|
||||
@Rule
|
||||
public RuleChain ruleChain = Rules
|
||||
.getCommonRules(this)
|
||||
.around(new JenkinsReadYamlRule(this))
|
||||
.around(new JenkinsPropertiesRule(this, propertiesFileName, configProperties))
|
||||
.around(new JenkinsPropertiesRule(this, warPropertiesFileName, warProperties))
|
||||
.around(thrown)
|
||||
.around(loggingRule)
|
||||
.around(shellRule)
|
||||
@ -51,152 +53,31 @@ class NeoDeployTest extends BasePiperTest {
|
||||
.around(stepRule)
|
||||
.around(lockRule)
|
||||
.around(new JenkinsWithEnvRule(this))
|
||||
.around(fileExistsRule)
|
||||
|
||||
|
||||
private static workspacePath
|
||||
private static warArchiveName
|
||||
private static propertiesFileName
|
||||
private static archiveName
|
||||
private static configProperties
|
||||
private static warArchiveName = 'warArchive.war'
|
||||
private static warPropertiesFileName = 'war.properties'
|
||||
private static archiveName = 'archive.mtar'
|
||||
private static warProperties
|
||||
|
||||
|
||||
@BeforeClass
|
||||
static void createTestFiles() {
|
||||
|
||||
workspacePath = "${tmp.getRoot()}"
|
||||
warArchiveName = 'warArchive.war'
|
||||
propertiesFileName = 'config.properties'
|
||||
archiveName = 'archive.mtar'
|
||||
|
||||
configProperties = new Properties()
|
||||
configProperties.put('account', 'trialuser123')
|
||||
configProperties.put('host', 'test.deploy.host.com')
|
||||
configProperties.put('application', 'testApp')
|
||||
|
||||
tmp.newFile(warArchiveName) << 'dummy war archive'
|
||||
tmp.newFile(propertiesFileName) << 'dummy properties file'
|
||||
tmp.newFile(archiveName) << 'dummy archive'
|
||||
}
|
||||
|
||||
@Before
|
||||
void init() {
|
||||
|
||||
warProperties = new Properties()
|
||||
warProperties.put('account', 'trialuser123')
|
||||
warProperties.put('host', 'test.deploy.host.com')
|
||||
warProperties.put('application', 'testApp')
|
||||
|
||||
helper.registerAllowedMethod('dockerExecute', [Map, Closure], null)
|
||||
helper.registerAllowedMethod('fileExists', [String], { s -> return new File(workspacePath, s).exists() })
|
||||
helper.registerAllowedMethod('pwd', [], { return workspacePath })
|
||||
mockShellCommands()
|
||||
helper.registerAllowedMethod('pwd', [], { return './' })
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration = [steps: [neoDeploy: [neo: [host: 'test.deploy.host.com', account: 'trialuser123']]]]
|
||||
}
|
||||
|
||||
@Test
|
||||
void straightForwardTestCompatibilityConfiguration(){
|
||||
shellRule.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.* status .*', 'Status: STARTED')
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration = [
|
||||
steps: [
|
||||
neoDeploy: [
|
||||
host: 'test.deploy.host.com',
|
||||
account: 'trialuser123',
|
||||
neoCredentialsId: 'myCredentialsId'
|
||||
]]]
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript,
|
||||
archivePath: warArchiveName,
|
||||
deployMode: 'warParams',
|
||||
applicationName: 'testApp',
|
||||
runtime: 'neo-javaee6-wp',
|
||||
runtimeVersion: '2.125',
|
||||
warAction: 'rolling-update',
|
||||
vmSize: 'lite')
|
||||
|
||||
Assert.assertThat(shellRule.shell,
|
||||
new CommandLineMatcher().hasProlog("\"/opt/neo/tools/neo.sh\" rolling-update")
|
||||
.hasSingleQuotedOption('host', 'test\\.deploy\\.host\\.com')
|
||||
.hasSingleQuotedOption('account', 'trialuser123')
|
||||
.hasSingleQuotedOption('application', 'testApp')
|
||||
.hasSingleQuotedOption('runtime', 'neo-javaee6-wp')
|
||||
.hasSingleQuotedOption('runtime-version', '2\\.125')
|
||||
.hasSingleQuotedOption('size', 'lite')
|
||||
.hasSingleQuotedOption('user', 'anonymous')
|
||||
.hasSingleQuotedOption('password', '\\*\\*\\*\\*\\*\\*\\*\\*')
|
||||
.hasSingleQuotedOption('source', '.*\\.war'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void straightForwardTestConfigViaConfigProperties() {
|
||||
|
||||
boolean buildStatusHasBeenSet = false
|
||||
boolean notifyOldConfigFrameworkUsed = false
|
||||
|
||||
nullScript.commonPipelineEnvironment.setConfigProperty('DEPLOY_HOST', 'test.deploy.host.com')
|
||||
nullScript.commonPipelineEnvironment.setConfigProperty('CI_DEPLOY_ACCOUNT', 'trialuser123')
|
||||
nullScript.commonPipelineEnvironment.configuration = [:]
|
||||
|
||||
nullScript.currentBuild = [setResult: { buildStatusHasBeenSet = true }]
|
||||
|
||||
def utils = new Utils() {
|
||||
void pushToSWA(Map parameters, Map config) {
|
||||
notifyOldConfigFrameworkUsed = parameters.stepParam4
|
||||
}
|
||||
}
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript,
|
||||
source: archiveName,
|
||||
neo: [credentialsId: 'myCredentialsId'],
|
||||
utils: utils
|
||||
)
|
||||
|
||||
Assert.assertThat(shellRule.shell,
|
||||
new CommandLineMatcher().hasProlog("\"/opt/neo/tools/neo.sh\" deploy-mta")
|
||||
.hasSingleQuotedOption('host', 'test\\.deploy\\.host\\.com')
|
||||
.hasSingleQuotedOption('account', 'trialuser123')
|
||||
.hasOption('synchronous', '')
|
||||
.hasSingleQuotedOption('user', 'anonymous')
|
||||
.hasSingleQuotedOption('password', '\\*\\*\\*\\*\\*\\*\\*\\*')
|
||||
.hasSingleQuotedOption('source', '.*'))
|
||||
|
||||
assert !buildStatusHasBeenSet
|
||||
assert notifyOldConfigFrameworkUsed
|
||||
}
|
||||
|
||||
@Test
|
||||
void testConfigViaConfigPropertiesSetsBuildToUnstable() {
|
||||
|
||||
def buildStatus = 'SUCCESS'
|
||||
|
||||
nullScript.commonPipelineEnvironment.setConfigProperty('DEPLOY_HOST', 'test.deploy.host.com')
|
||||
nullScript.commonPipelineEnvironment.setConfigProperty('CI_DEPLOY_ACCOUNT', 'trialuser123')
|
||||
nullScript.commonPipelineEnvironment.configuration = [:]
|
||||
|
||||
nullScript.currentBuild = [setResult: { r -> buildStatus = r }]
|
||||
|
||||
System.setProperty('com.sap.piper.featureFlag.buildUnstableWhenOldConfigFrameworkIsUsedByNeoDeploy',
|
||||
Boolean.TRUE.toString())
|
||||
|
||||
try {
|
||||
stepRule.step.neoDeploy(script: nullScript,
|
||||
source: archiveName,
|
||||
neo:[credentialsId: 'myCredentialsId'],
|
||||
utils: utils
|
||||
)
|
||||
} finally {
|
||||
System.clearProperty('com.sap.piper.featureFlag.buildUnstableWhenOldConfigFrameworkIsUsedByNeoDeploy')
|
||||
}
|
||||
|
||||
assert buildStatus == 'UNSTABLE'
|
||||
}
|
||||
|
||||
@Test
|
||||
void straightForwardTestConfigViaConfiguration() {
|
||||
|
||||
boolean notifyOldConfigFrameworkUsed = true
|
||||
|
||||
def utils = new Utils() {
|
||||
void pushToSWA(Map parameters, Map config) {
|
||||
notifyOldConfigFrameworkUsed = parameters.stepParam4
|
||||
}
|
||||
}
|
||||
void straightForwardTestConfigViaParameters() {
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript,
|
||||
source: archiveName,
|
||||
@ -205,59 +86,64 @@ class NeoDeployTest extends BasePiperTest {
|
||||
)
|
||||
|
||||
Assert.assertThat(shellRule.shell,
|
||||
new CommandLineMatcher().hasProlog("\"/opt/neo/tools/neo.sh\" deploy-mta")
|
||||
new CommandLineMatcher().hasProlog("neo.sh deploy-mta")
|
||||
.hasSingleQuotedOption('host', 'test\\.deploy\\.host\\.com')
|
||||
.hasSingleQuotedOption('account', 'trialuser123')
|
||||
.hasOption('synchronous', '')
|
||||
.hasSingleQuotedOption('user', 'anonymous')
|
||||
.hasSingleQuotedOption('password', '\\*\\*\\*\\*\\*\\*\\*\\*')
|
||||
.hasSingleQuotedOption('source', '.*'))
|
||||
|
||||
assert !notifyOldConfigFrameworkUsed
|
||||
}
|
||||
|
||||
@Test
|
||||
void straightForwardTestConfigViaConfigurationAndViaConfigProperties() {
|
||||
void straightForwardTestConfigViaConfiguration() {
|
||||
|
||||
nullScript.commonPipelineEnvironment.setConfigProperty('DEPLOY_HOST', 'configProperties.deploy.host.com')
|
||||
nullScript.commonPipelineEnvironment.setConfigProperty('CI_DEPLOY_ACCOUNT', 'configPropsUser123')
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration = [steps: [neoDeploy: [neo: [host : 'configuration-frwk.deploy.host.com',
|
||||
account: 'configurationFrwkUser123']]]]
|
||||
nullScript.commonPipelineEnvironment.configuration = [steps: [
|
||||
neoDeploy: [
|
||||
neo: [
|
||||
host: 'configuration-frwk.deploy.host.com',
|
||||
account: 'configurationFrwkUser123'
|
||||
],
|
||||
source: archiveName
|
||||
]
|
||||
]]
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript,
|
||||
source: archiveName,
|
||||
neo:[credentialsId: 'myCredentialsId']
|
||||
)
|
||||
|
||||
Assert.assertThat(shellRule.shell,
|
||||
new CommandLineMatcher().hasProlog("\"/opt/neo/tools/neo.sh\" deploy-mta")
|
||||
new CommandLineMatcher().hasProlog("neo.sh deploy-mta")
|
||||
.hasSingleQuotedOption('host', 'configuration-frwk\\.deploy\\.host\\.com')
|
||||
.hasSingleQuotedOption('account', 'configurationFrwkUser123')
|
||||
.hasOption('synchronous', '')
|
||||
.hasSingleQuotedOption('user', 'anonymous')
|
||||
.hasSingleQuotedOption('password', '\\*\\*\\*\\*\\*\\*\\*\\*')
|
||||
.hasSingleQuotedOption('source', '.*'))
|
||||
.hasSingleQuotedOption('source', archiveName))
|
||||
}
|
||||
|
||||
@Test
|
||||
void archivePathFromCPETest() {
|
||||
|
||||
nullScript.commonPipelineEnvironment.setMtarFilePath('archive.mtar')
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript)
|
||||
|
||||
Assert.assertThat(shellRule.shell,
|
||||
new CommandLineMatcher().hasProlog("\"/opt/neo/tools/neo.sh\" deploy-mta")
|
||||
new CommandLineMatcher().hasProlog("neo.sh deploy-mta")
|
||||
.hasSingleQuotedOption('source', 'archive.mtar'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void archivePathFromParamsHasHigherPrecedenceThanCPETest() {
|
||||
|
||||
nullScript.commonPipelineEnvironment.setMtarFilePath('archive2.mtar')
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript,
|
||||
source: "archive.mtar")
|
||||
|
||||
Assert.assertThat(shellRule.shell,
|
||||
new CommandLineMatcher().hasProlog("\"/opt/neo/tools/neo.sh\" deploy-mta")
|
||||
new CommandLineMatcher().hasProlog("neo.sh deploy-mta")
|
||||
.hasSingleQuotedOption('source', 'archive.mtar'))
|
||||
}
|
||||
|
||||
@ -282,7 +168,7 @@ class NeoDeployTest extends BasePiperTest {
|
||||
)
|
||||
|
||||
Assert.assertThat(shellRule.shell,
|
||||
new CommandLineMatcher().hasProlog("\"/opt/neo/tools/neo.sh\" deploy-mta")
|
||||
new CommandLineMatcher().hasProlog("neo.sh deploy-mta")
|
||||
.hasSingleQuotedOption('host', 'test\\.deploy\\.host\\.com')
|
||||
.hasSingleQuotedOption('account', 'trialuser123')
|
||||
.hasOption('synchronous', '')
|
||||
@ -292,79 +178,6 @@ class NeoDeployTest extends BasePiperTest {
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void neoHomeNotSetTest() {
|
||||
|
||||
mockHomeVariablesNotSet()
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript,
|
||||
source: archiveName
|
||||
)
|
||||
|
||||
assert shellRule.shell.find { c -> c.contains('"neo.sh" deploy-mta') }
|
||||
assert loggingRule.log.contains('SAP Cloud Platform Console Client is on PATH.')
|
||||
assert loggingRule.log.contains("Using SAP Cloud Platform Console Client 'neo.sh'.")
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void neoHomeAsParameterTest() {
|
||||
|
||||
mockHomeVariablesNotSet()
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript,
|
||||
source: archiveName,
|
||||
neo:[credentialsId: 'myCredentialsId'],
|
||||
neoHome: '/param/neo'
|
||||
)
|
||||
|
||||
assert shellRule.shell.find { c -> c = "\"/param/neo/tools/neo.sh\" deploy-mta" }
|
||||
assert loggingRule.log.contains("SAP Cloud Platform Console Client home '/param/neo' retrieved from configuration.")
|
||||
assert loggingRule.log.contains("Using SAP Cloud Platform Console Client '/param/neo/tools/neo.sh'.")
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void neoHomeFromEnvironmentTest() {
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript,
|
||||
source: archiveName
|
||||
)
|
||||
|
||||
assert shellRule.shell.find { c -> c.contains("\"/opt/neo/tools/neo.sh\" deploy-mta") }
|
||||
assert loggingRule.log.contains("SAP Cloud Platform Console Client home '/opt/neo' retrieved from environment.")
|
||||
assert loggingRule.log.contains("Using SAP Cloud Platform Console Client '/opt/neo/tools/neo.sh'.")
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void neoHomeFromCustomStepConfigurationTest() {
|
||||
|
||||
mockHomeVariablesNotSet()
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration = [steps: [neoDeploy: [neo: [host: 'test.deploy.host.com', account: 'trialuser123'], neoHome: '/config/neo']]]
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript,
|
||||
source: archiveName
|
||||
)
|
||||
|
||||
assert shellRule.shell.find { c -> c = "\"/config/neo/tools/neo.sh\" deploy-mta" }
|
||||
assert loggingRule.log.contains("SAP Cloud Platform Console Client home '/config/neo' retrieved from configuration.")
|
||||
assert loggingRule.log.contains("Using SAP Cloud Platform Console Client '/config/neo/tools/neo.sh'.")
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void archiveNotProvidedTest() {
|
||||
|
||||
thrown.expect(Exception)
|
||||
thrown.expectMessage('ERROR - NO VALUE AVAILABLE FOR source')
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript)
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void wrongArchivePathProvidedTest() {
|
||||
|
||||
@ -376,14 +189,55 @@ class NeoDeployTest extends BasePiperTest {
|
||||
|
||||
|
||||
@Test
|
||||
void scriptNotProvidedTest() {
|
||||
void sanityChecksDeployModeMTATest() {
|
||||
|
||||
thrown.expect(Exception)
|
||||
thrown.expectMessage('ERROR - NO VALUE AVAILABLE FOR host')
|
||||
thrown.expectMessage(
|
||||
allOf(
|
||||
containsString('ERROR - NO VALUE AVAILABLE FOR:'),
|
||||
containsString('neo/host'),
|
||||
containsString('neo/account'),
|
||||
containsString('source')))
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration = [:]
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript, source: archiveName)
|
||||
// deployMode mta is the default, but for the sake of transparency it is better to repeat it.
|
||||
stepRule.step.neoDeploy(script: nullScript, deployMode: 'mta')
|
||||
}
|
||||
|
||||
@Test
|
||||
public void sanityChecksDeployModeWarPropertiesFileTest() {
|
||||
|
||||
thrown.expect(IllegalArgumentException)
|
||||
// using this deploy mode 'account' and 'host' are provided by the properties file
|
||||
thrown.expectMessage(
|
||||
allOf(
|
||||
containsString('ERROR - NO VALUE AVAILABLE FOR source'),
|
||||
not(containsString('neo/host')),
|
||||
not(containsString('neo/account'))))
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration = [:]
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript, deployMode: 'warPropertiesFile')
|
||||
}
|
||||
|
||||
@Test
|
||||
public void sanityChecksDeployModeWarParamsTest() {
|
||||
|
||||
thrown.expect(IllegalArgumentException)
|
||||
thrown.expectMessage(
|
||||
allOf(
|
||||
containsString('ERROR - NO VALUE AVAILABLE FOR:'),
|
||||
containsString('source'),
|
||||
containsString('neo/application'),
|
||||
containsString('neo/runtime'),
|
||||
containsString('neo/runtimeVersion'),
|
||||
containsString('neo/host'),
|
||||
containsString('neo/account')))
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration = [:]
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript, deployMode: 'warParams')
|
||||
}
|
||||
|
||||
@Test
|
||||
@ -392,7 +246,7 @@ class NeoDeployTest extends BasePiperTest {
|
||||
stepRule.step.neoDeploy(script: nullScript, source: archiveName, deployMode: 'mta')
|
||||
|
||||
Assert.assertThat(shellRule.shell,
|
||||
new CommandLineMatcher().hasProlog("\"/opt/neo/tools/neo.sh\" deploy-mta")
|
||||
new CommandLineMatcher().hasProlog("neo.sh deploy-mta")
|
||||
.hasSingleQuotedOption('host', 'test\\.deploy\\.host\\.com')
|
||||
.hasSingleQuotedOption('account', 'trialuser123')
|
||||
.hasOption('synchronous', '')
|
||||
@ -417,7 +271,7 @@ class NeoDeployTest extends BasePiperTest {
|
||||
source: warArchiveName)
|
||||
|
||||
Assert.assertThat(shellRule.shell,
|
||||
new CommandLineMatcher().hasProlog("\"/opt/neo/tools/neo.sh\" deploy")
|
||||
new CommandLineMatcher().hasProlog("neo.sh deploy")
|
||||
.hasSingleQuotedOption('host', 'test\\.deploy\\.host\\.com')
|
||||
.hasSingleQuotedOption('account', 'trialuser123')
|
||||
.hasSingleQuotedOption('application', 'testApp')
|
||||
@ -448,7 +302,7 @@ class NeoDeployTest extends BasePiperTest {
|
||||
)
|
||||
|
||||
Assert.assertThat(shellRule.shell,
|
||||
new CommandLineMatcher().hasProlog("\"/opt/neo/tools/neo.sh\" rolling-update")
|
||||
new CommandLineMatcher().hasProlog("neo.sh rolling-update")
|
||||
.hasSingleQuotedOption('host', 'test\\.deploy\\.host\\.com')
|
||||
.hasSingleQuotedOption('account', 'trialuser123')
|
||||
.hasSingleQuotedOption('application', 'testApp')
|
||||
@ -478,7 +332,7 @@ class NeoDeployTest extends BasePiperTest {
|
||||
|
||||
Assert.assertThat(shellRule.shell,
|
||||
new CommandLineMatcher()
|
||||
.hasProlog("\"/opt/neo/tools/neo.sh\" deploy")
|
||||
.hasProlog("neo.sh deploy")
|
||||
.hasSingleQuotedOption('application', 'testApp'))
|
||||
}
|
||||
|
||||
@ -532,7 +386,7 @@ class NeoDeployTest extends BasePiperTest {
|
||||
deployMode: 'warPropertiesFile',
|
||||
warAction: 'deploy',
|
||||
neo: [
|
||||
propertiesFile: propertiesFileName,
|
||||
propertiesFile: warPropertiesFileName,
|
||||
application: 'testApp',
|
||||
runtime: 'neo-javaee6-wp',
|
||||
runtimeVersion: '2.125',
|
||||
@ -541,8 +395,8 @@ class NeoDeployTest extends BasePiperTest {
|
||||
)
|
||||
|
||||
Assert.assertThat(shellRule.shell,
|
||||
new CommandLineMatcher().hasProlog("\"/opt/neo/tools/neo.sh\" deploy")
|
||||
.hasArgument("config.properties")
|
||||
new CommandLineMatcher().hasProlog("neo.sh deploy")
|
||||
.hasArgument('war.properties')
|
||||
.hasSingleQuotedOption('user', 'defaultUser')
|
||||
.hasSingleQuotedOption('password', '\\*\\*\\*\\*\\*\\*\\*\\*')
|
||||
.hasSingleQuotedOption('source', '.*\\.war'))
|
||||
@ -558,7 +412,7 @@ class NeoDeployTest extends BasePiperTest {
|
||||
deployMode: 'warPropertiesFile',
|
||||
warAction: 'rolling-update',
|
||||
neo: [
|
||||
propertiesFile: propertiesFileName,
|
||||
propertiesFile: warPropertiesFileName,
|
||||
application: 'testApp',
|
||||
runtime: 'neo-javaee6-wp',
|
||||
runtimeVersion: '2.125',
|
||||
@ -566,59 +420,13 @@ class NeoDeployTest extends BasePiperTest {
|
||||
])
|
||||
|
||||
Assert.assertThat(shellRule.shell,
|
||||
new CommandLineMatcher().hasProlog("\"/opt/neo/tools/neo.sh\" rolling-update")
|
||||
.hasArgument('config.properties')
|
||||
new CommandLineMatcher().hasProlog("neo.sh rolling-update")
|
||||
.hasArgument('war.properties')
|
||||
.hasSingleQuotedOption('user', 'defaultUser')
|
||||
.hasSingleQuotedOption('password', '\\*\\*\\*\\*\\*\\*\\*\\*')
|
||||
.hasSingleQuotedOption('source', '.*\\.war'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void applicationNameNotProvidedTest() {
|
||||
|
||||
thrown.expect(Exception)
|
||||
thrown.expectMessage('ERROR - NO VALUE AVAILABLE FOR application')
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript,
|
||||
source: warArchiveName,
|
||||
deployMode: 'warParams',
|
||||
neo: [
|
||||
runtime: 'neo-javaee6-wp',
|
||||
runtimeVersion: '2.125'
|
||||
]
|
||||
)
|
||||
}
|
||||
|
||||
@Test
|
||||
void runtimeNotProvidedTest() {
|
||||
|
||||
thrown.expect(Exception)
|
||||
thrown.expectMessage('ERROR - NO VALUE AVAILABLE FOR runtime')
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript,
|
||||
source: warArchiveName,
|
||||
neo: [
|
||||
application: 'testApp',
|
||||
runtimeVersion: '2.125'
|
||||
],
|
||||
deployMode: 'warParams')
|
||||
}
|
||||
|
||||
@Test
|
||||
void runtimeVersionNotProvidedTest() {
|
||||
|
||||
thrown.expect(Exception)
|
||||
thrown.expectMessage('ERROR - NO VALUE AVAILABLE FOR runtimeVersion')
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript,
|
||||
source: warArchiveName,
|
||||
neo: [
|
||||
application: 'testApp',
|
||||
runtime: 'neo-javaee6-wp'
|
||||
],
|
||||
deployMode: 'warParams')
|
||||
}
|
||||
|
||||
@Test
|
||||
void illegalDeployModeTest() {
|
||||
|
||||
@ -656,55 +464,55 @@ class NeoDeployTest extends BasePiperTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
void deployHostProvidedAsDeprecatedParameterTest() {
|
||||
void dontSwallowExceptionWhenUnableToProvideLogsTest() {
|
||||
|
||||
nullScript.commonPipelineEnvironment.setConfigProperty('CI_DEPLOY_ACCOUNT', 'configPropsUser123')
|
||||
thrown.expect(AbortException)
|
||||
thrown.expectMessage('Something went wrong during neo deployment')
|
||||
thrown.expect(new BaseMatcher() {
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript,
|
||||
source: archiveName,
|
||||
deployHost: "my.deploy.host.com"
|
||||
def expectedException = AbortException
|
||||
def expectedText = 'Cannot provide logs.'
|
||||
|
||||
boolean matches(def ex) {
|
||||
def suppressed = ex.getSuppressed()
|
||||
return (suppressed.size() == 1 &&
|
||||
suppressed[0] in expectedException &&
|
||||
suppressed[0].message == expectedText)
|
||||
|
||||
}
|
||||
|
||||
void describeTo(Description d) {
|
||||
d.appendText(" a suppressed ${expectedException} with message ${expectedText}.")
|
||||
}
|
||||
})
|
||||
|
||||
loggingRule.expect('Unable to provide the logs.')
|
||||
|
||||
helper.registerAllowedMethod('fileExists', [String],
|
||||
{ f ->
|
||||
f == 'archive.mtar'
|
||||
}
|
||||
)
|
||||
helper.registerAllowedMethod('sh', [Map],
|
||||
{ m ->
|
||||
if(m.script.toString().contains('neo.sh deploy-mta'))
|
||||
throw new AbortException('Something went wrong during neo deployment.')
|
||||
}
|
||||
)
|
||||
|
||||
assert loggingRule.log.contains("[WARNING][neoDeploy] Deprecated parameter 'deployHost' is used. This will not work anymore in future versions. Use parameter 'host' instead.")
|
||||
}
|
||||
|
||||
@Test
|
||||
void deployAccountProvidedAsDeprecatedParameterTest() {
|
||||
|
||||
nullScript.commonPipelineEnvironment.setConfigProperty('CI_DEPLOY_ACCOUNT', 'configPropsUser123')
|
||||
|
||||
stepRule.step.neoDeploy(script: nullScript,
|
||||
source: archiveName,
|
||||
neo: [
|
||||
host: "my.deploy.host.com",
|
||||
],
|
||||
deployAccount: "myAccount"
|
||||
helper.registerAllowedMethod("sh", [String],
|
||||
{ cmd ->
|
||||
if (cmd == 'cat logs/neo/*')
|
||||
throw new AbortException('Cannot provide logs.')
|
||||
}
|
||||
)
|
||||
|
||||
assert loggingRule.log.contains("Deprecated parameter 'deployAccount' is used. This will not work anymore in future versions. Use parameter 'account' instead.")
|
||||
}
|
||||
stepRule.step.neoDeploy(script: nullScript,
|
||||
|
||||
private mockShellCommands() {
|
||||
String javaVersion = '''openjdk version \"1.8.0_121\"
|
||||
OpenJDK Runtime Environment (build 1.8.0_121-8u121-b13-1~bpo8+1-b13)
|
||||
OpenJDK 64-Bit Server VM (build 25.121-b13, mixed mode)'''
|
||||
shellRule.setReturnValue(Type.REGEX, '.*java -version.*', javaVersion)
|
||||
|
||||
String neoVersion = '''SAP Cloud Platform Console Client
|
||||
SDK version : 3.39.10
|
||||
Runtime : neo-java-web'''
|
||||
shellRule.setReturnValue(Type.REGEX, '.*neo.sh version.*', neoVersion)
|
||||
|
||||
shellRule.setReturnValue(Type.REGEX, '.*JAVA_HOME.*', '/opt/java')
|
||||
shellRule.setReturnValue(Type.REGEX, '.*NEO_HOME.*', '/opt/neo')
|
||||
shellRule.setReturnValue(Type.REGEX, '.*which java.*', 0)
|
||||
shellRule.setReturnValue(Type.REGEX, '.*which neo.*', 0)
|
||||
}
|
||||
|
||||
private mockHomeVariablesNotSet() {
|
||||
shellRule.setReturnValue(Type.REGEX, '.*JAVA_HOME.*', '')
|
||||
shellRule.setReturnValue(Type.REGEX, '.*NEO_HOME.*', '')
|
||||
shellRule.setReturnValue(Type.REGEX, '.*which java.*', 0)
|
||||
shellRule.setReturnValue(Type.REGEX, '.*which neo.*', 0)
|
||||
source: archiveName,
|
||||
neo:[credentialsId: 'myCredentialsId'],
|
||||
deployMode: 'mta',
|
||||
utils: utils,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -11,6 +11,7 @@ import util.JenkinsReadYamlRule
|
||||
import util.JenkinsStepRule
|
||||
|
||||
class PipelineExecuteTest extends BasePiperTest {
|
||||
|
||||
private ExpectedException thrown = new ExpectedException().none()
|
||||
private JenkinsStepRule stepRule = new JenkinsStepRule(this)
|
||||
|
||||
@ -27,9 +28,6 @@ class PipelineExecuteTest extends BasePiperTest {
|
||||
|
||||
@Before
|
||||
void init() {
|
||||
pipelinePath = null
|
||||
checkoutParameters.clear()
|
||||
load = null
|
||||
|
||||
helper.registerAllowedMethod('deleteDir', [], null)
|
||||
helper.registerAllowedMethod('checkout', [Map], { m ->
|
||||
@ -44,17 +42,19 @@ class PipelineExecuteTest extends BasePiperTest {
|
||||
|
||||
@Test
|
||||
void straightForwardTest() {
|
||||
|
||||
stepRule.step.pipelineExecute(repoUrl: "https://test.com/myRepo.git")
|
||||
|
||||
assert load == "Jenkinsfile"
|
||||
assert checkoutParameters.branch == 'master'
|
||||
assert checkoutParameters.repoUrl == "https://test.com/myRepo.git"
|
||||
assert checkoutParameters.credentialsId == ''
|
||||
assert checkoutParameters.path == 'Jenkinsfile'
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
void parameterizeTest() {
|
||||
|
||||
stepRule.step.pipelineExecute(repoUrl: "https://test.com/anotherRepo.git",
|
||||
branch: 'feature',
|
||||
path: 'path/to/Jenkinsfile',
|
||||
@ -65,11 +65,11 @@ class PipelineExecuteTest extends BasePiperTest {
|
||||
assert checkoutParameters.repoUrl == "https://test.com/anotherRepo.git"
|
||||
assert checkoutParameters.credentialsId == 'abcd1234'
|
||||
assert checkoutParameters.path == 'path/to/Jenkinsfile'
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
void noRepoUrlTest() {
|
||||
|
||||
thrown.expect(Exception)
|
||||
thrown.expectMessage("ERROR - NO VALUE AVAILABLE FOR repoUrl")
|
||||
|
||||
|
@ -22,22 +22,6 @@ class PipelineStashFilesAfterBuildTest extends BasePiperTest {
|
||||
|
||||
@Test
|
||||
void testStashAfterBuild() {
|
||||
helper.registerAllowedMethod("fileExists", [String.class], {
|
||||
searchTerm ->
|
||||
return false
|
||||
})
|
||||
stepRule.step.pipelineStashFilesAfterBuild(
|
||||
script: nullScript,
|
||||
juStabUtils: utils
|
||||
)
|
||||
// asserts
|
||||
assertFalse(loggingRule.log.contains('Stash content: checkmarx'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: classFiles'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: sonar'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testStashAfterBuildWithCheckmarx() {
|
||||
helper.registerAllowedMethod("fileExists", [String.class], {
|
||||
searchTerm ->
|
||||
return true
|
||||
@ -52,21 +36,4 @@ class PipelineStashFilesAfterBuildTest extends BasePiperTest {
|
||||
assertThat(loggingRule.log, containsString('Stash content: classFiles'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: sonar'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testStashAfterBuildWithCheckmarxConfig() {
|
||||
helper.registerAllowedMethod("fileExists", [String.class], {
|
||||
searchTerm ->
|
||||
return true
|
||||
})
|
||||
stepRule.step.pipelineStashFilesAfterBuild(
|
||||
script: [commonPipelineEnvironment: [configuration: [steps: [executeCheckmarxScan: [checkmarxProject: 'TestProject']]]]],
|
||||
juStabUtils: utils,
|
||||
)
|
||||
// asserts
|
||||
assertThat(loggingRule.log, containsString('Stash content: checkmarx'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: classFiles'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: sonar'))
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -22,27 +22,7 @@ class PipelineStashFilesBeforeBuildTest extends BasePiperTest {
|
||||
.around(stepRule)
|
||||
|
||||
@Test
|
||||
void testStashBeforeBuildNoOpa() {
|
||||
|
||||
stepRule.step.pipelineStashFilesBeforeBuild(script: nullScript, juStabUtils: utils)
|
||||
|
||||
// asserts
|
||||
assertEquals('mkdir -p gitmetadata', shellRule.shell[0])
|
||||
assertEquals('cp -rf .git/* gitmetadata', shellRule.shell[1])
|
||||
assertEquals('chmod -R u+w gitmetadata', shellRule.shell[2])
|
||||
|
||||
assertThat(loggingRule.log, containsString('Stash content: buildDescriptor'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: deployDescriptor'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: git'))
|
||||
assertFalse(loggingRule.log.contains('Stash content: opa5'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: opensourceConfiguration'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: pipelineConfigAndTests'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: securityDescriptor'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: tests'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testStashBeforeBuildOpa() {
|
||||
void testStashBeforeBuild() {
|
||||
|
||||
stepRule.step.pipelineStashFilesBeforeBuild(script: nullScript, juStabUtils: utils, runOpaTests: true)
|
||||
|
||||
@ -56,4 +36,21 @@ class PipelineStashFilesBeforeBuildTest extends BasePiperTest {
|
||||
assertThat(loggingRule.log, containsString('Stash content: securityDescriptor'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: tests'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testStashBeforeBuildCustomConfig() {
|
||||
|
||||
stepRule.step.pipelineStashFilesBeforeBuild(script: nullScript, juStabUtils: utils, runOpaTests: true, stashIncludes: ['myStash': '**.myTest'])
|
||||
|
||||
// asserts
|
||||
assertThat(loggingRule.log, containsString('Stash content: buildDescriptor'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: deployDescriptor'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: git'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: opa5'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: opensourceConfiguration'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: pipelineConfigAndTests'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: securityDescriptor'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: tests'))
|
||||
assertThat(loggingRule.log, containsString('Stash content: myStash'))
|
||||
}
|
||||
}
|
||||
|
@ -16,8 +16,8 @@ import static org.junit.Assert.assertNotNull
|
||||
|
||||
|
||||
class SetupCommonPipelineEnvironmentTest extends BasePiperTest {
|
||||
|
||||
def usedConfigFile
|
||||
def swaOldConfigUsed
|
||||
|
||||
private JenkinsStepRule stepRule = new JenkinsStepRule(this)
|
||||
|
||||
@ -28,6 +28,7 @@ class SetupCommonPipelineEnvironmentTest extends BasePiperTest {
|
||||
|
||||
@Before
|
||||
void init() {
|
||||
|
||||
def examplePipelineConfig = new File('test/resources/test_pipeline_config.yml').text
|
||||
|
||||
helper.registerAllowedMethod("readYaml", [Map], { Map parameters ->
|
||||
@ -38,14 +39,6 @@ class SetupCommonPipelineEnvironmentTest extends BasePiperTest {
|
||||
usedConfigFile = parameters.file
|
||||
return yamlParser.load(examplePipelineConfig)
|
||||
})
|
||||
helper.registerAllowedMethod("readProperties", [Map], { Map parameters ->
|
||||
usedConfigFile = parameters.file
|
||||
Properties props = new Properties()
|
||||
props.setProperty('key', 'value')
|
||||
return props
|
||||
})
|
||||
|
||||
swaOldConfigUsed = null
|
||||
}
|
||||
|
||||
@Test
|
||||
@ -55,35 +48,12 @@ class SetupCommonPipelineEnvironmentTest extends BasePiperTest {
|
||||
return path.endsWith('.pipeline/config.yml')
|
||||
})
|
||||
|
||||
stepRule.step.setupCommonPipelineEnvironment(script: nullScript, utils: getSWAMockedUtils())
|
||||
stepRule.step.setupCommonPipelineEnvironment(script: nullScript)
|
||||
|
||||
assertEquals(Boolean.FALSE.toString(), swaOldConfigUsed)
|
||||
assertEquals('.pipeline/config.yml', usedConfigFile)
|
||||
assertNotNull(nullScript.commonPipelineEnvironment.configuration)
|
||||
assertEquals('develop', nullScript.commonPipelineEnvironment.configuration.general.productiveBranch)
|
||||
assertEquals('my-maven-docker', nullScript.commonPipelineEnvironment.configuration.steps.mavenExecute.dockerImage)
|
||||
}
|
||||
|
||||
@Test
|
||||
void testIsPropertiesConfigurationAvailable() {
|
||||
|
||||
helper.registerAllowedMethod("fileExists", [String], { String path ->
|
||||
return path.endsWith('.pipeline/config.properties')
|
||||
})
|
||||
|
||||
stepRule.step.setupCommonPipelineEnvironment(script: nullScript, utils: getSWAMockedUtils())
|
||||
|
||||
assertEquals(Boolean.TRUE.toString(), swaOldConfigUsed)
|
||||
assertEquals('.pipeline/config.properties', usedConfigFile)
|
||||
assertNotNull(nullScript.commonPipelineEnvironment.configProperties)
|
||||
assertEquals('value', nullScript.commonPipelineEnvironment.configProperties['key'])
|
||||
}
|
||||
|
||||
private getSWAMockedUtils() {
|
||||
new Utils() {
|
||||
void pushToSWA(Map payload, Map config) {
|
||||
SetupCommonPipelineEnvironmentTest.this.swaOldConfigUsed = payload.stepParam5
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,270 +0,0 @@
|
||||
import org.apache.commons.exec.*
|
||||
import hudson.AbortException
|
||||
|
||||
import org.junit.Before
|
||||
import org.junit.Rule
|
||||
import org.junit.Test
|
||||
import org.junit.rules.ExpectedException
|
||||
import org.junit.rules.RuleChain
|
||||
|
||||
import util.BasePiperTest
|
||||
import util.JenkinsLoggingRule
|
||||
import util.JenkinsReadYamlRule
|
||||
import util.JenkinsStepRule
|
||||
import util.Rules
|
||||
|
||||
class ToolValidateTest extends BasePiperTest {
|
||||
|
||||
private ExpectedException thrown = new ExpectedException().none()
|
||||
private JenkinsLoggingRule loggingRule = new JenkinsLoggingRule(this)
|
||||
private JenkinsStepRule stepRule = new JenkinsStepRule(this)
|
||||
|
||||
@Rule
|
||||
public RuleChain ruleChain = Rules
|
||||
.getCommonRules(this)
|
||||
.around(new JenkinsReadYamlRule(this))
|
||||
.around(thrown)
|
||||
.around(loggingRule)
|
||||
.around(stepRule)
|
||||
|
||||
def home = 'home'
|
||||
|
||||
@Test
|
||||
void nullHomeTest() {
|
||||
|
||||
thrown.expect(IllegalArgumentException)
|
||||
thrown.expectMessage("The parameter 'home' can not be null or empty.")
|
||||
|
||||
stepRule.step.toolValidate(tool: 'java')
|
||||
}
|
||||
|
||||
@Test
|
||||
void emptyHomeTest() {
|
||||
|
||||
thrown.expect(IllegalArgumentException)
|
||||
thrown.expectMessage("The parameter 'home' can not be null or empty.")
|
||||
|
||||
stepRule.step.toolValidate(tool: 'java', home: '')
|
||||
}
|
||||
|
||||
@Test
|
||||
void nullToolTest() {
|
||||
|
||||
helper.registerAllowedMethod('sh', [Map], { Map m -> return 0 })
|
||||
|
||||
thrown.expect(IllegalArgumentException)
|
||||
thrown.expectMessage("The parameter 'tool' can not be null or empty.")
|
||||
|
||||
stepRule.step.toolValidate(tool: null, home: home)
|
||||
}
|
||||
|
||||
@Test
|
||||
void emptyToolTest() {
|
||||
|
||||
helper.registerAllowedMethod('sh', [Map], { Map m -> return 0 })
|
||||
|
||||
thrown.expect(IllegalArgumentException)
|
||||
thrown.expectMessage("The parameter 'tool' can not be null or empty.")
|
||||
|
||||
stepRule.step.toolValidate(tool: '', home: home)
|
||||
}
|
||||
|
||||
@Test
|
||||
void invalidToolTest() {
|
||||
|
||||
helper.registerAllowedMethod('sh', [Map], { Map m -> return 0 })
|
||||
|
||||
thrown.expect(AbortException)
|
||||
thrown.expectMessage("The tool 'test' is not supported.")
|
||||
|
||||
stepRule.step.toolValidate(tool: 'test', home: home)
|
||||
}
|
||||
|
||||
@Test
|
||||
void unableToValidateJavaTest() {
|
||||
|
||||
thrown.expect(AbortException)
|
||||
thrown.expectMessage('The verification of Java failed.')
|
||||
|
||||
helper.registerAllowedMethod('sh', [Map], { Map m -> getNoVersion(m) })
|
||||
|
||||
stepRule.step.toolValidate(tool: 'java', home: home)
|
||||
}
|
||||
|
||||
@Test
|
||||
void unableToValidateMtaTest() {
|
||||
|
||||
thrown.expect(AbortException)
|
||||
thrown.expectMessage('The verification of SAP Multitarget Application Archive Builder failed.')
|
||||
|
||||
helper.registerAllowedMethod('sh', [Map], { Map m -> getNoVersion(m) })
|
||||
|
||||
stepRule.step.toolValidate(tool: 'mta', home: home)
|
||||
}
|
||||
|
||||
@Test
|
||||
void unableToValidateNeoTest() {
|
||||
|
||||
thrown.expect(AbortException)
|
||||
thrown.expectMessage('The verification of SAP Cloud Platform Console Client failed.')
|
||||
|
||||
helper.registerAllowedMethod('sh', [Map], { Map m -> getNoVersion(m) })
|
||||
|
||||
stepRule.step.toolValidate(tool: 'neo', home: home)
|
||||
}
|
||||
|
||||
@Test
|
||||
void unableToValidateCmTest() {
|
||||
|
||||
thrown.expect(AbortException)
|
||||
thrown.expectMessage('The verification of Change Management Command Line Interface failed.')
|
||||
|
||||
helper.registerAllowedMethod('sh', [Map], { Map m -> getNoVersion(m) })
|
||||
|
||||
stepRule.step.toolValidate(tool: 'cm', home: home)
|
||||
}
|
||||
|
||||
@Test
|
||||
void validateIncompatibleVersionJavaTest() {
|
||||
|
||||
thrown.expect(AbortException)
|
||||
thrown.expectMessage('The installed version of Java is 1.7.0.')
|
||||
|
||||
helper.registerAllowedMethod('sh', [Map], { Map m -> getIncompatibleVersion(m) })
|
||||
|
||||
stepRule.step.toolValidate(tool: 'java', home: home)
|
||||
}
|
||||
|
||||
@Test
|
||||
void validateIncompatibleVersionMtaTest() {
|
||||
|
||||
thrown.expect(AbortException)
|
||||
thrown.expectMessage('The installed version of SAP Multitarget Application Archive Builder is 1.0.5.')
|
||||
|
||||
helper.registerAllowedMethod('sh', [Map], { Map m -> getIncompatibleVersion(m) })
|
||||
|
||||
stepRule.step.toolValidate(tool: 'mta', home: home)
|
||||
}
|
||||
|
||||
@Test
|
||||
void validateCmIncompatibleVersionTest() {
|
||||
|
||||
thrown.expect(AbortException)
|
||||
thrown.expectMessage('The installed version of Change Management Command Line Interface is 0.0.0.')
|
||||
|
||||
helper.registerAllowedMethod('sh', [Map], { Map m -> getIncompatibleVersion(m) })
|
||||
binding.setVariable('tool', 'cm')
|
||||
|
||||
stepRule.step.toolValidate(tool: 'cm', home: home)
|
||||
}
|
||||
|
||||
@Test
|
||||
void validateJavaTest() {
|
||||
|
||||
helper.registerAllowedMethod('sh', [Map], { Map m -> getVersion(m) })
|
||||
|
||||
stepRule.step.toolValidate(tool: 'java', home: home)
|
||||
|
||||
assert loggingRule.log.contains('Verifying Java version 1.8.0 or compatible version.')
|
||||
assert loggingRule.log.contains('Java version 1.8.0 is installed.')
|
||||
}
|
||||
|
||||
@Test
|
||||
void validateMtaTest() {
|
||||
|
||||
helper.registerAllowedMethod('sh', [Map], { Map m -> getVersion(m) })
|
||||
|
||||
stepRule.step.toolValidate(tool: 'mta', home: home)
|
||||
|
||||
assert loggingRule.log.contains('Verifying SAP Multitarget Application Archive Builder version 1.0.6 or compatible version.')
|
||||
assert loggingRule.log.contains('SAP Multitarget Application Archive Builder version 1.0.6 is installed.')
|
||||
}
|
||||
|
||||
@Test
|
||||
void validateNeoTest() {
|
||||
|
||||
helper.registerAllowedMethod('sh', [Map], { Map m -> getVersion(m) })
|
||||
|
||||
stepRule.step.toolValidate(tool: 'neo', home: home)
|
||||
}
|
||||
|
||||
@Test
|
||||
void validateCmTest() {
|
||||
|
||||
helper.registerAllowedMethod('sh', [Map], { Map m -> getVersion(m) })
|
||||
|
||||
stepRule.step.toolValidate(tool: 'cm', home: home)
|
||||
|
||||
assert loggingRule.log.contains('Verifying Change Management Command Line Interface version 0.0.1 or compatible version.')
|
||||
assert loggingRule.log.contains('Change Management Command Line Interface version 0.0.1 is installed.')
|
||||
}
|
||||
|
||||
|
||||
private getToolHome(Map m) {
|
||||
|
||||
if(m.script.contains('JAVA_HOME')) {
|
||||
return '/env/java'
|
||||
} else if(m.script.contains('MTA_JAR_LOCATION')) {
|
||||
return '/env/mta/mta.jar'
|
||||
} else if(m.script.contains('NEO_HOME')) {
|
||||
return '/env/neo'
|
||||
} else if(m.script.contains('CM_CLI_HOME')) {
|
||||
return '/env/cmclient'
|
||||
} else {
|
||||
return 0
|
||||
}
|
||||
}
|
||||
|
||||
private getNoVersion(Map m) {
|
||||
|
||||
if(m.script.contains('java -version')) {
|
||||
throw new AbortException('script returned exit code 127')
|
||||
} else if(m.script.contains('mta.jar -v')) {
|
||||
throw new AbortException('script returned exit code 127')
|
||||
} else if(m.script.contains('neo.sh version')) {
|
||||
throw new AbortException('script returned exit code 127')
|
||||
} else if(m.script.contains('cmclient -v')) {
|
||||
throw new AbortException('script returned exit code 127')
|
||||
} else {
|
||||
return getToolHome(m)
|
||||
}
|
||||
}
|
||||
|
||||
private getVersion(Map m) {
|
||||
|
||||
if(m.script.contains('java -version')) {
|
||||
return '''openjdk version \"1.8.0_121\"
|
||||
OpenJDK Runtime Environment (build 1.8.0_121-8u121-b13-1~bpo8+1-b13)
|
||||
OpenJDK 64-Bit Server VM (build 25.121-b13, mixed mode)'''
|
||||
} else if(m.script.contains('mta.jar -v')) {
|
||||
return '1.0.6'
|
||||
} else if(m.script.contains('neo.sh version')) {
|
||||
return '''SAP Cloud Platform Console Client
|
||||
SDK version : 3.39.10
|
||||
Runtime : neo-java-web'''
|
||||
} else if(m.script.contains('cmclient -v')) {
|
||||
return '0.0.1-beta-2 : fc9729964a6acf5c1cad9c6f9cd6469727625a8e'
|
||||
} else {
|
||||
return getToolHome(m)
|
||||
}
|
||||
}
|
||||
|
||||
private getIncompatibleVersion(Map m) {
|
||||
|
||||
if(m.script.contains('java -version')) {
|
||||
return '''openjdk version \"1.7.0_121\"
|
||||
OpenJDK Runtime Environment (build 1.7.0_121-8u121-b13-1~bpo8+1-b13)
|
||||
OpenJDK 64-Bit Server VM (build 25.121-b13, mixed mode)'''
|
||||
} else if(m.script.contains('mta.jar -v')) {
|
||||
return '1.0.5'
|
||||
} else if(m.script.contains('neo.sh version')) {
|
||||
return '''SAP Cloud Platform Console Client
|
||||
SDK version : 1.126.51
|
||||
Runtime : neo-java-web'''
|
||||
} else if(m.script.contains('cmclient -v')) {
|
||||
return '0.0.0-beta-1 : fc9729964a6acf5c1cad9c6f9cd6469727625a8e'
|
||||
} else {
|
||||
return getToolHome(m)
|
||||
}
|
||||
}
|
||||
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user