Mirror of https://github.com/SAP/jenkins-library.git (synced 2024-12-12 10:55:20 +02:00)

Merge remote-tracking branch 'github/master' into HEAD

This commit is contained in: commit 50fea23d2f
@@ -10,7 +10,7 @@ plugins:
  editorconfig: .editorconfig
# https://docs.codeclimate.com/docs/advanced-configuration#section-exclude-patterns
exclude_patterns:
- "documentation/docs/images/"
- "documentation/**/images/"
- "cfg/id_rsa.enc"
fixme:
  enabled: true
.github/PULL_REQUEST_TEMPLATE.md (vendored, 7 changes)

@@ -1,5 +1,4 @@
**changes:**
# Changes

-
- [ ] add tests
- [ ] add documentation
- [ ] Tests
- [ ] Documentation
@@ -27,11 +27,14 @@ jobs:
after_script:
- JACOCO_SOURCE_PATH="src vars test" ./cc-test-reporter format-coverage target/site/jacoco/jacoco.xml --input-type jacoco
- ./cc-test-reporter upload-coverage
- mvn -DrepoToken=$COVERALLS_REPO_TOKEN org.eluder.coveralls:coveralls-maven-plugin:report
- name: Docs Build
if: type = pull_request
install: docker pull squidfunk/mkdocs-material:3.0.4
script: docker run --rm -it -v ${TRAVIS_BUILD_DIR}:/docs -w /docs/documentation squidfunk/mkdocs-material:3.0.4 build --clean --verbose --strict
script:
- |
cp -r documentation/docs documentation/docs-tmp
documentation/bin/createDocu.sh vars documentation/docs-tmp/steps
docker run --rm -it -v ${TRAVIS_BUILD_DIR}:/docs -w /docs/documentation squidfunk/mkdocs-material:3.0.4 build --clean --verbose --strict

- stage: Docs
name: Deploy
@@ -43,6 +46,8 @@ jobs:
PRIVATE_KEY="cfg/id_rsa"
openssl aes-256-cbc -K $encrypted_12c8071d2874_key -iv $encrypted_12c8071d2874_iv -in cfg/id_rsa.enc -out "${PRIVATE_KEY}" -d
chmod a+x gh-pages-deploy.sh
cp -r documentation/docs documentation/docs-tmp
documentation/bin/createDocu.sh vars documentation/docs-tmp/steps
script: docker run --rm -it --entrypoint "./gh-pages-deploy.sh" -e "TRAVIS_REPO_SLUG=${TRAVIS_REPO_SLUG}" -v ${TRAVIS_BUILD_DIR}:/docs -w /docs squidfunk/mkdocs-material:3.0.4
# TODO: make use of GHPages deploy provider: https://docs.travis-ci.com/user/deployment/pages/

@@ -1,5 +1,4 @@
[![Build Status](https://travis-ci.org/SAP/jenkins-library.svg?branch=master)](https://travis-ci.org/SAP/jenkins-library)
[![Coverage Status](https://coveralls.io/repos/github/SAP/jenkins-library/badge.svg?branch=master)](https://coveralls.io/github/SAP/jenkins-library?branch=master)
[![Maintainability](https://api.codeclimate.com/v1/badges/0e6a23344616e29b4ed0/maintainability)](https://codeclimate.com/github/SAP/jenkins-library/maintainability)
[![Test Coverage](https://api.codeclimate.com/v1/badges/0e6a23344616e29b4ed0/test_coverage)](https://codeclimate.com/github/SAP/jenkins-library/test_coverage)

@@ -103,6 +102,7 @@ project][piper-library-issues].
Feel free to open new issues for feature requests, bugs or general feedback on
the [GitHub issues page of this project][piper-library-issues].

Register to our [google group][google-group] in order to get updates or for asking questions.
# Contributing

Read and understand our [contribution guidelines][piper-library-contribution]
@@ -126,3 +126,4 @@ otherwise in the [LICENSE file][piper-library-license]
[jenkins-doc-libraries]: https://jenkins.io/doc/book/pipeline/shared-libraries
[jenkins-doc-steps]: https://jenkins.io/doc/pipeline/steps
[jenkins-plugin-sharedlibs]: https://wiki.jenkins-ci.org/display/JENKINS/Pipeline+Shared+Groovy+Libraries+Plugin
[google-group]: https://groups.google.com/forum/#!forum/project-piper
@@ -1,6 +0,0 @@
#!/bin/bash

export CLASSPATH_FILE='cp.txt'
mvn dependency:build-classpath -Dmdep.outputFile=${CLASSPATH_FILE} > /dev/null 2>&1
#~/Library/groovy-2.4.13/bin/groovy -cp src:`cat $CLASSPATH_FILE` createDocu
groovy -cp "src:$(cat $CLASSPATH_FILE)" createDocu "${@}"

@@ -1,6 +1,7 @@
import groovy.io.FileType;
import org.yaml.snakeyaml.Yaml
import org.codehaus.groovy.control.CompilerConfiguration
import com.sap.piper.GenerateDocumentation
import com.sap.piper.DefaultValueCache
import java.util.regex.Matcher

@@ -57,25 +58,24 @@ class TemplateHelper {
t += "* `${it}` - ${props.docu ?: ''}\n"
}

t
t.trim()
}

static createStepConfigurationSection(Map parameters) {

def t = '''|
|We recommend to define values of step parameters via [config.yml file](../configuration.md).
def t = '''|We recommend to define values of step parameters via [config.yml file](../configuration.md).
|
|In following sections the configuration is possible:\n\n'''.stripMargin()
|In following sections of the config.yml the configuration is possible:\n\n'''.stripMargin()

t += '| parameter | general | step | stage |\n'
t += '|-----------|---------|------|-------|\n'

parameters.keySet().toSorted().each {
def props = parameters.get(it)
t += "| `${it}` | ${props.GENERAL_CONFIG ? 'X' : ''} | ${props.STEP_CONFIG ? 'X' : ''} | ${props.PARAMS ? 'X' : ''} |\n"
t += "| `${it}` | ${props.GENERAL_CONFIG ? 'X' : ''} | ${props.STEP_CONFIG ? 'X' : ''} | ${props.STAGE_CONFIG ? 'X' : ''} |\n"
}

t
t.trim()
}
}
@@ -301,7 +301,7 @@ class Helper {

params.put('STEP_CONFIG', script.STEP_CONFIG_KEYS ?: [])
params.put('GENERAL_CONFIG', script.GENERAL_CONFIG_KEYS ?: [] )
params.put('PARAMS', script.PARAMETER_KEYS ?: [] )
params.put('STAGE_CONFIG', script.PARAMETER_KEYS ?: [] )

return params
}
@@ -324,6 +324,25 @@ class Helper {
if(p in Map) getValue(p, pPath.tail())
else return p
}

static resolveDocuRelevantSteps(GroovyScriptEngine gse, File stepsDir) {

def docuRelevantSteps = []

stepsDir.traverse(type: FileType.FILES, maxDepth: 0) {
if(it.getName().endsWith('.groovy')) {
def scriptName = (it =~ /vars\/(.*)\.groovy/)[0][1]
def stepScript = gse.createScript("${scriptName}.groovy", new Binding())
for (def method in stepScript.getClass().getMethods()) {
if(method.getName() == 'call' && method.getAnnotation(GenerateDocumentation) != null) {
docuRelevantSteps << scriptName
break
}
}
}
}
docuRelevantSteps
}
}

roots = [
@@ -351,7 +370,9 @@ stepsDocuDir = stepsDocuDir ?: new File('documentation/docs/steps')


if(args.length >= 3)
steps << args[2]
steps = (args as List).drop(2) // the first two entries are stepsDir and docuDir
// the other parts are considered as step names


// assign parameters
//
@@ -372,18 +393,16 @@ if( !stepsDir.exists() ) {
// sanity checks
//

def gse = new GroovyScriptEngine( [ stepsDir.getName() ] as String[] , getClass().getClassLoader() )

//
// find all the steps we have to document (if no step has been provided from outside)
if( ! steps) {
stepsDir.traverse(type: FileType.FILES, maxDepth: 0)
{ if(it.getName().endsWith('.groovy')) steps << (it =~ /vars\/(.*)\.groovy/)[0][1] }
steps = Helper.resolveDocuRelevantSteps(gse, stepsDir)
} else {
System.err << "[INFO] Generating docu only for step ${steps.size > 1 ? 's' : ''} ${steps}.\n"
}

def gse = new GroovyScriptEngine( [ stepsDir.getName() ] as String[] , getClass().getClassLoader() )

def prepareDefaultValuesStep = Helper.getPrepareDefaultValuesStep(gse)

boolean exceptionCaught = false
@@ -490,9 +509,9 @@ def handleStep(stepName, prepareDefaultValuesStep, gse) {
'commonPipelineEnvironment for retrieving, for example, configuration parameters.',
required: true,

GENERAL_CONFIG: 'false',
STEP_CONFIG: 'false',
PARAMS: 'true'
GENERAL_CONFIG: false,
STEP_CONFIG: false,
STAGE_CONFIG: false
]

// END special handling for 'script' parameter
@@ -501,9 +520,11 @@ def handleStep(stepName, prepareDefaultValuesStep, gse) {

it ->

def defaultValue = Helper.getValue(defaultConfig, it.split('/'))

def parameterProperties = [
defaultValue: Helper.getValue(defaultConfig, it.split('/')),
required: requiredParameters.contains((it as String))
defaultValue: defaultValue,
required: requiredParameters.contains((it as String)) && defaultValue == null
]

step.parameters.put(it, parameterProperties)
documentation/bin/createDocu.sh (new executable file, 8 lines)

@@ -0,0 +1,8 @@
#!/bin/bash

d=$(dirname "$0")
[ ! -z "$d" ] && d="$d/"

export CLASSPATH_FILE='target/cp.txt'
mvn compile dependency:build-classpath -Dmdep.outputFile=${CLASSPATH_FILE} > /dev/null 2>&1
groovy -cp "target/classes:$(cat $CLASSPATH_FILE)" "${d}createDocu" "${@}"
documentation/docs/images/SolMan_Scenario.png (new binary file, 41 KiB; not shown)

documentation/docs/scenarios/changeManagement.md (new file, 90 lines)
@@ -0,0 +1,90 @@
# Develop Hybrid Applications with Jenkins and SAP Solution Manager

Set up an agile development process with Jenkins CI, which automatically feeds changes into SAP Solution Manager.

## Prerequisites

* You have installed the Java Runtime Environment 8.
* You have installed Jenkins 2.60.3 or higher.
* You have set up Project “Piper”. See [README](https://github.com/SAP/jenkins-library/blob/master/README.md).
* You have installed SAP Solution Manager 7.2 SP6. See [README](https://github.com/SAP/devops-cm-client/blob/master/README.md).
* You have installed the Multi-Target Application (MTA) Archive Builder 1.0.6 or newer. See [SAP Development Tools](https://tools.hana.ondemand.com/#cloud).
* You have installed Node.js including node and npm. See [Node.js](https://nodejs.org/en/download/).

## Context

In many SAP development scenarios, it is vital to synchronize both backend and frontend deliveries. These deliveries are typically an SAP UI5 application and an ABAP backend from which it is served. The SAP UI5 parts are often developed using agile practices and use Continuous Integration pipelines that automatically build, test, and deploy the application.

In this scenario, we want to show how an agile development process with Jenkins CI can automatically feed changes into SAP Solution Manager. In SAP Solution Manager, all parts of the application stack come together and can be subject to classic change and transport management.

The basic workflow is as follows:

1. The pipeline scans the Git commit messages between `origin/master` and `HEAD` for a line like `ChangeDocument : <changeDocumentId>`, and validates that the change is in the correct status `in development`. The template for the commit message looks as follows:

```
<Commit Message Header>

<Commit Message Description>

ChangeDocument: <Your Change Document ID>
```

2. To communicate with SAP Solution Manager, the pipeline uses credentials that must be stored on Jenkins under the label `CM`.
3. The required transport request is created on the fly. However, the change document can contain more components (for example, UI and backend components).
4. The changes of your development team trigger the Jenkins pipeline. It builds and validates the changes and attaches them to the respective transport request.
5. As soon as the development process is completed, the change document in SAP Solution Manager can be set to status `to be tested` and all components can be transported to the test system.

![Hybrid Application Development Workflow](../images/SolMan_Scenario.png "Hybrid Application Development Workflow")
##### Hybrid Application Development Workflow
## Example

### Jenkinsfile

```groovy
@Library('piper-library-os') _

node() {

  stage('prepare') {
    checkout scm
    setupCommonPipelineEnvironment script:this
    checkChangeInDevelopment script: this
  }

  stage('buildMta') {
    mtaBuild script: this
  }

  stage('uploadToTransportRequest') {
    transportRequestCreate script: this
    transportRequestUploadFile script:this
    transportRequestRelease script: this
  }

}
```

### Configuration (`.pipeline/config.yml`)

```yaml
#Steps Specific Configuration
general:
  changeManagement:
    endpoint: 'https://<backend-system>/sap/opu/odata/sap/AI_CRM_GW_CM_CI_SRV'
    credentialsId: 'CM'
    type: 'SOLMAN'
steps:
  mtaBuild:
    buildTarget: 'NEO'
  transportRequestUploadFile:
    applicationId: 'HCP'
```

### Parameters

For the detailed description of the relevant parameters, see:

* [checkChangeInDevelopment](https://sap.github.io/jenkins-library/steps/checkChangeInDevelopment/)
* [mtaBuild](https://sap.github.io/jenkins-library/steps/mtaBuild/)
* [transportRequestUploadFile](https://sap.github.io/jenkins-library/steps/transportRequestUploadFile/)
documentation/docs/scenarios/ui5-sap-cp/Readme.md (new file, 87 lines)

@@ -0,0 +1,87 @@
# Create a Pipeline for SAP UI5 or SAP Fiori on SAP Cloud Platform

Create an application based on SAP UI5 or SAP Fiori and deploy the build result into an SAP Cloud Platform account in the Neo environment.

This document describes a scenario step, which means that it combines several steps to create a complete pipeline.

## Prerequisites

* You have installed the Java Runtime Environment 8.
* You have installed Jenkins 2.60.3 or higher.
* You have set up Project “Piper”. See [README](https://github.com/SAP/jenkins-library/blob/master/README.md).
* You have installed the Multi-Target Application (MTA) Archive Builder 1.0.6 or newer. See [SAP Development Tools](https://tools.hana.ondemand.com/#cloud).
* You have installed Node.js including node and npm. See [Node.js](https://nodejs.org/en/download/).
* You have installed the SAP Cloud Platform Neo Environment SDK. See [SAP Development Tools](https://tools.hana.ondemand.com/#cloud).

### Project Prerequisites

This scenario step requires additional files in your project and the execution environment on your Jenkins instance. On the project level, provide and adjust the following template:

| File Name | Description |
|-----|-----|
| [`.npmrc`](https://github.com/marcusholl/jenkins-library/tree/pr/scenarioUI5SAPCP/documentation/docs/scenarios/ui5-sap-cp/files/.npmrc) | This file contains a reference to the SAP NPM registry (`@sap:registry https://npm.sap.com`), which is required to fetch dependencies to build the application. Place it in the root directory of your project. |
| [`mta.yaml`](https://github.com/marcusholl/jenkins-library/tree/pr/scenarioUI5SAPCP/documentation/docs/scenarios/ui5-sap-cp/files/mta.yaml) | This file controls the behavior of the MTA toolset. Place it in your application root folder and adjust the values in brackets with your data. |
| [`package.json`](https://github.com/marcusholl/jenkins-library/tree/pr/scenarioUI5SAPCP/documentation/docs/scenarios/ui5-sap-cp/files/package.json) | This file lists the required development dependencies for the build. Add the content to your existing `package.json` file. |
| [`Gruntfile.js`](https://github.com/marcusholl/jenkins-library/tree/pr/scenarioUI5SAPCP/documentation/docs/scenarios/ui5-sap-cp/files/Gruntfile.js) | This file controls the grunt build. By default the tasks `clean`, `build`, and `lint` are executed. Place it in the root directory of your project. |
## Context

In this scenario step, we want to show how to build an application based on SAP UI5 or SAP Fiori by using the multi-target application (MTA) concept and how to deploy the build result into an SAP Cloud Platform account in the Neo environment. This document comprises the [mtaBuild](https://sap.github.io/jenkins-library/steps/mtaBuild/) and the [neoDeploy](https://sap.github.io/jenkins-library/steps/neoDeploy/) steps.

![This pipeline in Jenkins Blue Ocean](images/pipeline.jpg)

## Example

### Jenkinsfile

Following the convention for pipeline definitions, use a `Jenkinsfile` which resides in the root directory of your development sources.

```groovy
@Library('piper-lib-os') _

fioriOnCloudPlatformPipeline script:this
```

### Configuration (`.pipeline/config.yml`)

This is a basic configuration example, which is also located in the sources of the project.

```yaml
steps:
  mtaBuild:
    buildTarget: 'NEO'
    mtaJarLocation: '/opt/sap/mta.jar'
  neoDeploy:
    neoCredentialsId: 'NEO_DEPLOY'
    neoHome: '/opt/sap/neo-sdk/'
    account: 'your-account-id'
    host: 'hana.ondemand.com'
```

#### Configuration for the MTA Build

| Parameter | Description |
| -----------------|----------------|
| `buildTarget` | The target platform to which the mtar can be deployed. Possible values are: `CF`, `NEO`, `XSA` |
| `mtaJarLocation` | The location of the multi-target application archive builder jar file, including file name and extension. |

#### Configuration for the Deployment to SAP Cloud Platform

| Parameter | Description |
| -------------------|-------------|
| `account` | The SAP Cloud Platform account to deploy to. |
| `host` | The SAP Cloud Platform host to deploy to. |
| `neoCredentialsId` | The Jenkins credentials that contain the user and password which are used for the deployment on SAP Cloud Platform. |
| `neoHome` | The path to the `neo-java-web-sdk` tool that is used for the deployment. |

### Parameters

For the detailed description of the relevant parameters, see:

* [mtaBuild](https://sap.github.io/jenkins-library/steps/mtaBuild/)
* [neoDeploy](https://sap.github.io/jenkins-library/steps/neoDeploy/)
documentation/docs/scenarios/ui5-sap-cp/files/.npmrc (new file, 11 lines)

@@ -0,0 +1,11 @@
# This file can be omitted in the project if it is ensured
# that the corresponding configuration is provided on a
# higher level in the npm config (either on user level ~/.npmrc or
# globally). For more details with regards to configuring npm check
# the man pages for npm-config/npmrc

# The public npm registry from where to fetch e.g. Grunt
registry=https://registry.npmjs.org

# The SAP npm registry from where to fetch SAP specific Grunt modules
@sap:registry=https://npm.sap.com
documentation/docs/scenarios/ui5-sap-cp/files/Gruntfile.js (new file, 10 lines)

@@ -0,0 +1,10 @@
module.exports = function (grunt) {
    'use strict';
    grunt.loadNpmTasks('@sap/grunt-sapui5-bestpractice-build');

    grunt.registerTask('default', [
        'lint',
        'clean',
        'build'
    ]);
};
documentation/docs/scenarios/ui5-sap-cp/files/mta.yaml (new file, 16 lines)

@@ -0,0 +1,16 @@
_schema-version: "2.0.0"
ID: "<Id of your MTA>"
version: <version number of your application>

parameters:
  hcp-deployer-version: "1.0.0"

modules:
  - name: "<Name of your Fiori application>"
    type: html5
    path: .
    parameters:
      version: <version number of your application>-${timestamp}
    build-parameters:
      builder: grunt
      build-result: dist
documentation/docs/scenarios/ui5-sap-cp/files/package.json (new file, 10 lines)

@@ -0,0 +1,10 @@
{
  "name": "<name of the package>",
  "version": "<version of the package>",
  "description": "<description of the package>",
  "private": true,
  "devDependencies": {
    "grunt": "1.0.1",
    "@sap/grunt-sapui5-bestpractice-build": "1.3.17"
  }
}
documentation/docs/scenarios/ui5-sap-cp/images/pipeline.jpg (new binary file, 18 KiB; not shown)
@@ -2,14 +2,7 @@

## Description

Checks if a Change Document in SAP Solution Manager is in status 'in development'. The change document id is retrieved from the git commit history. The change document id
can also be provided via parameter `changeDocumentId`. Any value provided as parameter has a higher precedence than a value from the commit history.

By default the git commit messages between `origin/master` and `HEAD` are scanned for a line like `ChangeDocument : <changeDocumentId>`. The commit
range and the pattern can be configured. For details see 'parameters' table.

In case the change is not in status 'in development' an `hudson.AbortException` is thrown. In case `failIfStatusIsNotInDevelopment`
is set to `false`, no `hudson.AbortException` will be thrown. In this case there is only a message in the log stating the change is not in status 'in development'.
Content here is generated from the corresponding step, see `vars`.

## Prerequisites

@@ -17,30 +10,22 @@ is set to `false`, no `hudson.AbortException` will be thrown. In this case there

## Parameters

| parameter | mandatory | default | possible values |
| -------------------|-----------|--------------------------------------------------------|--------------------|
| `script` | yes | | |
| `changeDocumentId` | yes | | |
| `changeManagement/changeDocumentLabel` | no | `ChangeDocument\s?:` | regex pattern |
| `changeManagement/credentialsId` | yes | | |
| `changeManagement/endpoint` | yes | | |
| `changeManagement/git/from` | no | `origin/master` | |
| `changeManagement/git/to` | no | `HEAD` | |
| `changeManagement/git/format` | no | `%b` | see `git log --help` |
| `failIfStatusIsNotInDevelopment` | no | `true` | `true`, `false` |

* `script` - The common script environment of the Jenkinsfile running. Typically the reference to the script calling the pipeline step is provided with the `this` parameter, as in `script: this`. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for retrieving, for example, configuration parameters.
* `changeDocumentId` - The id of the change document to transport. If not provided, it is retrieved from the git commit history.
* `changeManagement/changeDocumentLabel` - A pattern used for identifying lines holding the change document id.
* `changeManagement/credentialsId` - The id of the credentials to connect to the Solution Manager. The credentials need to be maintained on Jenkins.
* `changeManagement/endpoint` - The address of the Solution Manager.
* `changeManagement/git/from` - The starting point for retrieving the change document id
* `changeManagement/git/to` - The end point for retrieving the change document id
* `changeManagement/git/format` - Specifies what part of the commit is scanned. By default the body of the commit message is scanned.
* `failIfStatusIsNotInDevelopment` - when set to `false` the step will not fail in case the change is not in status 'in development'.
Content here is generated from the corresponding step, see `vars`.

## Step configuration

Content here is generated from the corresponding step, see `vars`.

## Exceptions

* `AbortException`:
  * If the change id is not provided via parameter and if the change document id cannot be retrieved from the commit history.
  * If the change is not in status `in development`. In this case no exception will be thrown when `failIfStatusIsNotInDevelopment` is set to `false`.
* `IllegalArgumentException`:
  * If a mandatory property is not provided.

## Examples

The step is configured using a customer configuration file provided as
resource in a custom shared library.
@@ -87,17 +72,7 @@ The properties can also be configured on a per-step basis:
failIfStatusIsNotInDevelopment: true
```

The parameters can also be provided when the step is invoked. For examples see below.

## Exceptions

* `AbortException`:
  * If the change id is not provided via parameter and if the change document id cannot be retrieved from the commit history.
  * If the change is not in status `in development`. In this case no exception will be thrown when `failIfStatusIsNotInDevelopment` is set to `false`.
* `IllegalArgumentException`:
  * If a mandatory property is not provided.

## Examples
The parameters can also be provided when the step is invoked:

```groovy
// simple case. All mandatory parameters provided via

@@ -32,7 +32,7 @@ Deployment can be done
| keepOldInstance | no | false | true, false |
| dockerImage | no | s4sdk/docker-cf-cli | |
| dockerWorkspace | no | /home/piper | |
| mtaDeployParameters | | -f | |
| mtaDeployParameters | | for _deployType:standard_ `-f`<br />for _deployType:blue-green_ `-f --no-confirm` | |
| mtaExtensionDescriptor | no | '' | |
| mtaPath | no | '' | |
| smokeTestScript | no | blueGreenCheckScript.sh (provided by library). <br />Can be overwritten using config property 'smokeTestScript' | |
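As a hedged illustration of the blue-green MTA deployment covered by the new default above, a minimal step invocation could look as follows; the values are placeholders taken from the test data added in this commit, not a recommended setup:

```groovy
// sketch only: with deployType 'blue-green' the step falls back to the
// mtaDeployParameters default '-f --no-confirm' listed in the table above
cloudFoundryDeploy(
    script: this,
    deployTool: 'mtaDeployPlugin',
    deployType: 'blue-green',
    cfOrg: 'testOrg',
    cfSpace: 'testSpace',
    cfCredentialsId: 'test_cfCredentialsId',
    mtaPath: 'target/test.mtar'
)
```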
@@ -7,11 +7,12 @@ The workspace is mounted into the docker image.
Proxy environment variables defined on the Jenkins machine are also available in the Docker container.

## Parameters

| parameter | mandatory | default | possible values |
| ----------|-----------|---------|-----------------|
|script|yes|||
|containerCommand|no|||
|containerPortMappings|no|||
|containerShell|no|||
|dockerEnvVars|no|`[:]`||
|dockerImage|no|`''`||
|dockerName|no|||
@@ -27,7 +28,9 @@ Proxy environment variables defined on the Jenkins machine are also available in
|sidecarWorkspace|no|||

* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for storing the measured duration.
* `containerCommand`: only used in case the execution environment is Kubernetes; allows to specify the start command for the container created with the dockerImage parameter to overwrite the Piper default (`/usr/bin/tail -f /dev/null`).
* `containerPortMappings`: Map which defines per docker image the port mappings, like `containerPortMappings: ['selenium/standalone-chrome': [[name: 'selPort', containerPort: 4444, hostPort: 4444]]]`
* `containerShell`: only used in case the execution environment is Kubernetes; allows to specify the shell to be used for execution of commands
* `dockerEnvVars`: Environment variables to set in the container, e.g. [http_proxy:'proxy:8080']
* `dockerImage`: Name of the docker image that should be used. If empty, Docker is not used and the command is executed directly on the Jenkins system.
* `dockerName`: Kubernetes case: Name of the container launching `dockerImage`, SideCar: Name of the container in local network
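A minimal usage sketch for the parameters documented above; the image, proxy value and command are illustrative only:

```groovy
// sketch: run a shell command inside a Docker container; if dockerImage were
// empty, the command would be executed directly on the Jenkins system
dockerExecute(
    script: this,
    dockerImage: 'maven:3.5-jdk-8-alpine',
    dockerEnvVars: [http_proxy: 'http://proxy:8080']
) {
    sh 'mvn --version'
}
```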
@@ -16,11 +16,13 @@ Executes a closure inside a container in a kubernetes pod. Proxy environment var
| parameter | mandatory | default | possible values |
| ----------|-----------|---------|-----------------|
|script|yes|||
|containerCommand|no|||
|containerCommands|no|||
|containerEnvVars|no|||
|containerMap|no|`[:]`||
|containerName|no|||
|containerPortMappings|no|||
|containerShell|no|||
|containerWorkspaces|no|||
|dockerEnvVars|no|`[:]`||
|dockerImage|yes|||
@@ -30,13 +32,14 @@ Executes a closure inside a container in a kubernetes pod. Proxy environment var
|stashIncludes|no|`[workspace:**/*.*]`||

* `script` defines the global script environment of the Jenkinsfile run. Typically `this` is passed to this parameter. This allows the function to access the [`commonPipelineEnvironment`](commonPipelineEnvironment.md) for storing the measured duration.
* `containerCommand`: allows to specify the start command for the container created with the dockerImage parameter to overwrite the Piper default (`/usr/bin/tail -f /dev/null`).
* `containerCommands` specifies start commands for containers to overwrite the Piper default (`/usr/bin/tail -f /dev/null`). If the container's default start command should be used, provide an empty string like: `['selenium/standalone-chrome': '']`.
* `containerEnvVars` specifies environment variables per container. If not provided `dockerEnvVars` will be used.
* `containerMap` A map of docker image to the name of the container. The pod will be created with all the images from this map and they are labeled based on the value field of each map entry.
  Example: `['maven:3.5-jdk-8-alpine': 'mavenExecute', 'selenium/standalone-chrome': 'selenium', 'famiko/jmeter-base': 'checkJMeter', 's4sdk/docker-cf-cli': 'cloudfoundry']`
* `containerName`: optional configuration in combination with containerMap to define the container where the commands should be executed in
* `containerPortMappings`: Map which defines per docker image the port mappings, like `containerPortMappings: ['selenium/standalone-chrome': [[name: 'selPort', containerPort: 4444, hostPort: 4444]]]`
* `containerShell` allows to specify the shell to be executed for the container with containerName
* `containerWorkspaces` specifies the workspace (=home directory of user) per container. If not provided `dockerWorkspace` will be used. If empty, the home directory will not be set.
* `dockerImage` Name of the docker image that should be used. If empty, Docker is not used.
* `dockerEnvVars` Environment variables to set in the container, e.g. [http_proxy:'proxy:8080']
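A hedged sketch combining the Kubernetes-specific parameters described above; the values mirror the tests added in this commit and are not a recommended configuration:

```groovy
// sketch: override the container start command and the shell used inside the pod
dockerExecuteOnKubernetes(
    script: this,
    dockerImage: 'maven:3.5-jdk-8-alpine',
    containerShell: '/busybox/sh',
    containerCommand: '/busybox/tail -f /dev/null'
) {
    sh 'mvn --version'
}
```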
@@ -16,7 +16,7 @@ You basically need three components:
It will create following files for you and archive them into your build:

* `jenkins_data.json`: This file gives you build-specific information, like e.g. build result, stage where the build failed
* `pipeline_data.json`: This file gives you detailed information about your pipeline, e.g. stage durations, steps executed, ...
* `influx_data.json`: This file gives you detailed information about your pipeline, e.g. stage durations, steps executed, ...

## Prerequisites

@@ -83,17 +83,33 @@ influxDBServer=jenkins

| parameter | mandatory | default | possible values |
| ----------|-----------|---------|-----------------|
| script | yes | | |
| artifactVersion | yes | commonPipelineEnvironment.getArtifactVersion() | |
| influxServer | no | `jenkins` | |
| influxPrefix | no | `null` | |
|script|yes|||
|artifactVersion|no|`commonPipelineEnvironment.getArtifactVersion()`||
|customData|no|`commonPipelineEnvironment.getInfluxCustomData()`||
|customDataMap|no|`commonPipelineEnvironment.getInfluxCustomDataMap()`||
|customDataMapTags|no|`commonPipelineEnvironment.getInfluxCustomDataTags()`||
|customDataTags|no|`commonPipelineEnvironment.getInfluxCustomDataTags()`||
|influxPrefix|no|||
|influxServer|no|`''`||
|wrapInNode|no|`false`||

## Step configuration

The following parameters can also be specified as step parameters using the global configuration file:
We recommend to define values of step parameters via [config.yml file](../configuration.md).

- `influxServer`
- `influxPrefix`
In following sections the configuration is possible:

| parameter | general | step | stage |
| ----------|-----------|---------|-----------------|
|script||||
|artifactVersion||X|X|
|customData||X|X|
|customDataMap||X|X|
|customDataMapTags||X|X|
|customDataTags||X|X|
|influxPrefix||X|X|
|influxServer||X|X|
|wrapInNode||X|X|

## Example

@@ -153,7 +169,7 @@ Measurements are potentially pre-fixed - see parameter `influxPrefix` above.
| sonarqube_data | <ul><li>blocker_issues</li><li>critical_issues</li><li>info_issues</li><li>major_issues</li><li>minor_issues</li><li>lines_of_code</li><li>...</li></ul> | Details see [InfluxDB plugin documentation](https://wiki.jenkins.io/display/JENKINS/InfluxDB+Plugin) |
| jenkins_custom_data | Piper fills following columns by default: <br /><ul><li>build_result</li><li>build_result_key</li><li>build_step (->step in case of error)</li><li>build_error (->error message in case of error)</li></ul> | filled by `commonPipelineEnvironment.setInfluxCustomDataProperty()` |
| pipeline_data | Examples from the Piper templates:<br /><ul><li>build_duration</li><li>opa_duration</li><li>deploy_test_duration</li><li>deploy_test_duration</li><li>fortify_duration</li><li>release_duration</li><li>...</li></ul>| filled by step [`measureDuration`](durationMeasure.md) using parameter `measurementName`|
| step_data | Considered, e.g.:<br /><ul><li>build_quality (Milestone/Release)</li><li>build_url</li><li>bats</li><li>checkmarx</li><li>fortify</li><li>gauge</li><li>nsp</li><li>opa</li><li>opensourcedependency</li><li>ppms</li><li>jmeter</li><li>supa</li><li>snyk</li><li>sonar</li><li>sourceclear</li><li>uiveri5</li><li>vulas</li><li>whitesource</li><li>traceability</li><li>...</li><li>xmakestage</li><li>xmakepromote</li></ul>| filled by `commonPipelineEnvironment.setInfluxStepData()` |
| step_data | Considered, e.g.:<br /><ul><li>build_url</li><li>bats</li><li>checkmarx</li><li>fortify</li><li>gauge</li><li>nsp</li><li>snyk</li><li>sonar</li><li>...</li></ul>| filled by `commonPipelineEnvironment.setInfluxStepData()` |
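A minimal sketch of how such measurement fields can be filled from a pipeline script, assuming the `commonPipelineEnvironment` object is in scope; the key and value are purely illustrative:

```groovy
// sketch: mark a step as executed (step_data) and set a custom column (jenkins_custom_data)
commonPipelineEnvironment.setInfluxStepData('sonar', true)
commonPipelineEnvironment.setInfluxCustomDataProperty('build_quality', 'Release')
```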

### Examples for InfluxDB queries which can be used in Grafana

@@ -46,7 +46,7 @@ Note that a version is formed by `major.minor.patch`, and a version is compatibl
| `script` | yes | | |
| `warAction` | yes | `'deploy'` | `'deploy'`, `'rolling-update'` |

## Parameters when using WAR file deployment method witout .properties file - with parameters (WAR_PARAMS)
## Parameters when using WAR file deployment method without .properties file - with parameters (WAR_PARAMS)

| parameter | mandatory | default | possible values |
| -------------------|-----------|-------------------------------|-------------------------------------------------|

@@ -16,11 +16,11 @@ Pipeline step:
newmanExecute script: this
```

This step should be used in combination with `publishTestResults`:
This step should be used in combination with `testsPublishResults`:

```groovy
newmanExecute script: this, failOnError: false
publishTestResults script: this, junit: [pattern: '**/newman/TEST-newman.xml']
testsPublishResults script: this, junit: [pattern: '**/newman/TEST-*.xml']
```

## Parameters

@@ -22,6 +22,7 @@ nav:
- mavenExecute: steps/mavenExecute.md
- mtaBuild: steps/mtaBuild.md
- neoDeploy: steps/neoDeploy.md
- newmanExecute: steps/newmanExecute.md
- pipelineExecute: steps/pipelineExecute.md
- pipelineRestartSteps: steps/pipelineRestartSteps.md
- pipelineStashFiles: steps/pipelineStashFiles.md
@@ -33,6 +34,9 @@ nav:
- transportRequestCreate: steps/transportRequestCreate.md
- transportRequestRelease: steps/transportRequestRelease.md
- transportRequestUploadFile: steps/transportRequestUploadFile.md
- 'Scenarios':
- 'Develop Hybrid Applications with Jenkins and SAP Solution Manager': scenarios/changeManagement.md
- 'Create a Pipeline for SAP UI5 or SAP Fiori on SAP Cloud Platform': scenarios/ui5-sap-cp/Readme.md
- 'Required Plugins': jenkins/requiredPlugins.md

theme:
@@ -55,6 +59,6 @@ markdown_extensions:
extra_css:
- 'css/extra.css'
edit_uri: edit/master/documentation/docs
docs_dir: docs
docs_dir: docs-tmp
site_dir: docs-gen
repo_url: https://github.com/SAP/jenkins-library
pom.xml (7 changes)

@@ -10,7 +10,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>com.sap.cp.jenkins</groupId>
<artifactId>jenkins-library</artifactId>
<version>0.8</version>
<version>0.9</version>

<name>SAP CP Piper Library</name>
<description>Shared library containing steps and utilities to set up continuous deployment processes for SAP technologies.</description>
@@ -236,11 +236,6 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.eluder.coveralls</groupId>
<artifactId>coveralls-maven-plugin</artifactId>
<version>4.3.0</version>
</plugin>
</plugins>
</build>
</project>
@@ -187,7 +187,7 @@ steps:
healthExecuteCheck:
healthEndpoint: ''
influxWriteData:
influxServer: 'jenkins'
influxServer: ''
karmaExecuteTests:
containerPortMappings:
'node:8-stretch':

@@ -101,11 +101,19 @@ class ConfigurationHelper implements Serializable {
return this
}

ConfigurationHelper addIfNull(key, value){
if (config[key] == null){
config[key] = value
}
return this
}

@NonCPS // required because we have a closure in the
// method body that cannot be CPS transformed
Map use(){
handleValidationFailures()
MapUtils.traverse(config, { v -> (v instanceof GString) ? v.toString() : v })
if(config.verbose) step.echo "[${name}] Configuration: ${config}"
return config
}
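A hedged usage sketch for the new `addIfNull` method; the helper instance, key and value below are illustrative and not taken from the library sources:

```groovy
// hypothetical sketch: 'configHelper' stands for an existing ConfigurationHelper instance.
// addIfNull only writes the value when the key is still unset and returns the helper,
// so it can be chained before use() materializes the configuration Map.
def config = configHelper
    .addIfNull('influxServer', '')
    .use()
```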
src/com/sap/piper/GenerateDocumentation.groovy (new file, 11 lines)

@@ -0,0 +1,11 @@
package com.sap.piper

import java.lang.annotation.ElementType
import java.lang.annotation.Retention
import java.lang.annotation.RetentionPolicy
import java.lang.annotation.Target

@Retention(RetentionPolicy.RUNTIME)
@Target([ElementType.METHOD, ElementType.TYPE])
public @interface GenerateDocumentation {
}
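A short sketch of how a step under `vars/` would opt in to documentation generation; the step name is hypothetical. `createDocu` only collects steps whose `call` method carries this annotation:

```groovy
// vars/exampleStep.groovy -- hypothetical step, for illustration only
import com.sap.piper.GenerateDocumentation

@GenerateDocumentation
void call(Map parameters = [:]) {
    // the actual step implementation would go here
    echo 'running exampleStep'
}
```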
@@ -19,3 +19,32 @@ def nodeAvailable() {
}
return true
}

@NonCPS
def getCurrentBuildInstance() {
return currentBuild
}

@NonCPS
def getRawBuild() {
return getCurrentBuildInstance().rawBuild
}

def isJobStartedByTimer() {
return isJobStartedByCause(hudson.triggers.TimerTrigger.TimerTriggerCause.class)
}

def isJobStartedByUser() {
return isJobStartedByCause(hudson.model.Cause.UserIdCause.class)
}

@NonCPS
def isJobStartedByCause(Class cause) {
def startedByGivenCause = false
def detectedCause = getRawBuild().getCause(cause)
if (null != detectedCause) {
startedByGivenCause = true
echo "Found build cause ${detectedCause}"
}
return startedByGivenCause
}
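A hedged usage sketch for the new trigger helpers, assuming `jenkinsUtils` is an instance of the class extended above; the category strings are illustrative (the tests below only rely on 'USER'):

```groovy
// hypothetical usage inside a pipeline step
def jobTrigger = jenkinsUtils.isJobStartedByUser() ? 'USER'
    : jenkinsUtils.isJobStartedByTimer() ? 'TIMER'
    : 'OTHER'
echo "Build was triggered by: ${jobTrigger}"
```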
@@ -1,10 +1,10 @@
#!groovy
import com.sap.piper.JenkinsUtils
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.ExpectedException
import org.junit.rules.RuleChain
import org.yaml.snakeyaml.Yaml
import util.BasePiperTest
import util.JenkinsCredentialsRule
import util.JenkinsEnvironmentRule
@@ -16,9 +16,11 @@ import util.JenkinsWriteFileRule
import util.JenkinsReadYamlRule
import util.Rules

import static org.hamcrest.Matchers.stringContainsInOrder
import static org.junit.Assert.assertThat

import static org.hamcrest.Matchers.hasItem
import static org.hamcrest.Matchers.is
import static org.hamcrest.Matchers.not
import static org.hamcrest.Matchers.hasEntry
import static org.hamcrest.Matchers.containsString
@@ -34,6 +36,14 @@ class CloudFoundryDeployTest extends BasePiperTest {
private JenkinsEnvironmentRule jer = new JenkinsEnvironmentRule(this)
private JenkinsReadYamlRule jryr = new JenkinsReadYamlRule(this)

private writeInfluxMap = [:]

class JenkinsUtilsMock extends JenkinsUtils {
def isJobStartedByUser() {
return true
}
}

@Rule
public RuleChain rules = Rules
.getCommonRules(this)
@@ -47,6 +57,13 @@ class CloudFoundryDeployTest extends BasePiperTest {
.around(new JenkinsCredentialsRule(this).withCredentials('test_cfCredentialsId', 'test_cf', '********'))
.around(jsr) // needs to be activated after jedr, otherwise executeDocker is not mocked

@Before
void init() {
helper.registerAllowedMethod('influxWriteData', [Map.class], {m ->
writeInfluxMap = m
})
}

@Test
void testNoTool() throws Exception {
nullScript.commonPipelineEnvironment.configuration = [
@@ -69,6 +86,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
jsr.step.cloudFoundryDeploy([
script: nullScript,
juStabUtils: utils,
jenkinsUtilsStub: new JenkinsUtilsMock(),
deployTool: '',
stageName: 'acceptance',
])
@@ -97,6 +115,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
jsr.step.cloudFoundryDeploy([
script: nullScript,
juStabUtils: utils,
jenkinsUtilsStub: new JenkinsUtilsMock(),
deployTool: 'notAvailable',
stageName: 'acceptance'
])
@@ -114,6 +133,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
jsr.step.cloudFoundryDeploy([
script: nullScript,
juStabUtils: utils,
jenkinsUtilsStub: new JenkinsUtilsMock(),
deployTool: 'cf_native',
cfOrg: 'testOrg',
cfSpace: 'testSpace',
@@ -140,6 +160,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
jsr.step.cloudFoundryDeploy([
script: nullScript,
juStabUtils: utils,
jenkinsUtilsStub: new JenkinsUtilsMock(),
deployTool: 'cf_native',
cfApiEndpoint: 'https://customApi',
cfOrg: 'testOrg',
@@ -162,6 +183,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
jsr.step.cloudFoundryDeploy([
script: nullScript,
juStabUtils: utils,
jenkinsUtilsStub: new JenkinsUtilsMock(),
deployTool: 'cf_native',
cloudFoundry: [
org: 'testOrg',
@@ -192,6 +214,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
jsr.step.cloudFoundryDeploy([
script: nullScript,
juStabUtils: utils,
jenkinsUtilsStub: new JenkinsUtilsMock(),
deployTool: 'cf_native',
cfOrg: 'testOrg',
cfSpace: 'testSpace',
@@ -218,6 +241,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
jsr.step.cloudFoundryDeploy([
script: nullScript,
juStabUtils: utils,
jenkinsUtilsStub: new JenkinsUtilsMock(),
deployTool: 'cf_native',
cfOrg: 'testOrg',
cfSpace: 'testSpace',
@@ -234,6 +258,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
jsr.step.cloudFoundryDeploy([
script: nullScript,
juStabUtils: utils,
jenkinsUtilsStub: new JenkinsUtilsMock(),
deployTool: 'cf_native',
deployType: 'blue-green',
cfOrg: 'testOrg',
@@ -260,6 +285,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
jsr.step.cloudFoundryDeploy([
script: nullScript,
juStabUtils: utils,
jenkinsUtilsStub: new JenkinsUtilsMock(),
deployTool: 'cf_native',
deployType: 'blue-green',
keepOldInstance: false,
@@ -288,6 +314,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
jsr.step.cloudFoundryDeploy([
script: nullScript,
juStabUtils: utils,
jenkinsUtilsStub: new JenkinsUtilsMock(),
deployTool: 'cf_native',
deployType: 'blue-green',
keepOldInstance: true,
@@ -315,6 +342,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
jsr.step.cloudFoundryDeploy([
script: nullScript,
juStabUtils: utils,
jenkinsUtilsStub: new JenkinsUtilsMock(),
deployTool: 'cf_native',
deployType: 'standard',
keepOldInstance: true,
@@ -340,6 +368,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
jsr.step.cloudFoundryDeploy([
script: nullScript,
juStabUtils: utils,
jenkinsUtilsStub: new JenkinsUtilsMock(),
deployTool: 'cf_native',
deployType: 'blue-green',
cfOrg: 'testOrg',
@@ -355,6 +384,7 @@ class CloudFoundryDeployTest extends BasePiperTest {
jsr.step.cloudFoundryDeploy([
script: nullScript,
juStabUtils: utils,
jenkinsUtilsStub: new JenkinsUtilsMock(),
cfOrg: 'testOrg',
cfSpace: 'testSpace',
cfCredentialsId: 'test_cfCredentialsId',
@@ -368,4 +398,55 @@ class CloudFoundryDeployTest extends BasePiperTest {
assertThat(jscr.shell, hasItem(containsString('cf deploy target/test.mtar -f')))
assertThat(jscr.shell, hasItem(containsString('cf logout')))
}

@Test
void testMtaBlueGreen() {

jsr.step.cloudFoundryDeploy([
script: nullScript,
juStabUtils: utils,
jenkinsUtilsStub: new JenkinsUtilsMock(),
cfOrg: 'testOrg',
cfSpace: 'testSpace',
cfCredentialsId: 'test_cfCredentialsId',
deployTool: 'mtaDeployPlugin',
deployType: 'blue-green',
mtaPath: 'target/test.mtar'
])

assertThat(jscr.shell, hasItem(stringContainsInOrder(["cf login -u test_cf", 'cf bg-deploy', '-f', '--no-confirm'])))
}

@Test
void testInfluxReporting() {
jryr.registerYaml('test.yml', "applications: [[name: 'manifestAppName']]")
helper.registerAllowedMethod('writeYaml', [Map], { Map parameters ->
generatedFile = parameters.file
data = parameters.data
})
nullScript.commonPipelineEnvironment.setArtifactVersion('1.2.3')
jsr.step.cloudFoundryDeploy([
script: nullScript,
juStabUtils: utils,
jenkinsUtilsStub: new JenkinsUtilsMock(),
deployTool: 'cf_native',
cfOrg: 'testOrg',
cfSpace: 'testSpace',
cfCredentialsId: 'test_cfCredentialsId',
cfAppName: 'testAppName',
cfManifest: 'test.yml'
])
// asserts
assertThat(writeInfluxMap.customDataMap.deployment_data.artifactUrl, is('n/a'))
assertThat(writeInfluxMap.customDataMap.deployment_data.deployTime, containsString(new Date().format( 'MMM dd, yyyy')))
assertThat(writeInfluxMap.customDataMap.deployment_data.jobTrigger, is('USER'))

assertThat(writeInfluxMap.customDataMapTags.deployment_data.artifactVersion, is('1.2.3'))
assertThat(writeInfluxMap.customDataMapTags.deployment_data.deployUser, is('test_cf'))
assertThat(writeInfluxMap.customDataMapTags.deployment_data.deployResult, is('SUCCESS'))
assertThat(writeInfluxMap.customDataMapTags.deployment_data.cfApiEndpoint, is('https://api.cf.eu10.hana.ondemand.com'))
assertThat(writeInfluxMap.customDataMapTags.deployment_data.cfOrg, is('testOrg'))
assertThat(writeInfluxMap.customDataMapTags.deployment_data.cfSpace, is('testSpace'))
}

}
@@ -49,6 +49,7 @@ class DockerExecuteOnKubernetesTest extends BasePiperTest {
def containersList = []
def imageList = []
def containerName = ''
def containerShell = ''
def envList = []
def portList = []
def containerCommands = []
@@ -260,9 +261,36 @@
assertThat(envList, hasItem(hasItem(allOf(hasEntry('key', 'customEnvKey'), hasEntry ('value','customEnvValue')))))
}

@Test
void testDockerExecuteOnKubernetesWithCustomShell() {
jsr.step.dockerExecuteOnKubernetes(
script: nullScript,
juStabUtils: utils,
dockerImage: 'maven:3.5-jdk-8-alpine',
containerShell: '/busybox/sh'
) {
//nothing to execute
}
assertThat(containerShell, is('/busybox/sh'))
}

@Test
void testDockerExecuteOnKubernetesWithCustomContainerCommand() {
jsr.step.dockerExecuteOnKubernetes(
script: nullScript,
juStabUtils: utils,
dockerImage: 'maven:3.5-jdk-8-alpine',
containerCommand: '/busybox/tail -f /dev/null'
) {
//nothing to execute
}
assertThat(containerCommands, hasItem('/busybox/tail -f /dev/null'))
}


private container(options, body) {
containerName = options.name
containerShell = options.shell
body()
}
}
@@ -31,7 +31,7 @@ class DockerExecuteTest extends BasePiperTest {
.around(jlr)
.around(jsr)

int whichDockerReturnValue = 0
int dockerPsReturnValue = 0
def bodyExecuted
def containerName

@@ -41,7 +41,7 @@ class DockerExecuteTest extends BasePiperTest {
docker = new DockerMock()
JenkinsUtils.metaClass.static.isPluginActive = {def s -> new PluginMock(s).isActive()}
binding.setVariable('docker', docker)
helper.registerAllowedMethod('sh', [Map.class], {return whichDockerReturnValue})
helper.registerAllowedMethod('sh', [Map.class], {return dockerPsReturnValue})
}

@Test
@@ -104,6 +104,28 @@ class DockerExecuteTest extends BasePiperTest {
assertTrue(bodyExecuted)
}

@Test
void testExecuteInsidePodWithCustomCommandAndShell() throws Exception {
Map kubernetesConfig = [:]
helper.registerAllowedMethod('dockerExecuteOnKubernetes', [Map.class, Closure.class], {Map config, Closure body ->
kubernetesConfig = config
return body()
})
binding.setVariable('env', [ON_K8S: 'true'])
jsr.step.dockerExecute(
script: nullScript,
containerCommand: '/busybox/tail -f /dev/null',
containerShell: '/busybox/sh',
dockerImage: 'maven:3.5-jdk-8-alpine'
){
bodyExecuted = true
}
assertTrue(jlr.log.contains('Executing inside a Kubernetes Pod'))
assertThat(kubernetesConfig.containerCommand, is('/busybox/tail -f /dev/null'))
assertThat(kubernetesConfig.containerShell, is('/busybox/sh'))
assertTrue(bodyExecuted)
}

@Test
void testExecuteInsideDockerContainer() throws Exception {
jsr.step.dockerExecute(script: nullScript, dockerImage: 'maven:3.5-jdk-8-alpine') {
@@ -119,14 +141,14 @@ class DockerExecuteTest extends BasePiperTest {
void testExecuteInsideDockerContainerWithParameters() throws Exception {
jsr.step.dockerExecute(script: nullScript,
dockerImage: 'maven:3.5-jdk-8-alpine',
dockerOptions: '-it',
dockerOptions: '-description=lorem ipsum',
dockerVolumeBind: ['my_vol': '/my_vol'],
dockerEnvVars: ['http_proxy': 'http://proxy:8000']) {
bodyExecuted = true
}
assertTrue(docker.getParameters().contains('--env https_proxy '))
assertTrue(docker.getParameters().contains('--env http_proxy=http://proxy:8000'))
assertTrue(docker.getParameters().contains('-it'))
assertTrue(docker.getParameters().contains('description=lorem\\ ipsum'))
assertTrue(docker.getParameters().contains('--volume my_vol:/my_vol'))
assertTrue(bodyExecuted)
}
@@ -135,23 +157,24 @@ class DockerExecuteTest extends BasePiperTest {
void testExecuteInsideDockerContainerWithDockerOptionsList() throws Exception {
jsr.step.dockerExecute(script: nullScript,
dockerImage: 'maven:3.5-jdk-8-alpine',
dockerOptions: ['-it', '--network=my-network'],
dockerOptions: ['-it', '--network=my-network', 'description=lorem ipsum'],
dockerEnvVars: ['http_proxy': 'http://proxy:8000']) {
bodyExecuted = true
}
assertTrue(docker.getParameters().contains('--env http_proxy=http://proxy:8000'))
assertTrue(docker.getParameters().contains('-it'))
assertTrue(docker.getParameters().contains('--network=my-network'))
assertTrue(docker.getParameters().contains('description=lorem\\ ipsum'))
}

@Test
void testDockerNotInstalledResultsInLocalExecution() throws Exception {
whichDockerReturnValue = 1
dockerPsReturnValue = 1
jsr.step.dockerExecute(script: nullScript,
dockerOptions: '-it') {
bodyExecuted = true
}
assertTrue(jlr.log.contains('No docker environment found'))
assertTrue(jlr.log.contains('Cannot connect to docker daemon'))
assertTrue(jlr.log.contains('Running on local environment'))
assertTrue(bodyExecuted)
assertFalse(docker.isImagePulled())
test/groovy/FioriOnCloudPlatformPipelineTest.groovy (new file, 136 lines)

@@ -0,0 +1,136 @@
import static org.hamcrest.Matchers.allOf
import static org.hamcrest.Matchers.containsString
import static org.hamcrest.Matchers.equalTo
import static org.hamcrest.Matchers.hasItem
import static org.hamcrest.Matchers.is
import static org.hamcrest.Matchers.subString
import static org.junit.Assert.assertThat

import org.hamcrest.Matchers
import org.junit.Assert
import org.junit.Before
import org.junit.Rule
import org.junit.Test
import org.junit.rules.RuleChain

import com.sap.piper.JenkinsUtils

import util.BasePiperTest
import util.JenkinsCredentialsRule
import util.JenkinsReadYamlRule
import util.JenkinsShellCallRule
import util.JenkinsStepRule
import util.Rules

class FioriOnCloudPlatformPipelineTest extends BasePiperTest {

/* This scenario builds a fiori app and deploys it into a neo account.
The build is performed using mta, which delegates to grunt. grunt in
turn makes use of the 'sap/grunt-sapui5-bestpractice-build' plugin.
The dependencies are resolved via npm.

In order to run the scenario the project needs to fulfill these
prerequisites:

Build tools:
* mta.jar available
* npm installed

Project configuration:
* sap registry `@sap:registry=https://npm.sap.com` configured in
.npmrc (either in the project or on any other suitable level)
* dependency to `@sap/grunt-sapui5-bestpractice-build` declared in
package.json
* npmTask `@sap/grunt-sapui5-bestpractice-build` loaded inside
Gruntfile.js and configure default tasks (e.g. lint, clean, build)
* mta.yaml
*/

JenkinsStepRule jsr = new JenkinsStepRule(this)
JenkinsReadYamlRule jryr = new JenkinsReadYamlRule(this)
JenkinsShellCallRule jscr = new JenkinsShellCallRule(this)

@Rule
public RuleChain ruleChain = Rules
.getCommonRules(this)
.around(jryr)
.around(jsr)
.around(jscr)
.around(new JenkinsCredentialsRule(this)
.withCredentials('CI_CREDENTIALS_ID', 'foo', 'terceSpot'))

@Before
void setup() {
//
// needed since we have dockerExecute inside mtaBuild
JenkinsUtils.metaClass.static.isPluginActive = {def s -> false}

//
|
||||
// Things we validate:
|
||||
jscr.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*echo \\$JAVA_HOME.*', '/opt/sap/java')
|
||||
jscr.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*echo \\$MTA_JAR_LOCATION.*', '/opt/sap')
|
||||
jscr.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*echo \\$NEO_HOME.*', '/opt/sap/neo')
|
||||
jscr.setReturnValue(JenkinsShellCallRule.Type.REGEX, ".*bin/java -version.*", '1.8.0') // the java version
|
||||
jscr.setReturnValue(JenkinsShellCallRule.Type.REGEX, ".*bin/java -jar .*mta.jar", '1.36.0') // the mta version
|
||||
|
||||
//
|
||||
// there is a check for the mta.yaml file and for the deployable test.mtar file
|
||||
helper.registerAllowedMethod('fileExists', [String],{
|
||||
|
||||
it ->
|
||||
|
||||
// called inside mtaBuild, this file contains build config
|
||||
it == 'mta.yaml' ||
|
||||
|
||||
// called inside neo deploy, this file gets deployed
|
||||
it == 'test.mtar'
|
||||
})
|
||||
|
||||
//
|
||||
// the properties below we read out of the yaml file
|
||||
jryr.registerYaml('mta.yaml', ('''
|
||||
|ID : "test"
|
||||
|PATH : "."
|
||||
|''' as CharSequence).stripMargin())
|
||||
|
||||
//
|
||||
// we need the path variable since we extend the path in the mtaBuild step. In order
|
||||
// to be able to extend the path we have to have some initial value.
|
||||
binding.setVariable('PATH', '/usr/bin')
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
void straightForwardTest() {
|
||||
|
||||
nullScript
|
||||
.commonPipelineEnvironment
|
||||
.configuration = [steps:
|
||||
[neoDeploy:
|
||||
[ host: 'hana.example.com',
|
||||
account: 'myTestAccount',
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
jsr.step.fioriOnCloudPlatformPipeline(script: nullScript)
|
||||
|
||||
//
|
||||
// the mta build call:
|
||||
assertThat(jscr.shell, hasItem(
|
||||
allOf( containsString('java -jar /opt/sap/mta.jar'),
|
||||
containsString('--mtar test.mtar'),
|
||||
containsString('--build-target=NEO'),
|
||||
containsString('build'))))
|
||||
|
||||
//
|
||||
// the deployable is exchanged between the involved steps via this property:
|
||||
assertThat(nullScript.commonPipelineEnvironment.getMtarFilePath(), is(equalTo('test.mtar')))
|
||||
|
||||
//
|
||||
// the neo deploy call:
|
||||
assertThat(jscr.shell, hasItem('#!/bin/bash "/opt/sap/neo/tools/neo.sh" deploy-mta --source "test.mtar" ' +
|
||||
'--host \'hana.example.com\' --account \'myTestAccount\' --synchronous ' +
|
||||
'--user \'foo\' --password \'terceSpot\''))
|
||||
}
|
||||
}
|
@ -10,7 +10,12 @@ import util.JenkinsStepRule
|
||||
import util.JenkinsReadYamlRule
|
||||
import util.Rules
|
||||
|
||||
import static org.hamcrest.Matchers.allOf
|
||||
import static org.hamcrest.Matchers.containsString
|
||||
import static org.hamcrest.Matchers.hasKey
|
||||
import static org.hamcrest.Matchers.hasValue
|
||||
import static org.hamcrest.Matchers.is
|
||||
import static org.hamcrest.Matchers.isEmptyOrNullString
|
||||
import static org.junit.Assert.assertThat
|
||||
import static org.junit.Assert.assertTrue
|
||||
import static org.junit.Assert.assertEquals
|
||||
@ -59,15 +64,18 @@ class InfluxWriteDataTest extends BasePiperTest {
|
||||
nullScript.commonPipelineEnvironment.setArtifactVersion('1.2.3')
|
||||
jsr.step.influxWriteData(script: nullScript)
|
||||
|
||||
assertTrue(loggingRule.log.contains('Artifact version: 1.2.3'))
|
||||
assertThat(loggingRule.log, containsString('Artifact version: 1.2.3'))
|
||||
|
||||
assertEquals('testInflux', stepMap.selectedTarget)
|
||||
assertEquals(null, stepMap.customPrefix)
|
||||
assertEquals([:], stepMap.customData)
|
||||
assertEquals([pipeline_data: [:], step_data: [:]], stepMap.customDataMap)
|
||||
assertThat(stepMap.selectedTarget, is('testInflux'))
|
||||
assertThat(stepMap.customPrefix, isEmptyOrNullString())
|
||||
|
||||
assertTrue(fileMap.containsKey('jenkins_data.json'))
|
||||
assertTrue(fileMap.containsKey('pipeline_data.json'))
|
||||
assertThat(stepMap.customData, isEmptyOrNullString())
|
||||
assertThat(stepMap.customDataMap, is([pipeline_data: [:], step_data: [:]]))
|
||||
|
||||
assertThat(fileMap, hasKey('jenkins_data.json'))
|
||||
assertThat(fileMap, hasKey('influx_data.json'))
|
||||
assertThat(fileMap, hasKey('jenkins_data_tags.json'))
|
||||
assertThat(fileMap, hasKey('influx_data_tags.json'))
|
||||
|
||||
assertJobStatusSuccess()
|
||||
}
|
||||
@ -81,7 +89,7 @@ class InfluxWriteDataTest extends BasePiperTest {
|
||||
assertEquals(0, stepMap.size())
|
||||
|
||||
assertTrue(fileMap.containsKey('jenkins_data.json'))
|
||||
assertTrue(fileMap.containsKey('pipeline_data.json'))
|
||||
assertTrue(fileMap.containsKey('influx_data.json'))
|
||||
|
||||
assertJobStatusSuccess()
|
||||
}
|
||||
@ -116,4 +124,41 @@ class InfluxWriteDataTest extends BasePiperTest {
|
||||
assertThat(nodeCalled, is(true))
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
void testInfluxCustomData() {
|
||||
nullScript.commonPipelineEnvironment.setArtifactVersion('1.2.3')
|
||||
jsr.step.influxWriteData(
|
||||
//juStabUtils: utils,
|
||||
script: nullScript,
|
||||
influxServer: 'myInstance',
|
||||
customData: [key1: 'test1'],
|
||||
customDataTags: [tag1: 'testTag1'],
|
||||
customDataMap: [test_data: [key1: 'keyValue1']],
|
||||
customDataMapTags: [test_data: [tag1: 'tagValue1']]
|
||||
)
|
||||
assertThat(stepMap.customData, allOf(hasKey('key1'), hasValue('test1')))
|
||||
assertThat(stepMap.customDataTags, allOf(hasKey('tag1'), hasValue('testTag1')))
|
||||
assertThat(stepMap.customDataMap, hasKey('test_data'))
|
||||
assertThat(stepMap.customDataMapTags, hasKey('test_data'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testInfluxCustomDataFromCPE() {
|
||||
nullScript.commonPipelineEnvironment.reset()
|
||||
nullScript.commonPipelineEnvironment.setArtifactVersion('1.2.3')
|
||||
nullScript.commonPipelineEnvironment.setInfluxCustomDataTagsEntry('tag1', 'testTag1')
|
||||
nullScript.commonPipelineEnvironment.setInfluxCustomDataMapEntry('test_data', 'key1', 'keyValue1')
|
||||
nullScript.commonPipelineEnvironment.setInfluxCustomDataMapTagsEntry('test_data', 'tag1', 'tagValue1')
|
||||
jsr.step.influxWriteData(
|
||||
//juStabUtils: utils,
|
||||
script: nullScript,
|
||||
influxServer: 'myInstance'
|
||||
)
|
||||
assertThat(stepMap.customData, isEmptyOrNullString())
|
||||
assertThat(stepMap.customDataTags, allOf(hasKey('tag1'), hasValue('testTag1')))
|
||||
assertThat(stepMap.customDataMap, hasKey('test_data'))
|
||||
assertThat(stepMap.customDataMapTags, hasKey('test_data'))
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -41,7 +41,14 @@ public class MtaBuildTest extends BasePiperTest {
|
||||
void init() {
|
||||
|
||||
helper.registerAllowedMethod('fileExists', [String], { s -> s == 'mta.yaml' })
|
||||
helper.registerAllowedMethod('sh', [Map], { Map m -> getVersionWithoutEnvVars(m) })
|
||||
|
||||
jscr.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*\\$MTA_JAR_LOCATION.*', '')
|
||||
jscr.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*\\$JAVA_HOME.*', '')
|
||||
jscr.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*which java.*', 0)
|
||||
jscr.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*java -version.*', '''openjdk version \"1.8.0_121\"
|
||||
OpenJDK Runtime Environment (build 1.8.0_121-8u121-b13-1~bpo8+1-b13)
|
||||
OpenJDK 64-Bit Server VM (build 25.121-b13, mixed mode)''')
|
||||
jscr.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*mta\\.jar -v.*', '1.0.6')
|
||||
|
||||
binding.setVariable('PATH', '/usr/bin')
|
||||
}
|
||||
@ -126,7 +133,7 @@ public class MtaBuildTest extends BasePiperTest {
|
||||
@Test
|
||||
void mtaJarLocationFromEnvironmentTest() {
|
||||
|
||||
helper.registerAllowedMethod('sh', [Map], { Map m -> getVersionWithEnvVars(m) })
|
||||
jscr.setReturnValue(JenkinsShellCallRule.Type.REGEX, '.*\\$MTA_JAR_LOCATION.*', '/env/mta/mta.jar')
|
||||
|
||||
jsr.step.mtaBuild(script: nullScript, buildTarget: 'NEO')
|
||||
|
||||
@ -290,81 +297,4 @@ public class MtaBuildTest extends BasePiperTest {
|
||||
'''
|
||||
}
|
||||
|
||||
private getVersionWithEnvVars(Map m) {
|
||||
|
||||
if(m.script.contains('java -version')) {
|
||||
return '''openjdk version \"1.8.0_121\"
|
||||
OpenJDK Runtime Environment (build 1.8.0_121-8u121-b13-1~bpo8+1-b13)
|
||||
OpenJDK 64-Bit Server VM (build 25.121-b13, mixed mode)'''
|
||||
} else if(m.script.contains('mta.jar -v')) {
|
||||
return '1.0.6'
|
||||
} else {
|
||||
return getEnvVars(m)
|
||||
}
|
||||
}
|
||||
|
||||
private getVersionWithoutEnvVars(Map m) {
|
||||
|
||||
if(m.script.contains('java -version')) {
|
||||
return '''openjdk version \"1.8.0_121\"
|
||||
OpenJDK Runtime Environment (build 1.8.0_121-8u121-b13-1~bpo8+1-b13)
|
||||
OpenJDK 64-Bit Server VM (build 25.121-b13, mixed mode)'''
|
||||
} else if(m.script.contains('mta.jar -v')) {
|
||||
return '1.0.6'
|
||||
} else {
|
||||
return getNoEnvVars(m)
|
||||
}
|
||||
}
|
||||
|
||||
private getVersionWithoutEnvVarsAndNotInCurrentDir(Map m) {
|
||||
|
||||
if(m.script.contains('java -version')) {
|
||||
return '''openjdk version \"1.8.0_121\"
|
||||
OpenJDK Runtime Environment (build 1.8.0_121-8u121-b13-1~bpo8+1-b13)
|
||||
OpenJDK 64-Bit Server VM (build 25.121-b13, mixed mode)'''
|
||||
} else if(m.script.contains('mta.jar -v')) {
|
||||
return '1.0.6'
|
||||
} else {
|
||||
return getNoEnvVarsAndNotInCurrentDir(m)
|
||||
}
|
||||
}
|
||||
|
||||
private getEnvVars(Map m) {
|
||||
|
||||
if(m.script.contains('JAVA_HOME')) {
|
||||
return ''
|
||||
} else if(m.script.contains('MTA_JAR_LOCATION')) {
|
||||
return '/env/mta/mta.jar'
|
||||
} else if(m.script.contains('which java')) {
|
||||
return 0
|
||||
} else {
|
||||
return 0
|
||||
}
|
||||
}
|
||||
|
||||
private getNoEnvVars(Map m) {
|
||||
|
||||
if(m.script.contains('JAVA_HOME')) {
|
||||
return ''
|
||||
} else if(m.script.contains('MTA_JAR_LOCATION')) {
|
||||
return ''
|
||||
} else if(m.script.contains('which java')) {
|
||||
return 0
|
||||
} else {
|
||||
return 0
|
||||
}
|
||||
}
|
||||
|
||||
private getNoEnvVarsAndNotInCurrentDir(Map m) {
|
||||
|
||||
if(m.script.contains('JAVA_HOME')) {
|
||||
return ''
|
||||
} else if(m.script.contains('MTA_JAR_LOCATION')) {
|
||||
return ''
|
||||
} else if(m.script.contains('which java')) {
|
||||
return 0
|
||||
} else {
|
||||
return 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -6,6 +6,8 @@ import org.junit.rules.RuleChain
|
||||
import static org.hamcrest.Matchers.hasItem
|
||||
import static org.hamcrest.Matchers.is
|
||||
import static org.hamcrest.Matchers.containsString
|
||||
import static org.hamcrest.Matchers.endsWith
|
||||
import static org.hamcrest.Matchers.startsWith
|
||||
|
||||
import static org.junit.Assert.assertThat
|
||||
|
||||
@ -68,13 +70,28 @@ class NewmanExecuteTest extends BasePiperTest {
|
||||
newmanGlobals: 'testGlobals'
|
||||
)
|
||||
// asserts
|
||||
assertThat(jscr.shell, hasItem('npm install newman newman-reporter-html --global --quiet'))
|
||||
assertThat(jscr.shell, hasItem('newman run \'testCollection\' --environment \'testEnvironment\' --globals \'testGlobals\' --reporters junit,html --reporter-junit-export \'target/newman/TEST-testCollection.xml\' --reporter-html-export \'target/newman/TEST-testCollection.html\''))
|
||||
assertThat(jscr.shell, hasItem(endsWith('npm install newman newman-reporter-html --global --quiet')))
|
||||
assertThat(jscr.shell, hasItem(endsWith('newman run \'testCollection\' --environment \'testEnvironment\' --globals \'testGlobals\' --reporters junit,html --reporter-junit-export \'target/newman/TEST-testCollection.xml\' --reporter-html-export \'target/newman/TEST-testCollection.html\'')))
|
||||
assertThat(jedr.dockerParams.dockerImage, is('node:8-stretch'))
|
||||
assertThat(jlr.log, containsString('[newmanExecute] Found files [testCollection]'))
|
||||
assertJobStatusSuccess()
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGlobalInstall() throws Exception {
|
||||
jsr.step.newmanExecute(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
newmanCollection: 'testCollection',
|
||||
newmanEnvironment: 'testEnvironment',
|
||||
newmanGlobals: 'testGlobals'
|
||||
)
|
||||
// asserts
|
||||
assertThat(jscr.shell, hasItem(startsWith('NPM_CONFIG_PREFIX=~/.npm-global ')))
|
||||
assertThat(jscr.shell, hasItem(startsWith('PATH=$PATH:~/.npm-global/bin')))
|
||||
assertJobStatusSuccess()
|
||||
}
|
||||
|
||||
@Test
|
||||
void testExecuteNewmanWithNoCollection() throws Exception {
|
||||
thrown.expectMessage('[newmanExecute] No collection found with pattern \'notFound.json\'')
|
||||
@ -103,7 +120,7 @@ class NewmanExecuteTest extends BasePiperTest {
|
||||
// asserts
|
||||
assertThat(jedr.dockerParams.dockerImage, is('testImage'))
|
||||
assertThat(gitMap.url, is('testRepo'))
|
||||
assertThat(jscr.shell, hasItem('newman run \'testCollection\' --environment \'testEnvironment\' --globals \'testGlobals\' --reporters junit,html --reporter-junit-export \'target/newman/TEST-testCollection.xml\' --reporter-html-export \'target/newman/TEST-testCollection.html\' --suppress-exit-code'))
|
||||
assertThat(jscr.shell, hasItem(endsWith('newman run \'testCollection\' --environment \'testEnvironment\' --globals \'testGlobals\' --reporters junit,html --reporter-junit-export \'target/newman/TEST-testCollection.xml\' --reporter-html-export \'target/newman/TEST-testCollection.html\' --suppress-exit-code')))
|
||||
assertJobStatusSuccess()
|
||||
}
|
||||
|
||||
@ -115,8 +132,8 @@ class NewmanExecuteTest extends BasePiperTest {
|
||||
newmanRunCommand: 'run ${config.newmanCollection} --iteration-data testDataFile --reporters junit,html --reporter-junit-export target/newman/TEST-${config.newmanCollection.toString().replace(File.separatorChar,(char)\'_\').tokenize(\'.\').first()}.xml --reporter-html-export target/newman/TEST-${config.newmanCollection.toString().replace(File.separatorChar,(char)\'_\').tokenize(\'.\').first()}.html'
|
||||
)
|
||||
// asserts
|
||||
assertThat(jscr.shell, hasItem('newman run testCollectionsFolder'+File.separatorChar+'A.postman_collection.json --iteration-data testDataFile --reporters junit,html --reporter-junit-export target/newman/TEST-testCollectionsFolder_A.xml --reporter-html-export target/newman/TEST-testCollectionsFolder_A.html'))
|
||||
assertThat(jscr.shell, hasItem('newman run testCollectionsFolder'+File.separatorChar+'B.postman_collection.json --iteration-data testDataFile --reporters junit,html --reporter-junit-export target/newman/TEST-testCollectionsFolder_B.xml --reporter-html-export target/newman/TEST-testCollectionsFolder_B.html'))
|
||||
assertThat(jscr.shell, hasItem(endsWith('newman run testCollectionsFolder'+File.separatorChar+'A.postman_collection.json --iteration-data testDataFile --reporters junit,html --reporter-junit-export target/newman/TEST-testCollectionsFolder_A.xml --reporter-html-export target/newman/TEST-testCollectionsFolder_A.html')))
|
||||
assertThat(jscr.shell, hasItem(endsWith('newman run testCollectionsFolder'+File.separatorChar+'B.postman_collection.json --iteration-data testDataFile --reporters junit,html --reporter-junit-export target/newman/TEST-testCollectionsFolder_B.xml --reporter-html-export target/newman/TEST-testCollectionsFolder_B.html')))
|
||||
assertJobStatusSuccess()
|
||||
}
|
||||
}
|
||||
|
@ -54,7 +54,7 @@ class PiperStageWrapperTest extends BasePiperTest {
|
||||
|
||||
@Test
|
||||
void testDefault() {
|
||||
def testInt = 1
|
||||
def executed = false
|
||||
jsr.step.piperStageWrapper(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
@ -62,16 +62,16 @@ class PiperStageWrapperTest extends BasePiperTest {
|
||||
stageName: 'test'
|
||||
|
||||
) {
|
||||
testInt ++
|
||||
executed = true
|
||||
}
|
||||
assertThat(testInt, is(2))
|
||||
assertThat(executed, is(true))
|
||||
assertThat(lockMap.size(), is(2))
|
||||
assertThat(countNodeUsage, is(1))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testNoLocking() {
|
||||
def testInt = 1
|
||||
def executed = false
|
||||
jsr.step.piperStageWrapper(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
@ -81,9 +81,9 @@ class PiperStageWrapperTest extends BasePiperTest {
|
||||
stageName: 'test'
|
||||
|
||||
) {
|
||||
testInt ++
|
||||
executed = true
|
||||
}
|
||||
assertThat(testInt, is(2))
|
||||
assertThat(executed, is(true))
|
||||
assertThat(lockMap.size(), is(0))
|
||||
assertThat(countNodeUsage, is(1))
|
||||
assertThat(nodeLabel, is('testLabel'))
|
||||
@ -98,21 +98,23 @@ class PiperStageWrapperTest extends BasePiperTest {
|
||||
helper.registerAllowedMethod('load', [String.class], {
|
||||
return helper.loadScript('test/resources/stages/test.groovy')
|
||||
})
|
||||
nullScript.commonPipelineEnvironment.gitBranch = 'testBranch'
|
||||
|
||||
def testInt = 1
|
||||
def executed = false
|
||||
jsr.step.piperStageWrapper(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
ordinal: 10,
|
||||
stageName: 'test'
|
||||
) {
|
||||
testInt ++
|
||||
executed = true
|
||||
}
|
||||
|
||||
assertThat(testInt, is(2))
|
||||
assertThat(executed, is(true))
|
||||
assertThat(jlr.log, containsString('[piperStageWrapper] Running project interceptor \'.pipeline/extensions/test.groovy\' for test.'))
|
||||
assertThat(jlr.log, containsString('Stage Name: test'))
|
||||
assertThat(jlr.log, containsString('Config:'))
|
||||
assertThat(jlr.log, containsString('Config: [productiveBranch:master,'))
|
||||
assertThat(jlr.log, containsString('testBranch'))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,6 +1,7 @@
|
||||
void call(body, stageName, config) {
|
||||
echo "Stage Name: ${stageName}"
|
||||
echo "Config: ${config}"
|
||||
body()
|
||||
void call(Map params) {
|
||||
echo "Stage Name: ${params.stageName}"
|
||||
echo "Config: ${params.config}"
|
||||
params.originalStage()
|
||||
echo "Branch: ${params.script.commonPipelineEnvironment.gitBranch}"
|
||||
}
|
||||
return this
|
||||
|
@ -61,7 +61,15 @@ void call(Map parameters = [:], Closure body = null) {
|
||||
config = configHelper.addIfEmpty('timestamp', getTimestamp(config.timestampTemplate))
|
||||
.use()
|
||||
|
||||
new Utils().pushToSWA([step: STEP_NAME, stepParam1: config.buildTool, stepParam2: config.artifactType, stepParam3: parameters?.script == null], config)
|
||||
new Utils().pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'buildTool',
|
||||
stepParam1: config.buildTool,
|
||||
stepParamKey2: 'artifactType',
|
||||
stepParam2: config.artifactType,
|
||||
stepParamKey3: 'scriptMissing',
|
||||
stepParam3: parameters?.script == null
|
||||
], config)
|
||||
|
||||
def artifactVersioning = ArtifactVersioning.getArtifactVersioning(config.buildTool, script, config)
|
||||
def currentVersion = artifactVersioning.getVersion()
|
||||
@ -87,7 +95,7 @@ void call(Map parameters = [:], Closure body = null) {
|
||||
:script.commonPipelineEnvironment.getGitSshUrl())
|
||||
.withMandatoryProperty('gitSshUrl')
|
||||
.use()
|
||||
|
||||
|
||||
def gitConfig = []
|
||||
|
||||
if(config.gitUserEMail) gitConfig.add("-c user.email=\"${config.gitUserEMail}\"")
|
||||
|
@ -43,8 +43,11 @@ void call(Map parameters = [:]) {
|
||||
.use()
|
||||
|
||||
// report to SWA
|
||||
utils.pushToSWA([step: STEP_NAME,
|
||||
stepParam1: parameters?.script == null], config)
|
||||
utils.pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'scriptMissing',
|
||||
stepParam1: parameters?.script == null
|
||||
], config)
|
||||
|
||||
script.commonPipelineEnvironment.setInfluxStepData('bats', false)
|
||||
|
||||
|
@ -1,12 +1,12 @@
|
||||
import static com.sap.piper.Prerequisites.checkScript
|
||||
|
||||
import com.sap.piper.GenerateDocumentation
|
||||
import com.sap.piper.GitUtils
|
||||
import com.sap.piper.Utils
|
||||
import groovy.transform.Field
|
||||
import hudson.AbortException
|
||||
|
||||
import com.sap.piper.ConfigurationHelper
|
||||
import com.sap.piper.ConfigurationMerger
|
||||
import com.sap.piper.cm.BackendType
|
||||
import com.sap.piper.cm.ChangeManagement
|
||||
import com.sap.piper.cm.ChangeManagementException
|
||||
@ -35,8 +35,8 @@ import static com.sap.piper.cm.StepHelpers.getBackendTypeAndLogInfoIfCMIntegrati
|
||||
*
|
||||
* By default the git commit messages between `origin/master` and `HEAD` are scanned for a line like `ChangeDocument : <changeDocumentId>`. The commit
|
||||
* range and the pattern can be configured. For details see 'parameters' table.
|
||||
*
|
||||
*/
|
||||
@GenerateDocumentation
|
||||
void call(parameters = [:]) {
|
||||
|
||||
handlePipelineStepErrors (stepName: STEP_NAME, stepParameters: parameters) {
|
||||
@ -94,8 +94,11 @@ void call(parameters = [:]) {
|
||||
*/
|
||||
.withMandatoryProperty('changeManagement/endpoint')
|
||||
|
||||
new Utils().pushToSWA([step: STEP_NAME,
|
||||
stepParam1: parameters?.script == null], configuration)
|
||||
new Utils().pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'scriptMissing',
|
||||
stepParam1: parameters?.script == null
|
||||
], configuration)
|
||||
|
||||
def changeId = getChangeDocumentId(cm, script, configuration)
|
||||
|
||||
|
@ -41,8 +41,11 @@ void call(Map parameters = [:]) {
|
||||
.mixin(parameters, PARAMETER_KEYS)
|
||||
.use()
|
||||
|
||||
new Utils().pushToSWA([step: STEP_NAME,
|
||||
stepParam1: parameters?.script == null], configuration)
|
||||
new Utils().pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'scriptMissing',
|
||||
stepParam1: parameters?.script == null
|
||||
], configuration)
|
||||
|
||||
// JAVA
|
||||
report('PmdPublisher', configuration.pmd, configuration.archive)
|
||||
|
@ -1,3 +1,5 @@
|
||||
import com.sap.piper.JenkinsUtils
|
||||
|
||||
import static com.sap.piper.Prerequisites.checkScript
|
||||
|
||||
import com.sap.piper.Utils
|
||||
@ -32,10 +34,8 @@ void call(Map parameters = [:]) {
|
||||
|
||||
handlePipelineStepErrors (stepName: STEP_NAME, stepParameters: parameters) {
|
||||
|
||||
def utils = parameters.juStabUtils
|
||||
if (utils == null) {
|
||||
utils = new Utils()
|
||||
}
|
||||
def utils = parameters.juStabUtils ?: new Utils()
|
||||
def jenkinsUtils = parameters.jenkinsUtilsStub ?: new JenkinsUtils()
|
||||
|
||||
def script = checkScript(this, parameters)
|
||||
if (script == null)
|
||||
@ -54,7 +54,15 @@ void call(Map parameters = [:]) {
|
||||
.withMandatoryProperty('cloudFoundry/credentialsId')
|
||||
.use()
|
||||
|
||||
utils.pushToSWA([step: STEP_NAME, stepParam1: config.deployTool, stepParam2: config.deployType, stepParam3: parameters?.script == null], config)
|
||||
utils.pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'deployTool',
|
||||
stepParam1: config.deployTool,
|
||||
stepParamKey2: 'deployType',
|
||||
stepParam2: config.deployType,
|
||||
stepParamKey3: 'scriptMissing',
|
||||
stepParam3: parameters?.script == null
|
||||
], config)
|
||||
|
||||
echo "[${STEP_NAME}] General parameters: deployTool=${config.deployTool}, deployType=${config.deployType}, cfApiEndpoint=${config.cloudFoundry.apiEndpoint}, cfOrg=${config.cloudFoundry.org}, cfSpace=${config.cloudFoundry.space}, cfCredentialsId=${config.cloudFoundry.credentialsId}, deployUser=${config.deployUser}"
|
||||
|
||||
@ -63,42 +71,54 @@ void call(Map parameters = [:]) {
|
||||
//make sure that for further execution whole workspace, e.g. also downloaded artifacts are considered
|
||||
config.stashContent = []
|
||||
|
||||
if (config.deployTool == 'mtaDeployPlugin') {
|
||||
// set default mtar path
|
||||
config = ConfigurationHelper.newInstance(this, config)
|
||||
.addIfEmpty('mtaPath', config.mtaPath?:findMtar())
|
||||
.use()
|
||||
boolean deploy = false
|
||||
boolean deploySuccess = true
|
||||
try {
|
||||
if (config.deployTool == 'mtaDeployPlugin') {
|
||||
deploy = true
|
||||
// set default mtar path
|
||||
config = ConfigurationHelper.newInstance(this, config)
|
||||
.addIfEmpty('mtaPath', config.mtaPath?:findMtar())
|
||||
.use()
|
||||
|
||||
dockerExecute(script: script, dockerImage: config.dockerImage, dockerWorkspace: config.dockerWorkspace, stashContent: config.stashContent) {
|
||||
deployMta(config)
|
||||
dockerExecute(script: script, dockerImage: config.dockerImage, dockerWorkspace: config.dockerWorkspace, stashContent: config.stashContent) {
|
||||
deployMta(config)
|
||||
}
|
||||
}
|
||||
return
|
||||
|
||||
if (config.deployTool == 'cf_native') {
|
||||
deploy = true
|
||||
config.smokeTest = ''
|
||||
|
||||
if (config.smokeTestScript == 'blueGreenCheckScript.sh') {
|
||||
writeFile file: config.smokeTestScript, text: libraryResource(config.smokeTestScript)
|
||||
}
|
||||
|
||||
config.smokeTest = '--smoke-test $(pwd)/' + config.smokeTestScript
|
||||
sh "chmod +x ${config.smokeTestScript}"
|
||||
|
||||
echo "[${STEP_NAME}] CF native deployment (${config.deployType}) with cfAppName=${config.cloudFoundry.appName}, cfManifest=${config.cloudFoundry.manifest}, smokeTestScript=${config.smokeTestScript}"
|
||||
|
||||
dockerExecute (
|
||||
script: script,
|
||||
dockerImage: config.dockerImage,
|
||||
dockerWorkspace: config.dockerWorkspace,
|
||||
stashContent: config.stashContent,
|
||||
dockerEnvVars: [CF_HOME:"${config.dockerWorkspace}", CF_PLUGIN_HOME:"${config.dockerWorkspace}", STATUS_CODE: "${config.smokeTestStatusCode}"]
|
||||
) {
|
||||
deployCfNative(config)
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
deploySuccess = false
|
||||
throw err
|
||||
} finally {
|
||||
if (deploy) {
|
||||
reportToInflux(script, config, deploySuccess, jenkinsUtils)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if (config.deployTool == 'cf_native') {
|
||||
config.smokeTest = ''
|
||||
|
||||
if (config.smokeTestScript == 'blueGreenCheckScript.sh') {
|
||||
writeFile file: config.smokeTestScript, text: libraryResource(config.smokeTestScript)
|
||||
}
|
||||
|
||||
config.smokeTest = '--smoke-test $(pwd)/' + config.smokeTestScript
|
||||
sh "chmod +x ${config.smokeTestScript}"
|
||||
|
||||
echo "[${STEP_NAME}] CF native deployment (${config.deployType}) with cfAppName=${config.cloudFoundry.appName}, cfManifest=${config.cloudFoundry.manifest}, smokeTestScript=${config.smokeTestScript}"
|
||||
|
||||
dockerExecute (
|
||||
script: script,
|
||||
dockerImage: config.dockerImage,
|
||||
dockerWorkspace: config.dockerWorkspace,
|
||||
stashContent: config.stashContent,
|
||||
dockerEnvVars: [CF_HOME:"${config.dockerWorkspace}", CF_PLUGIN_HOME:"${config.dockerWorkspace}", STATUS_CODE: "${config.smokeTestStatusCode}"]
|
||||
) {
|
||||
deployCfNative(config)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -147,7 +167,8 @@ def deployCfNative (config) {
|
||||
}
|
||||
|
||||
sh """#!/bin/bash
|
||||
set +x
|
||||
set +x
|
||||
set -e
|
||||
export HOME=${config.dockerWorkspace}
|
||||
cf login -u \"${username}\" -p '${password}' -a ${config.cloudFoundry.apiEndpoint} -o \"${config.cloudFoundry.org}\" -s \"${config.cloudFoundry.space}\"
|
||||
cf plugins
|
||||
@ -188,8 +209,12 @@ def deployMta (config) {
|
||||
if (!config.mtaExtensionDescriptor.isEmpty() && !config.mtaExtensionDescriptor.startsWith('-e ')) config.mtaExtensionDescriptor = "-e ${config.mtaExtensionDescriptor}"
|
||||
|
||||
def deployCommand = 'deploy'
|
||||
if (config.deployType == 'blue-green')
|
||||
if (config.deployType == 'blue-green') {
|
||||
deployCommand = 'bg-deploy'
|
||||
if (config.mtaDeployParameters.indexOf('--no-confirm') < 0) {
|
||||
config.mtaDeployParameters += ' --no-confirm'
|
||||
}
|
||||
}
|
||||
|
||||
withCredentials([usernamePassword(
|
||||
credentialsId: config.cloudFoundry.credentialsId,
|
||||
@ -200,6 +225,7 @@ def deployMta (config) {
|
||||
sh """#!/bin/bash
|
||||
export HOME=${config.dockerWorkspace}
|
||||
set +x
|
||||
set -e
|
||||
cf api ${config.cloudFoundry.apiEndpoint}
|
||||
cf login -u ${username} -p '${password}' -a ${config.cloudFoundry.apiEndpoint} -o \"${config.cloudFoundry.org}\" -s \"${config.cloudFoundry.space}\"
|
||||
cf plugins
|
||||
@ -222,3 +248,33 @@ Transformed manifest file content: $transformedManifest"""
|
||||
writeYaml file: config.cloudFoundry.manifest, data: manifest
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void reportToInflux(script, config, deploySuccess, JenkinsUtils jenkinsUtils) {
|
||||
def deployUser = ''
|
||||
withCredentials([usernamePassword(
|
||||
credentialsId: config.cloudFoundry.credentialsId,
|
||||
passwordVariable: 'password',
|
||||
usernameVariable: 'username'
|
||||
)]) {
|
||||
deployUser = username
|
||||
}
|
||||
|
||||
def timeFinished = new Date().format( 'MMM dd, yyyy - HH:mm:ss' )
|
||||
def triggerCause = jenkinsUtils.isJobStartedByUser()?'USER':(jenkinsUtils.isJobStartedByTimer()?'TIMER': 'OTHER')
|
||||
|
||||
def deploymentData = [deployment_data: [
|
||||
artifactUrl: 'n/a', //might be added later on during pipeline run (written to commonPipelineEnvironment)
|
||||
deployTime: timeFinished,
|
||||
jobTrigger: triggerCause
|
||||
]]
|
||||
def deploymentDataTags = [deployment_data: [
|
||||
artifactVersion: script.commonPipelineEnvironment.getArtifactVersion(),
|
||||
deployUser: deployUser,
|
||||
deployResult: deploySuccess?'SUCCESS':'FAILURE',
|
||||
cfApiEndpoint: config.cloudFoundry.apiEndpoint,
|
||||
cfOrg: config.cloudFoundry.org,
|
||||
cfSpace: config.cloudFoundry.space,
|
||||
]]
|
||||
influxWriteData script: script, customData: [:], customDataTags: [:], customDataMap: deploymentData, customDataMapTags: deploymentDataTags
|
||||
}
|
||||
|
@ -25,8 +25,12 @@ class commonPipelineEnvironment implements Serializable {
|
||||
|
||||
//each Map in influxCustomDataMap represents a measurement in Influx. Additional measurements can be added as a new Map entry of influxCustomDataMap
|
||||
private Map influxCustomDataMap = [pipeline_data: [:], step_data: [:]]
|
||||
//each Map in influxCustomDataMapTags represents tags for certain measurement in Influx. Tags are required in Influx for easier querying data
|
||||
private Map influxCustomDataMapTags = [pipeline_data: [:]]
|
||||
//influxCustomData represents measurement jenkins_custom_data in Influx. Metrics can be written into this map
|
||||
private Map influxCustomData = [:]
|
||||
//influxCustomDataTags represents tags in Influx. Tags are required in Influx for easier querying data
|
||||
private Map influxCustomDataTags = [:]
|
||||
|
||||
String mtarFilePath
|
||||
|
||||
@ -49,7 +53,9 @@ class commonPipelineEnvironment implements Serializable {
|
||||
githubRepo = null
|
||||
|
||||
influxCustomData = [:]
|
||||
influxCustomDataTags = [:]
|
||||
influxCustomDataMap = [pipeline_data: [:], step_data: [:]]
|
||||
influxCustomDataMapTags = [pipeline_data: [:]]
|
||||
|
||||
mtarFilePath = null
|
||||
|
||||
@ -76,26 +82,56 @@ class commonPipelineEnvironment implements Serializable {
|
||||
return configProperties[property]
|
||||
}
|
||||
|
||||
// goes into measurement jenkins_data
|
||||
def setInfluxCustomDataEntry(field, value) {
|
||||
influxCustomData[field] = value
|
||||
}
|
||||
// goes into measurement jenkins_data
|
||||
def getInfluxCustomData() {
|
||||
return influxCustomData
|
||||
}
|
||||
|
||||
// goes into measurement jenkins_data
|
||||
def setInfluxCustomDataTagsEntry(tag, value) {
|
||||
influxCustomDataTags[tag] = value
|
||||
}
|
||||
|
||||
// goes into measurement jenkins_data
|
||||
def getInfluxCustomDataTags() {
|
||||
return influxCustomDataTags
|
||||
}
|
||||
|
||||
void setInfluxCustomDataMapEntry(measurement, field, value) {
|
||||
if (!influxCustomDataMap[measurement]) {
|
||||
influxCustomDataMap[measurement] = [:]
|
||||
}
|
||||
influxCustomDataMap[measurement][field] = value
|
||||
}
|
||||
def getInfluxCustomDataMap() {
|
||||
return influxCustomDataMap
|
||||
}
|
||||
|
||||
def setInfluxStepData (dataKey, value) {
|
||||
influxCustomDataMap.step_data[dataKey] = value
|
||||
def setInfluxCustomDataMapTagsEntry(measurement, tag, value) {
|
||||
if (!influxCustomDataMapTags[measurement]) {
|
||||
influxCustomDataMapTags[measurement] = [:]
|
||||
}
|
||||
influxCustomDataMapTags[measurement][tag] = value
|
||||
}
|
||||
def getInfluxStepData (dataKey) {
|
||||
return influxCustomDataMap.step_data[dataKey]
|
||||
def getInfluxCustomDataMapTags() {
|
||||
return influxCustomDataMapTags
|
||||
}
|
||||
|
||||
def setPipelineMeasurement (measurementName, value) {
|
||||
influxCustomDataMap.pipeline_data[measurementName] = value
|
||||
def setInfluxStepData(key, value) {
|
||||
setInfluxCustomDataMapEntry('step_data', key, value)
|
||||
}
|
||||
def getInfluxStepData(key) {
|
||||
return influxCustomDataMap.step_data[key]
|
||||
}
|
||||
|
||||
def getPipelineMeasurement (measurementName) {
|
||||
return influxCustomDataMap.pipeline_data[measurementName]
|
||||
def setPipelineMeasurement(key, value) {
|
||||
setInfluxCustomDataMapEntry('pipeline_data', key, value)
|
||||
}
|
||||
def getPipelineMeasurement(key) {
|
||||
return influxCustomDataMap.pipeline_data[key]
|
||||
}
|
||||
}
|
||||
|
@ -16,6 +16,8 @@ import groovy.transform.Field
|
||||
|
||||
@Field Set PARAMETER_KEYS = [
|
||||
'containerPortMappings',
|
||||
'containerCommand',
|
||||
'containerShell',
|
||||
'dockerEnvVars',
|
||||
'dockerImage',
|
||||
'dockerName',
|
||||
@ -57,6 +59,8 @@ void call(Map parameters = [:], body) {
|
||||
if (!config.sidecarImage) {
|
||||
dockerExecuteOnKubernetes(
|
||||
script: script,
|
||||
containerCommand: config.containerCommand,
|
||||
containerShell: config.containerShell,
|
||||
dockerImage: config.dockerImage,
|
||||
dockerEnvVars: config.dockerEnvVars,
|
||||
dockerWorkspace: config.dockerWorkspace,
|
||||
@ -102,12 +106,6 @@ void call(Map parameters = [:], body) {
|
||||
executeInsideDocker = false
|
||||
}
|
||||
|
||||
def returnCode = sh script: 'which docker > /dev/null', returnStatus: true
|
||||
if (returnCode != 0) {
|
||||
echo "[WARNING][${STEP_NAME}] No docker environment found (command 'which docker' did not return with '0'). Configured docker image '${config.dockerImage}' will not be used."
|
||||
executeInsideDocker = false
|
||||
}
|
||||
|
||||
returnCode = sh script: 'docker ps -q > /dev/null', returnStatus: true
|
||||
if (returnCode != 0) {
|
||||
echo "[WARNING][$STEP_NAME] Cannot connect to docker daemon (command 'docker ps' did not return with '0'). Configured docker image '${config.dockerImage}' will not be used."
|
||||
@ -192,10 +190,11 @@ private getDockerOptions(Map dockerEnvVars, Map dockerVolumeBind, def dockerOpti
|
||||
|
||||
if (dockerOptions) {
|
||||
if (dockerOptions instanceof CharSequence) {
|
||||
options.add(dockerOptions.toString())
|
||||
} else if (dockerOptions instanceof List) {
|
||||
dockerOptions = [dockerOptions]
|
||||
}
|
||||
if (dockerOptions instanceof List) {
|
||||
for (String option : dockerOptions) {
|
||||
options.add "${option}"
|
||||
options << escapeBlanks(option)
|
||||
}
|
||||
} else {
|
||||
throw new IllegalArgumentException("Unexpected type for dockerOptions. Expected was either a list or a string. Actual type was: '${dockerOptions.getClass()}'")
|
||||
@ -224,3 +223,22 @@ def getContainerDefined(config) {
|
||||
boolean isKubernetes() {
|
||||
return Boolean.valueOf(env.ON_K8S)
|
||||
}
|
||||
|
||||
/**
|
||||
* Escapes blanks for values in key/value pairs
|
||||
* E.g. <code>description=Lorem ipsum</code> is
|
||||
* changed to <code>description=Lorem\ ipsum</code>.
|
||||
*/
|
||||
@NonCPS
|
||||
def escapeBlanks(def s) {
|
||||
|
||||
def EQ='='
|
||||
def parts=s.split(EQ)
|
||||
|
||||
if(parts.length == 2) {
|
||||
parts[1]=parts[1].replaceAll(' ', '\\\\ ')
|
||||
s = parts.join(EQ)
|
||||
}
|
||||
|
||||
return s
|
||||
}
|
||||
|
@ -11,11 +11,13 @@ import hudson.AbortException
|
||||
@Field def PLUGIN_ID_KUBERNETES = 'kubernetes'
|
||||
@Field Set GENERAL_CONFIG_KEYS = ['jenkinsKubernetes']
|
||||
@Field Set PARAMETER_KEYS = [
|
||||
'containerCommands', //specify start command for containers to overwrite Piper default (`/usr/bin/tail -f /dev/null`). If container's defaultstart command should be used provide empty string like: `['selenium/standalone-chrome': '']`
|
||||
'containerCommand', // specify start command for container created with dockerImage parameter to overwrite Piper default (`/usr/bin/tail -f /dev/null`).
|
||||
'containerCommands', //specify start command for containers to overwrite Piper default (`/usr/bin/tail -f /dev/null`). If container's default start command should be used provide empty string like: `['selenium/standalone-chrome': '']`
|
||||
'containerEnvVars', //specify environment variables per container. If not provided dockerEnvVars will be used
|
||||
'containerMap', //specify multiple images which then form a kubernetes pod, example: containerMap: ['maven:3.5-jdk-8-alpine': 'mavenexecute','selenium/standalone-chrome': 'selenium']
|
||||
'containerName', //optional configuration in combination with containerMap to define the container where the commands should be executed in
|
||||
'containerPortMappings', //map which defines per docker image the port mappings, like containerPortMappings: ['selenium/standalone-chrome': [[name: 'selPort', containerPort: 4444, hostPort: 4444]]]
|
||||
'containerShell', // allows to specify the shell to be executed for container with containerName
|
||||
'containerWorkspaces', //specify workspace (=home directory of user) per container. If not provided dockerWorkspace will be used. If empty, home directory will not be set.
|
||||
'dockerImage',
|
||||
'dockerWorkspace',
|
||||
@ -48,6 +50,7 @@ void call(Map parameters = [:], body) {
|
||||
configHelper.withMandatoryProperty('dockerImage')
|
||||
config.containerName = 'container-exec'
|
||||
config.containerMap = ["${config.get('dockerImage')}": config.containerName]
|
||||
config.containerCommands = config.containerCommand ? ["${config.get('dockerImage')}": config.containerCommand] : null
|
||||
}
|
||||
executeOnPod(config, utils, body)
|
||||
}
|
||||
@ -77,7 +80,11 @@ void executeOnPod(Map config, utils, Closure body) {
|
||||
podTemplate(getOptions(config)) {
|
||||
node(config.uniqueId) {
|
||||
if (config.containerName) {
|
||||
container(name: config.containerName){
|
||||
Map containerParams = [name: config.containerName]
|
||||
if (config.containerShell) {
|
||||
containerParams.shell = config.containerShell
|
||||
}
|
||||
container(containerParams){
|
||||
try {
|
||||
utils.unstashAll(config.stashContent)
|
||||
body()
|
||||
|
50
vars/fioriOnCloudPlatformPipeline.groovy
Normal file
50
vars/fioriOnCloudPlatformPipeline.groovy
Normal file
@ -0,0 +1,50 @@
|
||||
import static com.sap.piper.Prerequisites.checkScript
|
||||
|
||||
import groovy.transform.Field
|
||||
|
||||
@Field def STEP_NAME = getClass().getName()
|
||||
|
||||
@Field def GENERAL_CONFIG_KEYS = []
|
||||
@Field def PARAMETER_KEYS = []
|
||||
@Field def STEP_CONFIG_KEYS = []
|
||||
|
||||
/** The Scenario is intended for building and uploading a fiori application.
|
||||
*
|
||||
* It needs to be called from a pipeline script (Jenkinsfile) like:
|
||||
* ```
|
||||
* @Library('piper-lib-os') _
|
||||
* @Library('your-additional-lib') __ // optional
|
||||
*
|
||||
* // parameter 'customDefaults' below is optional
|
||||
* fioriOnCloudPlatformPipeline(script: this, customDefaults: '<configFile>')
|
||||
* ```
|
||||
*/
|
||||
void call(parameters = [:]) {
|
||||
|
||||
checkScript(this, parameters)
|
||||
|
||||
node(parameters.label) {
|
||||
|
||||
//
|
||||
// Cut and paste lines below in order to create a pipeline from this scenario
|
||||
// In this case `parameters` needs to be replaced by `script: this`.
|
||||
|
||||
stage('prepare') {
|
||||
|
||||
setupCommonPipelineEnvironment(parameters)
|
||||
}
|
||||
|
||||
stage('build') {
|
||||
|
||||
mtaBuild(parameters)
|
||||
}
|
||||
|
||||
stage('deploy') {
|
||||
|
||||
neoDeploy(parameters)
|
||||
}
|
||||
|
||||
// Cut and paste lines above in order to create a pipeline from this scenario
|
||||
//
|
||||
}
|
||||
}
|
@ -47,7 +47,13 @@ void call(Map parameters = [:]) {
|
||||
.dependingOn('buildTool').mixin('testOptions')
|
||||
.use()
|
||||
|
||||
utils.pushToSWA([step: STEP_NAME, stepParam1: config.buildTool, stepParam2: config.dockerName], config)
|
||||
utils.pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'buildTool',
|
||||
stepParam1: config.buildTool,
|
||||
stepParamKey2: 'dockerName',
|
||||
stepParam2: config.dockerName
|
||||
], config)
|
||||
|
||||
if(!config.dockerEnvVars.TARGET_SERVER_URL && config.testServerUrl)
|
||||
config.dockerEnvVars.TARGET_SERVER_URL = config.testServerUrl
|
||||
|
@ -1,8 +1,6 @@
|
||||
import static com.sap.piper.Prerequisites.checkScript
|
||||
|
||||
import com.sap.piper.ConfigurationHelper
|
||||
import com.sap.piper.ConfigurationLoader
|
||||
import com.sap.piper.ConfigurationMerger
|
||||
import com.sap.piper.JsonUtils
|
||||
import com.sap.piper.Utils
|
||||
|
||||
@ -11,14 +9,17 @@ import groovy.transform.Field
|
||||
@Field def STEP_NAME = getClass().getName()
|
||||
|
||||
@Field Set GENERAL_CONFIG_KEYS = []
|
||||
@Field Set STEP_CONFIG_KEYS = [
|
||||
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus([
|
||||
'artifactVersion',
|
||||
'customData',
|
||||
'customDataTags',
|
||||
'customDataMap',
|
||||
'customDataMapTags',
|
||||
'influxServer',
|
||||
'influxPrefix',
|
||||
'wrapInNode'
|
||||
]
|
||||
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS.plus([
|
||||
'artifactVersion'
|
||||
])
|
||||
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS
|
||||
|
||||
void call(Map parameters = [:]) {
|
||||
handlePipelineStepErrors (stepName: STEP_NAME, stepParameters: parameters, allowBuildFailure: true) {
|
||||
@ -37,10 +38,17 @@ void call(Map parameters = [:]) {
|
||||
artifactVersion: script.commonPipelineEnvironment.getArtifactVersion()
|
||||
])
|
||||
.mixin(parameters, PARAMETER_KEYS)
|
||||
.addIfNull('customData', script.commonPipelineEnvironment.getInfluxCustomData())
|
||||
.addIfNull('customDataTags', script.commonPipelineEnvironment.getInfluxCustomDataTags())
|
||||
.addIfNull('customDataMap', script.commonPipelineEnvironment.getInfluxCustomDataMap())
|
||||
.addIfNull('customDataMapTags', script.commonPipelineEnvironment.getInfluxCustomDataMapTags())
|
||||
.use()
|
||||
|
||||
new Utils().pushToSWA([step: STEP_NAME,
|
||||
stepParam1: parameters?.script == null], config)
|
||||
new Utils().pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'scriptMissing',
|
||||
stepParam1: parameters?.script == null
|
||||
], config)
|
||||
|
||||
if (!config.artifactVersion) {
|
||||
//this takes care that terminated builds due to milestone-locking do not cause an error
|
||||
@ -52,8 +60,10 @@ void call(Map parameters = [:]) {
|
||||
Artifact version: ${config.artifactVersion}
|
||||
Influx server: ${config.influxServer}
|
||||
Influx prefix: ${config.influxPrefix}
|
||||
InfluxDB data: ${script.commonPipelineEnvironment.getInfluxCustomData()}
|
||||
InfluxDB data map: ${script.commonPipelineEnvironment.getInfluxCustomDataMap()}
|
||||
InfluxDB data: ${config.customData}
|
||||
InfluxDB data tags: ${config.customDataTags}
|
||||
InfluxDB data map: ${config.customDataMap}
|
||||
InfluxDB data map tags: ${config.customDataMapTags}
|
||||
[${STEP_NAME}]----------------------------------------------------------"""
|
||||
|
||||
if(config.wrapInNode){
|
||||
@ -72,19 +82,30 @@ InfluxDB data map: ${script.commonPipelineEnvironment.getInfluxCustomDataMap()}
|
||||
|
||||
private void writeToInflux(config, script){
|
||||
if (config.influxServer) {
|
||||
step([
|
||||
$class: 'InfluxDbPublisher',
|
||||
selectedTarget: config.influxServer,
|
||||
customPrefix: config.influxPrefix,
|
||||
customData: script.commonPipelineEnvironment.getInfluxCustomData(),
|
||||
customDataMap: script.commonPipelineEnvironment.getInfluxCustomDataMap()
|
||||
])
|
||||
try {
|
||||
step([
|
||||
$class: 'InfluxDbPublisher',
|
||||
selectedTarget: config.influxServer,
|
||||
customPrefix: config.influxPrefix,
|
||||
customData: config.customData.size()>0 ? config.customData : null,
|
||||
customDataTags: config.customDataTags.size()>0 ? config.customDataTags : null,
|
||||
customDataMap: config.customDataMap.size()>0 ? config.customDataMap : null,
|
||||
customDataMapTags: config.customDataMapTags.size()>0 ? config.customDataMapTags : null
|
||||
])
|
||||
} catch (NullPointerException e){
|
||||
if(!e.getMessage()){
|
||||
//TODO: catch NPEs as long as https://issues.jenkins-ci.org/browse/JENKINS-55594 is not fixed & released
|
||||
error "[$STEP_NAME] NullPointerException occured, is the correct target defined?"
|
||||
}
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
//write results into json file for archiving - also benefitial when no InfluxDB is available yet
|
||||
def jsonUtils = new JsonUtils()
|
||||
writeFile file: 'jenkins_data.json', text: jsonUtils.getPrettyJsonString(script.commonPipelineEnvironment.getInfluxCustomData())
|
||||
writeFile file: 'pipeline_data.json', text: jsonUtils.getPrettyJsonString(script.commonPipelineEnvironment.getInfluxCustomDataMap())
|
||||
writeFile file: 'jenkins_data.json', text: jsonUtils.getPrettyJsonString(config.customData)
|
||||
writeFile file: 'influx_data.json', text: jsonUtils.getPrettyJsonString(config.customDataMap)
|
||||
writeFile file: 'jenkins_data_tags.json', text: jsonUtils.getPrettyJsonString(config.customDataTags)
|
||||
writeFile file: 'influx_data_tags.json', text: jsonUtils.getPrettyJsonString(config.customDataMapTags)
|
||||
archiveArtifacts artifacts: '*data.json', allowEmptyArchive: true
|
||||
|
||||
}
|
||||
|
@ -37,8 +37,11 @@ void call(Map parameters = [:]) {
|
||||
.mixin(parameters, PARAMETER_KEYS)
|
||||
.use()
|
||||
|
||||
new Utils().pushToSWA([step: STEP_NAME,
|
||||
stepParam1: parameters?.script == null], configuration)
|
||||
new Utils().pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'scriptMissing',
|
||||
stepParam1: parameters?.script == null
|
||||
], configuration)
|
||||
|
||||
String command = "mvn"
|
||||
|
||||
|
@ -36,8 +36,11 @@ void call(Map parameters = [:]) {
|
||||
.mixin(parameters, PARAMETER_KEYS)
|
||||
.use()
|
||||
|
||||
new Utils().pushToSWA([step: STEP_NAME,
|
||||
stepParam1: parameters?.script == null], configuration)
|
||||
new Utils().pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'scriptMissing',
|
||||
stepParam1: parameters?.script == null
|
||||
], configuration)
|
||||
|
||||
dockerExecute(script: script, dockerImage: configuration.dockerImage, dockerOptions: configuration.dockerOptions) {
|
||||
def java = new ToolDescriptor('Java', 'JAVA_HOME', '', '/bin/', 'java', '1.8.0', '-version 2>&1')
|
||||
|
@ -104,9 +104,13 @@ void call(parameters = [:]) {
|
||||
|
||||
utils.pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'deployMode',
|
||||
stepParam1: configuration.deployMode == 'mta'?'mta':'war', // ['mta', 'warParams', 'warPropertiesFile']
|
||||
stepParamKey2: 'warAction',
|
||||
stepParam2: configuration.warAction == 'rolling-update'?'blue-green':'standard', // ['deploy', 'deploy-mta', 'rolling-update']
|
||||
stepParamKey3: 'scriptMissing',
|
||||
stepParam3: parameters?.script == null,
|
||||
stepParamKey4: 'legacyConfig',
|
||||
stepParam4: ! stepCompatibilityConfiguration.isEmpty(),
|
||||
], configuration)
|
||||
|
||||
|
@ -79,8 +79,11 @@ void call(Map parameters = [:]) {
|
||||
.mixin(parameters, PARAMETER_KEYS)
|
||||
.use()
|
||||
|
||||
new Utils().pushToSWA([step: STEP_NAME,
|
||||
stepParam1: parameters?.script == null], config)
|
||||
new Utils().pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'scriptMissing',
|
||||
stepParam1: parameters?.script == null
|
||||
], config)
|
||||
|
||||
config.stashContent = config.testRepository
|
||||
?[GitUtils.handleTestRepository(this, config)]
|
||||
@ -98,7 +101,7 @@ void call(Map parameters = [:]) {
|
||||
dockerImage: config.dockerImage,
|
||||
stashContent: config.stashContent
|
||||
) {
|
||||
sh "${config.newmanInstallCommand}"
|
||||
sh "NPM_CONFIG_PREFIX=~/.npm-global ${config.newmanInstallCommand}"
|
||||
for(String collection : collectionList){
|
||||
def collectionDisplayName = collection.toString().replace(File.separatorChar,(char)'_').tokenize('.').first()
|
||||
// resolve templates
|
||||
@ -109,7 +112,7 @@ void call(Map parameters = [:]) {
|
||||
collectionDisplayName: collectionDisplayName
|
||||
]).toString()
|
||||
if(!config.failOnError) command += ' --suppress-exit-code'
|
||||
sh "newman ${command}"
|
||||
sh "PATH=\$PATH:~/.npm-global/bin newman ${command}"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -34,8 +34,11 @@ void call(Map parameters = [:]) {
|
||||
.mixin(parameters, PARAMETER_KEYS)
|
||||
.use()
|
||||
|
||||
new Utils().pushToSWA([step: STEP_NAME,
|
||||
stepParam1: parameters?.script == null], config)
|
||||
new Utils().pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'scriptMissing',
|
||||
stepParam1: parameters?.script == null
|
||||
], config)
|
||||
|
||||
// store files to be checked with checkmarx
|
||||
if (config.runCheckmarx) {
|
||||
|
@ -32,8 +32,11 @@ void call(Map parameters = [:]) {
|
||||
.mixin(parameters, PARAMETER_KEYS)
|
||||
.use()
|
||||
|
||||
new Utils().pushToSWA([step: STEP_NAME,
|
||||
stepParam1: parameters?.script == null], config)
|
||||
new Utils().pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'scriptMissing',
|
||||
stepParam1: parameters?.script == null
|
||||
], config)
|
||||
|
||||
if (config.runOpaTests){
|
||||
utils.stash('opa5', config.stashIncludes?.get('opa5')?config.stashIncludes.opa5:'**/*.*', config.stashExcludes?.get('opa5')?config.stashExcludes.opa5:'')
|
||||
|
@ -81,7 +81,7 @@ private void executeStage(script, originalStage, stageName, config, utils) {
|
||||
echo "[${STEP_NAME}] Found global interceptor '${globalInterceptorFile}' for ${stageName}."
|
||||
// If we call the global interceptor, we will pass on originalStage as parameter
|
||||
body = {
|
||||
globalInterceptorScript(body, stageName, config)
|
||||
globalInterceptorScript(script: script, originalStage: body, stageName: stageName, config: config)
|
||||
}
|
||||
}
|
||||
|
||||
@ -90,7 +90,7 @@ private void executeStage(script, originalStage, stageName, config, utils) {
|
||||
Script projectInterceptorScript = load(projectInterceptorFile)
|
||||
echo "[${STEP_NAME}] Running project interceptor '${projectInterceptorFile}' for ${stageName}."
|
||||
// If we call the project interceptor, we will pass on body as parameter which contains either originalStage or the repository interceptor
|
||||
projectInterceptorScript(body, stageName, config)
|
||||
projectInterceptorScript(script: script, originalStage: body, stageName: stageName, config: config)
|
||||
} else {
|
||||
//TODO: assign projectInterceptorScript to body as done for globalInterceptorScript, currently test framework does not seem to support this case. Further investigations needed.
|
||||
body()
|
||||
@ -103,6 +103,19 @@ private void executeStage(script, originalStage, stageName, config, utils) {
|
||||
deleteDir()
|
||||
|
||||
def duration = System.currentTimeMillis() - startTime
|
||||
utils.pushToSWA([eventType: 'library-os-stage', stageName: stageName, stepParam1: "${script.currentBuild.currentResult}", stepParam2: "${startTime}", stepParam3: "${duration}", stepParam4: "${projectExtensions}", stepParam5: "${globalExtensions}"], config)
|
||||
utils.pushToSWA([
|
||||
eventType: 'library-os-stage',
|
||||
stageName: stageName,
|
||||
stepParamKey1: 'buildResult',
|
||||
stepParam1: "${script.currentBuild.currentResult}",
|
||||
stepParamKey2: 'stageStartTime',
|
||||
stepParam2: "${startTime}",
|
||||
stepParamKey3: 'stageDuration',
|
||||
stepParam3: "${duration}",
|
||||
stepParamKey4: 'projectExtension',
|
||||
stepParam4: "${projectExtensions}",
|
||||
stepParamKey5: 'globalExtension',
|
||||
stepParam5: "${globalExtensions}"
|
||||
], config)
|
||||
}
|
||||
}
|
||||
|
@ -48,8 +48,11 @@ void call(Map parameters = [:], Closure body) {
|
||||
.dependingOn('buildTool').mixin('dockerWorkspace')
|
||||
.use()
|
||||
|
||||
utils.pushToSWA([step: STEP_NAME,
|
||||
stepParam1: parameters?.script == null], config)
|
||||
utils.pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'scriptMissing',
|
||||
stepParam1: parameters?.script == null
|
||||
], config)
|
||||
|
||||
dockerExecute(
|
||||
script: script,
|
||||
|
@ -24,9 +24,13 @@ void call(Map parameters = [:]) {
|
||||
.mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS)
|
||||
.use()
|
||||
|
||||
(parameters.utils ?: new Utils())
|
||||
.pushToSWA([step: STEP_NAME, stepParam4: parameters.customDefaults?'true':'false',
|
||||
stepParam5: Boolean.toString( ! (script?.commonPipelineEnvironment?.getConfigProperties() ?: [:]).isEmpty())], config)
|
||||
(parameters.utils ?: new Utils()).pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey4: 'customDefaults',
|
||||
stepParam4: parameters.customDefaults?'true':'false',
|
||||
stepParamKey5: 'legacyConfig',
|
||||
stepParam5: Boolean.toString( ! (script?.commonPipelineEnvironment?.getConfigProperties() ?: [:]).isEmpty())
|
||||
], config)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -38,8 +38,11 @@ void call(Map parameters = [:]) {
|
||||
.withMandatoryProperty('snykCredentialsId')
|
||||
.use()
|
||||
|
||||
new Utils().pushToSWA([step: STEP_NAME,
|
||||
stepParam1: parameters?.script == null], config)
|
||||
new Utils().pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'scriptMissing',
|
||||
stepParam1: parameters?.script == null
|
||||
], config)
|
||||
|
||||
utils.unstashAll(config.stashContent)
|
||||
|
||||
|
@ -40,8 +40,11 @@ void call(Map parameters = [:]) {
|
||||
.mixin(parameters, PARAMETER_KEYS)
|
||||
.use()
|
||||
|
||||
new Utils().pushToSWA([step: STEP_NAME,
|
||||
stepParam1: parameters?.script == null], configuration)
|
||||
new Utils().pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'scriptMissing',
|
||||
stepParam1: parameters?.script == null
|
||||
], configuration)
|
||||
|
||||
// UNIT TESTS
|
||||
publishJUnitReport(configuration.get('junit'))
|
||||
|
@ -63,8 +63,11 @@ void call(parameters = [:]) {
|
||||
|
||||
def changeDocumentId = null
|
||||
|
||||
new Utils().pushToSWA([step: STEP_NAME,
|
||||
stepParam1: parameters?.script == null], configuration)
|
||||
new Utils().pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'scriptMissing',
|
||||
stepParam1: parameters?.script == null
|
||||
], configuration)
|
||||
|
||||
if(backendType == BackendType.SOLMAN) {
|
||||
|
||||
|
@ -58,8 +58,11 @@ void call(parameters = [:]) {
|
||||
|
||||
configuration = configHelper.use()
|
||||
|
||||
new Utils().pushToSWA([step: STEP_NAME,
|
||||
stepParam1: parameters?.script == null], configuration)
|
||||
new Utils().pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'scriptMissing',
|
||||
stepParam1: parameters?.script == null
|
||||
], configuration)
|
||||
|
||||
def changeDocumentId = null
|
||||
def transportRequestId = getTransportRequestId(cm, script, configuration)
|
||||
|
@ -61,9 +61,13 @@ void call(parameters = [:]) {
|
||||
.withMandatoryProperty('changeManagement/git/format')
|
||||
.withMandatoryProperty('filePath')
|
||||
|
||||
new Utils().pushToSWA([step: STEP_NAME,
|
||||
stepParam1: configuration.changeManagement.type,
|
||||
stepParam2: parameters?.script == null], configuration)
|
||||
new Utils().pushToSWA([
|
||||
step: STEP_NAME,
|
||||
stepParamKey1: 'changeManagementType',
|
||||
stepParam1: configuration.changeManagement.type,
|
||||
stepParamKey2: 'scriptMissing',
|
||||
stepParam2: parameters?.script == null
|
||||
], configuration)
|
||||
|
||||
def changeDocumentId = null
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user