Mirror of https://github.com/SAP/jenkins-library.git (synced 2025-01-18 05:18:24 +02:00)

Merge remote-tracking branch 'github/master' into HEAD

Commit 2eeb5e05d4
@ -140,7 +140,7 @@ if (failedThreads.size() == 0) {
|
||||
failedThread.printOutput()
|
||||
}
|
||||
status = "failure"
|
||||
statusMessage "The following consumer test(s) failed: ${failedThreads}"
|
||||
statusMessage = "The following consumer test(s) failed: ${failedThreads}"
|
||||
}
|
||||
|
||||
if (!RUNNING_LOCALLY) {
|
||||
|
@ -678,7 +678,7 @@ for (step in steps) {
|
||||
|
||||
// replace @see tag in docu by docu from referenced step.
|
||||
for(step in stepDescriptors) {
|
||||
if(step.value.parameters) {
|
||||
if(step.value?.parameters) {
|
||||
for(param in step.value.parameters) {
|
||||
if( param?.value?.docu?.contains('@see')) {
|
||||
def otherStep = param.value.docu.replaceAll('@see', '').trim()
|
||||
@ -695,8 +695,12 @@ for(step in stepDescriptors) {
|
||||
def stageDescriptors = [:]
|
||||
stages.each {key, value ->
|
||||
System.err << "[INFO] Processing stage '${key}' ...\n"
|
||||
stageDescriptors."${key}" = [:] << stepDescriptors."${key}"
|
||||
stepDescriptors.remove(key)
|
||||
if (stepDescriptors."${key}") {
|
||||
stageDescriptors."${key}" = [:] << stepDescriptors."${key}"
|
||||
stepDescriptors.remove(key)
|
||||
} else {
|
||||
stageDescriptors."${key}" = [:]
|
||||
}
|
||||
|
||||
//add stage name to stageDescriptors
|
||||
stageDescriptors."${key}".name = value
|
||||
|
documentation/docs/extensibility.md (new file, 83 lines)
@ -0,0 +1,83 @@
# Extensibility

There are several possibilities for extensibility besides the **[very powerful configuration](configuration.md)**:

## 1. Stage Exits

Create a file named `<StageName>.groovy`, for example `Acceptance.groovy`, and store it in the folder `.pipeline/extensions/` of your source code repository.

The pipeline template checks whether such a file exists and executes it if present.
A map is passed to the extension containing the following keys:

* `script`: defines the global script environment of the Jenkinsfile run. This ensures that the correct configuration environment is passed to project "Piper" steps and also allows access to, for example, the `commonPipelineEnvironment`.
* `originalStage`: allows you to execute the "original" stage at any place in your script. If you omit the call to `originalStage()`, only your own code is executed.
* `stageName`: name of the current stage
* `config`: configuration of the stage (including all defaults)

Here is a simple example of such an extension:
``` groovy
void call(Map params) {
    //access stage name
    echo "Start - Extension for stage: ${params.stageName}"

    //access config
    echo "Current stage config: ${params.config}"

    //execute original stage as defined in the template
    params.originalStage()

    //access overall pipeline script object
    echo "Branch: ${params.script.commonPipelineEnvironment.gitBranch}"

    echo "End - Extension for stage: ${params.stageName}"
}
return this
```
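Since `originalStage` is just a callable handed to your extension, an extension can also wrap or skip the original stage. The following is an illustration only; the `runOriginal` config key is a hypothetical custom flag, not part of the shipped defaults:

```groovy
void call(Map params) {
    // 'runOriginal' is a hypothetical custom config key, not a shipped default
    if (params.config.runOriginal == false) {
        //skip the original stage entirely and run custom logic instead
        echo "Running custom logic only for stage: ${params.stageName}"
    } else {
        //run custom preparation, then the original stage as defined in the template
        echo "Preparing stage: ${params.stageName}"
        params.originalStage()
    }
}
return this
```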
!!! note "Init stage cannot be extended"
    Please note that the `Init` stage, among other things, also checks out your current repository.<br />Thus it is not possible to use extensions on this stage.

## 2. Central Custom Template

If you have multiple projects in which you want to use a custom template, you can implement it similarly to [piperPipeline](https://github.com/SAP/jenkins-library/blob/master/vars/piperPipeline.groovy). A minimal sketch is shown below.

!!! note "How not to get decoupled"
    Providing a custom template typically decouples you from centrally provided updates to the template, including its stages.<br />
    Where applicable, you can re-use the stage implementations. This means you call e.g. `piperPipelineStageBuild()` as you can see in [piperPipeline](https://github.com/SAP/jenkins-library/blob/master/vars/piperPipeline.groovy).

    Using this approach you can at least benefit from innovations done in individual project "Piper" stages.

!!! note "When to go with a custom template"
    Go with a custom template if the configuration possibilities are not sufficient for you and _1. Stage Exits_ is not applicable.
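A minimal sketch of such a central template, assuming it is provided via your own shared library (e.g. as `vars/myCompanyPipeline.groovy`; the name and the stage subset are illustrative, see [piperPipeline](https://github.com/SAP/jenkins-library/blob/master/vars/piperPipeline.groovy) for the authoritative stage step usage):

```groovy
void call(Map parameters) {
    def script = parameters.script

    pipeline {
        agent any
        stages {
            stage('Init') {
                steps {
                    //re-use the centrally provided stage implementation
                    piperPipelineStageInit script: script
                }
            }
            stage('Build') {
                steps {
                    piperPipelineStageBuild script: script
                }
            }
            //add further stages and custom logic as needed
        }
    }
}
return this
```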
## 3. Custom Jenkinsfile

Since project "Piper" fully builds on [Jenkins Pipelines as Code](https://jenkins.io/doc/book/pipeline-as-code/), you can also go with a completely custom `Jenkinsfile`.

!!! warning "Decoupling"
    If you go this route, you are decoupled from the innovations provided with project "Piper", unless you re-use, for example, the stages (as indicated above under _2. Central Custom Template_).

    **We recommend using this only as a last option for extensibility.**

## Further tips and information

When you consider adding additional capabilities, your first stop should be the [Jenkins Pipeline Steps Reference](https://jenkins.io/doc/pipeline/steps/).
There you get an overview of which capabilities are already available and a list of related parameters which you can use to customize the existing implementation. The provided information should help you understand and extend the functionality of your pipeline.

!!! tip
    If you consider extensions, we recommend implementing them as a custom library according to the [Jenkins shared libraries](https://jenkins.io/doc/book/pipeline/shared-libraries/) concept instead of adding Groovy coding to the `Jenkinsfile`.
    Your custom library can easily live next to the provided pipeline library.

    Your `Jenkinsfile` would then start like:

    ```
    @Library(['piper-lib-os', 'your-custom-lib']) _
    ```
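For illustration, a step in such a custom library is just a file under `vars/`, e.g. `vars/myCompanyCheck.groovy` (name and content are hypothetical):

```groovy
// vars/myCompanyCheck.groovy in your custom shared library
void call(Map parameters = [:]) {
    //company-specific logic living next to the project "Piper" steps
    echo "Running company-specific checks on branch ${parameters.script?.commonPipelineEnvironment?.gitBranch}"
}
```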
<!-- ## Examples

work in progress
-->
documentation/docs/stages/acceptance.md (new file, 13 lines)
@ -0,0 +1,13 @@
|
||||
# ${docGenStageName}
|
||||
|
||||
${docGenDescription}
|
||||
|
||||
## ${docGenStageContent}
|
||||
|
||||
## ${docGenStageActivation}
|
||||
|
||||
## ${docGenStepActivation}
|
||||
|
||||
## ${docGenStageParameters}
|
||||
|
||||
## ${docGenStageConfiguration}
|
documentation/docs/stages/additionalunittests.md (new file, 13 lines)
@ -0,0 +1,13 @@
|
||||
# ${docGenStageName}
|
||||
|
||||
${docGenDescription}
|
||||
|
||||
## ${docGenStageContent}
|
||||
|
||||
## ${docGenStageActivation}
|
||||
|
||||
## ${docGenStepActivation}
|
||||
|
||||
## ${docGenStageParameters}
|
||||
|
||||
## ${docGenStageConfiguration}
|
documentation/docs/stages/build.md (new file, 13 lines)
@ -0,0 +1,13 @@
|
||||
# ${docGenStageName}
|
||||
|
||||
${docGenDescription}
|
||||
|
||||
## ${docGenStageContent}
|
||||
|
||||
## ${docGenStageActivation}
|
||||
|
||||
## ${docGenStepActivation}
|
||||
|
||||
## ${docGenStageParameters}
|
||||
|
||||
## ${docGenStageConfiguration}
|
documentation/docs/stages/compliance.md (new file, 13 lines)
@ -0,0 +1,13 @@
|
||||
# ${docGenStageName}
|
||||
|
||||
${docGenDescription}
|
||||
|
||||
## ${docGenStageContent}
|
||||
|
||||
## ${docGenStageActivation}
|
||||
|
||||
## ${docGenStepActivation}
|
||||
|
||||
## ${docGenStageParameters}
|
||||
|
||||
## ${docGenStageConfiguration}
|
documentation/docs/stages/examples.md (new file, 33 lines)
@ -0,0 +1,33 @@
# Example Configurations

This page shows you some pipeline configuration examples.

As `Jenkinsfile`, only the following code is required:

```
@Library('piper-lib') _

piperPipeline script: this
```

## Pure Pull-Request Voting

`.pipeline/config.yml`:

``` YAML
general:
  buildTool: 'npm'
```
## Using custom defaults

It is possible to use custom defaults as indicated in the section about [Configuration](../configuration.md).

In order to use custom defaults, only a simple extension to the `Jenkinsfile` is required:

```
@Library(['piper-lib-os', 'myCustomLibrary']) _

piperPipeline script: this, customDefaults: ['myCustomDefaults.yml']
```
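The content of `myCustomDefaults.yml` follows the same structure as the regular configuration; an illustrative (not prescriptive) extract could look like:

``` YAML
general:
  gitSshKeyCredentialsId: 'credentials-id-in-jenkins'
steps:
  mavenExecute:
    dockerImage: 'maven:3.5-jdk-8-alpine'
```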
## More examples to come ...
documentation/docs/stages/init.md (new file, 13 lines)
@ -0,0 +1,13 @@
|
||||
# ${docGenStageName}
|
||||
|
||||
${docGenDescription}
|
||||
|
||||
## ${docGenStageContent}
|
||||
|
||||
## ${docGenStageActivation}
|
||||
|
||||
## ${docGenStepActivation}
|
||||
|
||||
## ${docGenStageParameters}
|
||||
|
||||
## ${docGenStageConfiguration}
|
documentation/docs/stages/integration.md (new file, 13 lines)
@ -0,0 +1,13 @@
|
||||
# ${docGenStageName}
|
||||
|
||||
${docGenDescription}
|
||||
|
||||
## ${docGenStageContent}
|
||||
|
||||
## ${docGenStageActivation}
|
||||
|
||||
## ${docGenStepActivation}
|
||||
|
||||
## ${docGenStageParameters}
|
||||
|
||||
## ${docGenStageConfiguration}
|
documentation/docs/stages/introduction.md (new file, 163 lines)
@ -0,0 +1,163 @@
# Project "Piper" general purpose pipeline

The pipeline consists of a sequence of stages, each of which contains a number of individual steps.

### First step: Pull-Request Pipeline

In order to validate pull requests to your GitHub repository, you need to perform two simple steps:

#### 1. Create Pipeline configuration

Create a file `.pipeline/config.yml` in your repository (typically in the `master` branch) with the following content:

``` YAML
general:
  buildTool: 'npm'
```

!!! note "buildTool"
    Please make sure that you specify the correct build tool.
    The following are currently supported:

    * `docker`
    * `kaniko`
    * `maven`
    * `mta`
    * `npm`

    If your build tool is not in the list, you can still use further options as described for the [Pull-Request Voting stage](prvoting.md).
#### 2. Create Jenkinsfile

Create a file called `Jenkinsfile` in the root of your repository (typically in the `master` branch) with the following content:

```groovy
@Library('piper-lib-os') _

piperPipeline script: this
```

**There is typically no need to touch this file any further.**

!!! note "Using custom defaults"
    It is possible to overwrite/extend the pipeline defaults with custom defaults.

    ```
    piperPipeline script: this, customDefaults: ['myCustomDefaults.yml']
    ```

    You find more details about the custom defaults in the [configuration section](../configuration.md).
### Second step: Prepare the pipeline for your main branch

Extend your configuration to also contain the Git SSH credentials information.

Your `.pipeline/config.yml` should then look like:

``` YAML
general:
  buildTool: 'npm'
  gitSshKeyCredentialsId: 'credentials-id-in-jenkins'
```

!!! note "gitSshKeyCredentialsId"
    The pointer to the Jenkins credentials containing your SSH private key is an important part of the pipeline run.
    The credentials are, for example, required to push automatic versioning information to your GitHub repository.
### Subsequent steps: Configure individual stages

The stages of the pipeline can be configured individually.
As a general rule of thumb, only stages with an existing configuration are executed.

If no dedicated configuration is required for a step, the presence of relevant files in the repository triggers the step execution, as sketched below.

**This smart and context-aware way of configuration** allows you to take an iterative approach to configuring the individual steps.
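As an illustration (assuming the default activation conditions shipped with the library): maintaining `cfSpace` for the Acceptance stage activates the `cloudFoundryDeploy` step there, while a step like `karmaExecuteTests` is activated by the mere presence of a `karma.conf.js` file without any configuration:

``` YAML
# .pipeline/config.yml - illustrative extract
general:
  buildTool: 'npm'
stages:
  Acceptance:
    cfSpace: 'my-cf-space' # activates cloudFoundryDeploy in the Acceptance stage
```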
The pipeline comprises the following stages:

#### Init

This stage ensures that the pipeline is initialized correctly.
It will, for example:

* Check out the GitHub repository
* Set up the overall pipeline configuration and perform basic checks
* Identify which pipeline stages to execute based on the configuration and file patterns
* Perform automatic versioning of the software artifact in case the `master` branch pipeline is executed

You find details about this stage on the page [**Init Stage** Details](init.md).

#### Pull-Request Voting

This stage is responsible for validating pull requests, see also above.

You find further details about this stage on the page [**Pull-Request Voting**](prvoting.md).

#### Build

In this stage the build of the software artifact is performed.
The build artifact is `stash`ed for use in subsequent stages. For `Docker` builds the build result is uploaded to a container registry (as per your configuration).

Afterwards the results of static checks & unit tests are published on Jenkins.

You find details about this stage on the page [**Build**](build.md).

#### Additional Unit Tests

In this stage additional unit-like tests are executed which should not run during the build.

Currently, this stage holds the execution of a Karma runner which allows for

* QUnit tests
* OPA5 (One Page Acceptance tests) for SAPUI5

You find details about this stage on the page [**Additional Unit Tests**](additionalunittests.md).

#### Integration

In the [Integration stage](integration.md) a custom integration test script can be executed.

#### Acceptance

A default implementation will come soon ...

Currently, custom logic can be added using the [stage extension mechanism](../extensibility.md).

#### Security

A default implementation will come soon ...

Currently, custom logic can be added using the [stage extension mechanism](../extensibility.md).

#### Performance

A default implementation will come soon ...

Currently, custom logic can be added using the [stage extension mechanism](../extensibility.md).

#### Compliance

A default implementation will come soon ...

Currently, custom logic can be added using the [stage extension mechanism](../extensibility.md).

#### Confirm

The [Confirm stage](confirm.md), if executed, stops the pipeline execution and asks for manual confirmation before proceeding to the stages _Promote_ and _Release_.

#### Promote

A default implementation will come soon ...

Currently, custom logic can be added using the [stage extension mechanism](../extensibility.md).

#### Release

A default implementation will come soon ...

Currently, custom logic can be added using the [stage extension mechanism](../extensibility.md).
documentation/docs/stages/performance.md (new file, 13 lines)
@ -0,0 +1,13 @@
|
||||
# ${docGenStageName}
|
||||
|
||||
${docGenDescription}
|
||||
|
||||
## ${docGenStageContent}
|
||||
|
||||
## ${docGenStageActivation}
|
||||
|
||||
## ${docGenStepActivation}
|
||||
|
||||
## ${docGenStageParameters}
|
||||
|
||||
## ${docGenStageConfiguration}
|
documentation/docs/stages/promote.md (new file, 13 lines)
@ -0,0 +1,13 @@
|
||||
# ${docGenStageName}
|
||||
|
||||
${docGenDescription}
|
||||
|
||||
## ${docGenStageContent}
|
||||
|
||||
## ${docGenStageActivation}
|
||||
|
||||
## ${docGenStepActivation}
|
||||
|
||||
## ${docGenStageParameters}
|
||||
|
||||
## ${docGenStageConfiguration}
|
documentation/docs/stages/prvoting.md (new file, 13 lines)
@ -0,0 +1,13 @@
|
||||
# ${docGenStageName}
|
||||
|
||||
${docGenDescription}
|
||||
|
||||
## ${docGenStageContent}
|
||||
|
||||
## ${docGenStageActivation}
|
||||
|
||||
## ${docGenStepActivation}
|
||||
|
||||
## ${docGenStageParameters}
|
||||
|
||||
## ${docGenStageConfiguration}
|
documentation/docs/stages/release.md (new file, 13 lines)
@ -0,0 +1,13 @@
|
||||
# ${docGenStageName}
|
||||
|
||||
${docGenDescription}
|
||||
|
||||
## ${docGenStageContent}
|
||||
|
||||
## ${docGenStageActivation}
|
||||
|
||||
## ${docGenStepActivation}
|
||||
|
||||
## ${docGenStageParameters}
|
||||
|
||||
## ${docGenStageConfiguration}
|
documentation/docs/stages/security.md (new file, 13 lines)
@ -0,0 +1,13 @@
|
||||
# ${docGenStageName}
|
||||
|
||||
${docGenDescription}
|
||||
|
||||
## ${docGenStageContent}
|
||||
|
||||
## ${docGenStageActivation}
|
||||
|
||||
## ${docGenStepActivation}
|
||||
|
||||
## ${docGenStageParameters}
|
||||
|
||||
## ${docGenStageConfiguration}
|
documentation/docs/steps/buildExecute.md (new file, 23 lines)
@ -0,0 +1,23 @@
# ${docGenStepName}

## ${docGenDescription}

## Prerequisites

When performing a Docker build you need to maintain the respective credentials in your Jenkins credentials store.<br />
Further details:

* for builds using a Docker daemon: see step [containerPushToRegistry](containerPushToRegistry.md)
* for builds using Kaniko: see step [kanikoExecute](kanikoExecute.md)

## Example

```groovy
buildExecute script: this, buildTool: 'maven'
```
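For a Docker build, the call could look as follows (the parameter names `dockerImageName`, `dockerImageTag` and `dockerRegistryUrl` are those used by this step's tests; the concrete values are placeholders):

```groovy
buildExecute script: this,
    buildTool: 'docker',
    dockerImageName: 'path/to/myImage',
    dockerImageTag: 'myTag',
    dockerRegistryUrl: 'https://my.registry:55555'
```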
## ${docGenParameters}

## ${docGenConfiguration}

## ${docJenkinsPluginDependencies}
documentation/docs/steps/containerPushToRegistry.md (new file, 44 lines)
@ -0,0 +1,44 @@
# ${docGenStepName}

## ${docGenDescription}

## Prerequisites

You need a valid user with write permissions in the target Docker registry.

Credentials for the target Docker registry must have been configured in Jenkins with a dedicated ID.

You can create the credentials in your Jenkins<br />
via _Jenkins_ -> _Credentials_ -> _System_ -> _Global credentials (unrestricted)_ -> _Add Credentials_ ->

* Kind: _Username with Password_
* ID: specify an ID which you then use for the configuration of `dockerCredentialsId` (see below)
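You can then maintain this ID either directly in the step call (see the examples below) or in your pipeline configuration; an illustrative extract:

```yaml
steps:
  containerPushToRegistry:
    dockerCredentialsId: 'myTargetRegistryCredentials'
```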
## Example

Usage of the pipeline step:

**OPTION A:** To pull a Docker image from an existing Docker registry and push it to a different Docker registry:

```groovy
containerPushToRegistry script: this,
    dockerCredentialsId: 'myTargetRegistryCredentials',
    sourceRegistryUrl: 'https://mysourceRegistry.url',
    sourceImage: 'path/to/mySourceImageWith:tag',
    dockerRegistryUrl: 'https://my.target.docker.registry:50000'
```

**OPTION B:** To push a locally built Docker image into the target registry (only possible when a Docker daemon is available on your Jenkins node):

```groovy
containerPushToRegistry script: this,
    dockerCredentialsId: 'myTargetRegistryCredentials',
    dockerImage: 'path/to/myImageWith:tag',
    dockerRegistryUrl: 'https://my.target.docker.registry:50000'
```

## ${docGenParameters}

## ${docGenConfiguration}

## ${docJenkinsPluginDependencies}
@ -14,8 +14,8 @@ The step is stashing files before and after the build. This is due to the fact,
|
||||
|
||||
| stash name | mandatory | prerequisite | pattern |
|
||||
|---|---|---|---|
|
||||
|buildDescriptor|no| |includes: `**/pom.xml, **/.mvn/**, **/assembly.xml, **/.swagger-codegen-ignore, **/package.json, **/requirements.txt, **/setup.py, **/whitesource_config.py, **/mta*.y*ml, **/.npmrc, **/whitesource.*.json, **/whitesource-fs-agent.config, Dockerfile, **/VERSION, **/version.txt, **/build.sbt, **/sbtDescriptor.json, **/project/*`<br /> excludes: `**/node_modules/**/package.json`|
|
||||
|checkmarx|no|Checkmarx is enabled|includes: `**/*.js, **/*.scala, **/*.go`<br /> excludes: `**/*.mockserver.js, node_modules/**/*.js`|
|
||||
|buildDescriptor|no| |includes: `**/pom.xml, **/.mvn/**, **/assembly.xml, **/.swagger-codegen-ignore, **/package.json, **/requirements.txt, **/setup.py, **/whitesource_config.py, **/mta*.y*ml, **/.npmrc, **/whitesource.*.json, **/whitesource-fs-agent.config, Dockerfile, **/VERSION, **/version.txt, **/Gopkg.*, **/dub.json, **/dub.sdl, **/build.sbt, **/sbtDescriptor.json, **/project/*`<br /> excludes: `**/node_modules/**/package.json`|
|
||||
|checkmarx|no|Checkmarx is enabled|includes: `**/*.js, **/*.scala, **/*.go, **/*.d, **/*.di`<br /> excludes: `**/*.mockserver.js, node_modules/**/*.js`|
|
||||
|classFiles|no| |includes: `**/target/classes/**/*.class, **/target/test-classes/**/*.class` <br />excludes: `''`|
|
||||
|deployDescriptor|no| |includes: `**/manifest*.y*ml, **/*.mtaext.y*ml, **/*.mtaext, **/xs-app.json, helm/**, *.y*ml`<br />exclude: `''`|
|
||||
|git|no| |includes: `**/gitmetadata/**`<br />exludes: `''`|
|
||||
|
@ -3,6 +3,7 @@ nav:
|
||||
- Home: index.md
|
||||
- 'Guided Tour' : guidedtour.md
|
||||
- Configuration: configuration.md
|
||||
- Extensibility: extensibility.md
|
||||
- 'Library steps':
|
||||
- artifactSetVersion: steps/artifactSetVersion.md
|
||||
- batsExecuteTests: steps/batsExecuteTests.md
|
||||
@ -48,6 +49,23 @@ nav:
|
||||
- transportRequestUploadFile: steps/transportRequestUploadFile.md
|
||||
- uiVeri5ExecuteTests: steps/uiVeri5ExecuteTests.md
|
||||
- whitesourceExecuteScan: steps/whitesourceExecuteScan.md
|
||||
- 'Pipelines':
|
||||
- 'General purpose pipeline':
|
||||
- 'Introduction': stages/introduction.md
|
||||
- 'Examples': stages/examples.md
|
||||
- 'Stages':
|
||||
- 'Init Stage': stages/init.md
|
||||
- 'Pull-Request Voting Stage': stages/prvoting.md
|
||||
- 'Build Stage': stages/build.md
|
||||
- 'Additional Unit Test Stage': stages/additionalunittests.md
|
||||
- 'Integration Stage': stages/integration.md
|
||||
- 'Acceptance Stage': stages/acceptance.md
|
||||
- 'Security Stage': stages/security.md
|
||||
- 'Performance Stage': stages/performance.md
|
||||
- 'Compliance': stages/compliance.md
|
||||
- 'Confirm Stage': stages/confirm.md
|
||||
- 'Promote Stage': stages/promote.md
|
||||
- 'Release Stage': stages/release.md
|
||||
- 'Scenarios':
|
||||
- 'Build and Deploy Hybrid Applications with Jenkins and SAP Solution Manager': scenarios/changeManagement.md
|
||||
- 'Build and Deploy SAP UI5 or SAP Fiori Applications on SAP Cloud Platform with Jenkins': scenarios/ui5-sap-cp/Readme.md
|
||||
|
pom.xml (7 lines changed)
@ -44,6 +44,7 @@
|
||||
<jenkins.version>2.32.3</jenkins.version>
|
||||
<pipeline.version>2.5</pipeline.version>
|
||||
<cps.global.lib.version>2.6</cps.global.lib.version>
|
||||
<java.level>8</java.level>
|
||||
</properties>
|
||||
|
||||
|
||||
@ -86,6 +87,12 @@
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.jenkins-ci.plugins</groupId>
|
||||
<artifactId>pipeline-github</artifactId>
|
||||
<version>2.2</version>
|
||||
</dependency>
|
||||
|
||||
<!--LesFurets Jenkins Pipeline Unit testing framework: https://github.com/lesfurets/JenkinsPipelineUnit-->
|
||||
<dependency>
|
||||
<groupId>com.lesfurets</groupId>
|
||||
|
@ -2,27 +2,61 @@ stages:
|
||||
Init:
|
||||
stepConditions:
|
||||
slackSendNotification:
|
||||
config: 'channel'
|
||||
configKeys:
|
||||
- 'channel'
|
||||
'Pull-Request Voting': {}
|
||||
Build: {}
|
||||
'Additional Unit Tests': {}
|
||||
'Additional Unit Tests':
|
||||
stepConditions:
|
||||
batsExecuteTests:
|
||||
filePattern: '**/*.bats'
|
||||
karmaExecuteTests:
|
||||
filePattern: '**/karma.conf.js'
|
||||
Integration: {}
|
||||
Acceptance:
|
||||
stepConditions:
|
||||
cloudFoundryDeploy:
|
||||
config: 'cfSpace'
|
||||
configKeys:
|
||||
- 'cfSpace'
|
||||
healthExecuteCheck:
|
||||
configKeys:
|
||||
- 'testServerUrl'
|
||||
newmanExecute:
|
||||
filePatternFromConfig: 'newmanCollection'
|
||||
config: 'testRepository'
|
||||
configKeys:
|
||||
- 'testRepository'
|
||||
uiVeri5ExecuteTests:
|
||||
filePattern: '**/conf.js'
|
||||
config: 'testRepository'
|
||||
Security: {}
|
||||
configKeys:
|
||||
- 'testRepository'
|
||||
Security:
|
||||
stepConditions:
|
||||
whitesourceExecuteScan:
|
||||
configKeys:
|
||||
- 'userTokenCredentialsId'
|
||||
- 'whitesource/userTokenCredentialsId'
|
||||
- 'whitesourceUserTokenCredentialsId'
|
||||
Performance: {}
|
||||
Compliance: {}
|
||||
Promote: {}
|
||||
Release: {}
|
||||
Promote:
|
||||
stepConditions:
|
||||
containerPushToRegistry:
|
||||
configKeys:
|
||||
- 'dockerRegistryUrl'
|
||||
Release:
|
||||
stepConditions:
|
||||
cloudFoundryDeploy:
|
||||
configKeys:
|
||||
- 'cfSpace'
|
||||
- 'cloudFoundry/space'
|
||||
healthExecuteCheck:
|
||||
configKeys:
|
||||
- 'testServerUrl'
|
||||
githubPublishRelease:
|
||||
configKeys:
|
||||
- 'githubTokenCredentialsId'
|
||||
'Post Actions':
|
||||
stepConditions:
|
||||
slackSendNotification:
|
||||
config: 'channel'
|
||||
configKeys:
|
||||
- 'channel'
|
||||
|
resources/com.sap.piper/pipeline/stageOrdinals.yml (new file, 23 lines)
@ -0,0 +1,23 @@
|
||||
stages:
|
||||
Init:
|
||||
ordinal: 1
|
||||
'Pull-Request Voting':
|
||||
ordinal: 5
|
||||
Build:
|
||||
ordinal: 10
|
||||
'Additional Unit Tests':
|
||||
ordinal: 20
|
||||
Integration:
|
||||
ordinal: 30
|
||||
Acceptance:
|
||||
ordinal: 40
|
||||
Security:
|
||||
ordinal: 50
|
||||
Performance:
|
||||
ordinal: 60
|
||||
Compliance:
|
||||
ordinal: 70
|
||||
Promote:
|
||||
ordinal: 80
|
||||
Release:
|
||||
ordinal: 90
|
@ -5,10 +5,14 @@ Init:
|
||||
includes: "**/*"
|
||||
excludes: ".pipeline/**"
|
||||
|
||||
'Central Build':
|
||||
unstash: ['source']
|
||||
stashes: []
|
||||
|
||||
'Pull-Request Voting':
|
||||
unstash: ['source']
|
||||
stashes: []
|
||||
|
||||
'Build':
|
||||
unstash: ['source']
|
||||
stashes: []
|
||||
|
||||
'Acceptance':
|
||||
unstash: ['buildResult']
|
||||
stashes: []
|
||||
|
@ -38,7 +38,6 @@ general:
|
||||
githubServerUrl: 'https://github.com'
|
||||
gitSshKeyCredentialsId: '' #needed to allow sshagent to run with local ssh key
|
||||
jenkinsKubernetes:
|
||||
jnlpAgent: 's4sdk/jenkins-agent-k8s:latest'
|
||||
securityContext:
|
||||
# Setting security context globally is currently not working with jaas
|
||||
# runAsUser: 1000
|
||||
@ -56,7 +55,7 @@ steps:
|
||||
timestampTemplate: '%Y%m%d%H%M%S'
|
||||
tagPrefix: 'build_'
|
||||
commitVersion: true
|
||||
dlang:
|
||||
dub:
|
||||
filePath: 'dub.json'
|
||||
versioningTemplate: '${version}-${timestamp}${commitId?"+"+commitId:""}'
|
||||
docker:
|
||||
@ -354,7 +353,7 @@ steps:
|
||||
projectNames: []
|
||||
jreDownloadUrl: 'https://github.com/SAP/SapMachine/releases/download/sapmachine-11.0.2/sapmachine-jre-11.0.2_linux-x64_bin.tar.gz'
|
||||
agentFileName: 'wss-unified-agent.jar'
|
||||
agentDownloadUrl: 'https://github.com/whitesource/unified-agent-distribution/raw/master/standAlone/${config.agentFileName}'
|
||||
agentDownloadUrl: 'https://github.com/whitesource/unified-agent-distribution/releases/latest/download/${config.agentFileName}'
|
||||
agentParameters: ''
|
||||
configFilePath: './wss-unified-agent.config'
|
||||
mta:
|
||||
@ -413,17 +412,19 @@ steps:
|
||||
timeoutInSeconds: 900
|
||||
pipelineStashFilesAfterBuild:
|
||||
stashIncludes:
|
||||
checkmarx: '**/*.js, **/*.scala, **/*.py, **/*.go, **/*.xml, **/*.html'
|
||||
buildResult: '**/target/*.jar, **/*.mtar'
|
||||
checkmarx: '**/*.js, **/*.scala, **/*.py, **/*.go, **/*.d, **/*.di, **/*.xml, **/*.html'
|
||||
classFiles: '**/target/classes/**/*.class, **/target/test-classes/**/*.class'
|
||||
sonar: '**/jacoco*.exec, **/sonar-project.properties'
|
||||
stashExcludes:
|
||||
buildResult: ''
|
||||
checkmarx: '**/*.mockserver.js, node_modules/**/*.js'
|
||||
classFiles: ''
|
||||
sonar: ''
|
||||
noDefaultExludes: []
|
||||
pipelineStashFilesBeforeBuild:
|
||||
stashIncludes:
|
||||
buildDescriptor: '**/pom.xml, **/.mvn/**, **/assembly.xml, **/.swagger-codegen-ignore, **/package.json, **/requirements.txt, **/setup.py, **/mta*.y*ml, **/.npmrc, Dockerfile, .hadolint.yaml, **/VERSION, **/version.txt, **/Gopkg.*, **/build.sbt, **/sbtDescriptor.json, **/project/*'
|
||||
buildDescriptor: '**/pom.xml, **/.mvn/**, **/assembly.xml, **/.swagger-codegen-ignore, **/package.json, **/requirements.txt, **/setup.py, **/mta*.y*ml, **/.npmrc, Dockerfile, .hadolint.yaml, **/VERSION, **/version.txt, **/Gopkg.*, **/dub.json, **/dub.sdl, **/build.sbt, **/sbtDescriptor.json, **/project/*'
|
||||
deployDescriptor: '**/manifest*.y*ml, **/*.mtaext.y*ml, **/*.mtaext, **/xs-app.json, helm/**, *.y*ml'
|
||||
git: '.git/**'
|
||||
opa5: '**/*.*'
|
||||
|
@ -46,7 +46,7 @@ def getNpmGAV(file = 'package.json') {
|
||||
return result
|
||||
}
|
||||
|
||||
def getDlangGAV(file = 'dub.json') {
|
||||
def getDubGAV(file = 'dub.json') {
|
||||
def result = [:]
|
||||
def descriptor = readJSON(file: file)
|
||||
|
||||
|
src/com/sap/piper/DockerUtils.groovy (new file, 57 lines)
@ -0,0 +1,57 @@
|
||||
package com.sap.piper
|
||||
|
||||
class DockerUtils implements Serializable {
|
||||
|
||||
private static Script script
|
||||
|
||||
DockerUtils(Script script) {
|
||||
this.script = script
|
||||
}
|
||||
|
||||
public boolean withDockerDaemon() {
|
||||
def returnCode = script.sh script: 'docker ps -q > /dev/null', returnStatus: true
|
||||
return (returnCode == 0)
|
||||
}
|
||||
|
||||
public boolean onKubernetes() {
|
||||
return (Boolean.valueOf(script.env.ON_K8S))
|
||||
}
|
||||
|
||||
public String getRegistryFromUrl(dockerRegistryUrl) {
|
||||
URL url = new URL(dockerRegistryUrl)
|
||||
return "${url.getHost()}${(url.getPort() != -1) ? ':' + url.getPort() : ''}"
|
||||
}
|
||||
|
||||
public String getProtocolFromUrl(dockerRegistryUrl) {
|
||||
URL url = new URL(dockerRegistryUrl)
|
||||
return url.getProtocol()
|
||||
|
||||
//return dockerRegistryUrl.split(/:\/\//)[0]
|
||||
}
|
||||
|
||||
public void moveImage(Map source, Map target) {
|
||||
//expects source/target in the format [image: '', registryUrl: '', credentialsId: '']
|
||||
def sourceDockerRegistry = source.registryUrl ? "${getRegistryFromUrl(source.registryUrl)}/" : ''
|
||||
def sourceImageFullName = sourceDockerRegistry + source.image
|
||||
def targetDockerRegistry = target.registryUrl ? "${getRegistryFromUrl(target.registryUrl)}/" : ''
|
||||
def targetImageFullName = targetDockerRegistry + target.image
|
||||
|
||||
if (!withDockerDaemon()) {
|
||||
script.withCredentials([script.usernamePassword(
|
||||
credentialsId: target.credentialsId,
|
||||
passwordVariable: 'password',
|
||||
usernameVariable: 'userid'
|
||||
)]) {
|
||||
skopeoMoveImage(sourceImageFullName, targetImageFullName, script.userid, script.password)
|
||||
}
|
||||
}
|
||||
//else not yet implemented here - available directly via containerPushToRegistry
|
||||
|
||||
}
|
||||
|
||||
private void skopeoMoveImage(sourceImageFullName, targetImageFullName, targetUserId, targetPassword) {
|
||||
script.sh "skopeo copy --src-tls-verify=false --dest-tls-verify=false --dest-creds=${BashUtils.quoteAndEscape(targetUserId)}:${BashUtils.quoteAndEscape(targetPassword)} docker://${sourceImageFullName} docker://${targetImageFullName}"
|
||||
}
|
||||
|
||||
|
||||
}
|
@ -57,3 +57,20 @@ def isJobStartedByCause(Class cause) {
|
||||
}
|
||||
return startedByGivenCause
|
||||
}
|
||||
|
||||
@NonCPS
|
||||
String getIssueCommentTriggerAction() {
|
||||
try {
|
||||
def triggerCause = getRawBuild().getCause(org.jenkinsci.plugins.pipeline.github.trigger.IssueCommentCause)
|
||||
if (triggerCause) {
|
||||
//triggerPattern e.g. like '.* /piper ([a-z]*) .*'
|
||||
def matcher = triggerCause.comment =~ triggerCause.triggerPattern
|
||||
if (matcher) {
|
||||
return matcher[0][1]
|
||||
}
|
||||
}
|
||||
return null
|
||||
} catch (err) {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
@ -1,4 +1,5 @@
|
||||
package com.sap.piper.k8s
|
||||
|
||||
import com.cloudbees.groovy.cps.NonCPS
|
||||
|
||||
class SystemEnv implements Serializable {
|
||||
@ -12,7 +13,8 @@ class SystemEnv implements Serializable {
|
||||
'NO_PROXY',
|
||||
'http_proxy',
|
||||
'https_proxy',
|
||||
'no_proxy'
|
||||
'no_proxy',
|
||||
'ON_K8S'
|
||||
]
|
||||
|
||||
SystemEnv() {
|
||||
|
@ -12,8 +12,8 @@ abstract class ArtifactVersioning implements Serializable {
|
||||
|
||||
public static getArtifactVersioning(buildTool, script, configuration) {
|
||||
switch (buildTool) {
|
||||
case 'dlang':
|
||||
return new DlangArtifactVersioning(script, configuration)
|
||||
case 'dub':
|
||||
return new DubArtifactVersioning(script, configuration)
|
||||
case 'docker':
|
||||
return new DockerArtifactVersioning(script, configuration)
|
||||
case 'golang':
|
||||
|
@ -1,7 +1,7 @@
|
||||
package com.sap.piper.versioning
|
||||
|
||||
class DlangArtifactVersioning extends ArtifactVersioning {
|
||||
protected DlangArtifactVersioning(script, configuration) {
|
||||
class DubArtifactVersioning extends ArtifactVersioning {
|
||||
protected DubArtifactVersioning(script, configuration) {
|
||||
super(script, configuration)
|
||||
}
|
||||
|
@ -64,8 +64,8 @@ class BatsExecuteTestsTest extends BasePiperTest {
|
||||
assertThat(dockerExecuteRule.dockerParams.dockerImage, is('node:8-stretch'))
|
||||
assertThat(dockerExecuteRule.dockerParams.dockerWorkspace, is('/home/node'))
|
||||
|
||||
assertThat(shellRule.shell, hasItem('npm install tap-xunit -g'))
|
||||
assertThat(shellRule.shell, hasItem('cat \'TEST-testPackage.tap\' | tap-xunit --package=\'testPackage\' > TEST-testPackage.xml'))
|
||||
assertThat(shellRule.shell, hasItem('NPM_CONFIG_PREFIX=~/.npm-global npm install tap-xunit -g'))
|
||||
assertThat(shellRule.shell, hasItem('cat \'TEST-testPackage.tap\' | PATH=\$PATH:~/.npm-global/bin tap-xunit --package=\'testPackage\' > TEST-testPackage.xml'))
|
||||
|
||||
assertJobStatusSuccess()
|
||||
}
|
||||
|
test/groovy/BuildExecuteTest.groovy (new file, 230 lines)
@ -0,0 +1,230 @@
|
||||
#!groovy
|
||||
import org.junit.Before
|
||||
import org.junit.Rule
|
||||
import org.junit.Test
|
||||
import org.junit.rules.ExpectedException
|
||||
import org.junit.rules.RuleChain
|
||||
import util.BasePiperTest
|
||||
import util.JenkinsDockerExecuteRule
|
||||
import util.JenkinsReadYamlRule
|
||||
import util.JenkinsShellCallRule
|
||||
import util.JenkinsStepRule
|
||||
import util.Rules
|
||||
|
||||
import static org.hamcrest.CoreMatchers.containsString
|
||||
import static org.hamcrest.CoreMatchers.hasItem
|
||||
import static org.hamcrest.CoreMatchers.is
|
||||
import static org.hamcrest.CoreMatchers.nullValue
|
||||
import static org.junit.Assert.assertThat
|
||||
|
||||
class BuildExecuteTest extends BasePiperTest {
|
||||
private ExpectedException exception = ExpectedException.none()
|
||||
private JenkinsStepRule stepRule = new JenkinsStepRule(this)
|
||||
private JenkinsDockerExecuteRule dockerRule = new JenkinsDockerExecuteRule(this)
|
||||
private JenkinsShellCallRule shellCallRule = new JenkinsShellCallRule(this)
|
||||
|
||||
@Rule
|
||||
public RuleChain rules = Rules
|
||||
.getCommonRules(this)
|
||||
.around(new JenkinsReadYamlRule(this))
|
||||
.around(exception)
|
||||
.around(shellCallRule)
|
||||
.around(dockerRule)
|
||||
.around(stepRule)
|
||||
|
||||
def dockerMockArgs = [:]
|
||||
class DockerMock {
|
||||
DockerMock(name){
|
||||
dockerMockArgs.name = name
|
||||
}
|
||||
def build(image, options) {
|
||||
return [image: image, options: options]
|
||||
}
|
||||
}
|
||||
|
||||
@Before
|
||||
void init() {
|
||||
}
|
||||
|
||||
@Test
|
||||
void testDefaultError() {
|
||||
exception.expectMessage(containsString('buildTool not set and no dockerImage & dockerCommand provided'))
|
||||
stepRule.step.buildExecute(
|
||||
script: nullScript,
|
||||
)
|
||||
}
|
||||
|
||||
@Test
|
||||
void testDefaultWithDockerImage() {
|
||||
stepRule.step.buildExecute(
|
||||
script: nullScript,
|
||||
dockerImage: 'path/to/myImage:tag',
|
||||
dockerCommand: 'myTestCommand'
|
||||
)
|
||||
assertThat(dockerRule.dockerParams.dockerImage, is('path/to/myImage:tag'))
|
||||
assertThat(shellCallRule.shell, hasItem('myTestCommand'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMaven() {
|
||||
def buildToolCalled = false
|
||||
helper.registerAllowedMethod('mavenExecute', [Map.class], {m ->
|
||||
buildToolCalled = true
|
||||
return
|
||||
})
|
||||
stepRule.step.buildExecute(
|
||||
script: nullScript,
|
||||
buildTool: 'maven',
|
||||
)
|
||||
assertThat(buildToolCalled, is(true))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMta() {
|
||||
def buildToolCalled = false
|
||||
helper.registerAllowedMethod('mtaBuild', [Map.class], {m ->
|
||||
buildToolCalled = true
|
||||
return
|
||||
})
|
||||
stepRule.step.buildExecute(
|
||||
script: nullScript,
|
||||
buildTool: 'mta',
|
||||
)
|
||||
assertThat(buildToolCalled, is(true))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testNpm() {
|
||||
def buildToolCalled = false
|
||||
helper.registerAllowedMethod('npmExecute', [Map.class], {m ->
|
||||
buildToolCalled = true
|
||||
return
|
||||
})
|
||||
stepRule.step.buildExecute(
|
||||
script: nullScript,
|
||||
buildTool: 'npm',
|
||||
)
|
||||
assertThat(buildToolCalled, is(true))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testDocker() {
|
||||
binding.setVariable('docker', new DockerMock('test'))
|
||||
def pushParams= [:]
|
||||
helper.registerAllowedMethod('containerPushToRegistry', [Map.class], {m ->
|
||||
pushParams = m
|
||||
return
|
||||
})
|
||||
stepRule.step.buildExecute(
|
||||
script: nullScript,
|
||||
buildTool: 'docker',
|
||||
dockerImageName: 'path/to/myImage',
|
||||
dockerImageTag: 'myTag',
|
||||
dockerRegistryUrl: 'https://my.registry:55555'
|
||||
)
|
||||
|
||||
assertThat(pushParams.dockerBuildImage.image.toString(), is('path/to/myImage:myTag'))
|
||||
assertThat(pushParams.dockerRegistryUrl.toString(), is('https://my.registry:55555'))
|
||||
assertThat(nullScript.commonPipelineEnvironment.getValue('containerImage').toString(), is('path/to/myImage:myTag'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testDockerWithEnv() {
|
||||
nullScript.commonPipelineEnvironment.setArtifactVersion('1.0.0')
|
||||
binding.setVariable('docker', new DockerMock('test'))
|
||||
def pushParams= [:]
|
||||
helper.registerAllowedMethod('containerPushToRegistry', [Map.class], {m ->
|
||||
pushParams = m
|
||||
return
|
||||
})
|
||||
stepRule.step.buildExecute(
|
||||
script: nullScript,
|
||||
buildTool: 'docker',
|
||||
dockerImageName: 'path/to/myImage',
|
||||
dockerRegistryUrl: 'https://my.registry:55555'
|
||||
)
|
||||
|
||||
assertThat(pushParams.dockerBuildImage.image.toString(), is('path/to/myImage:1.0.0'))
|
||||
assertThat(pushParams.dockerRegistryUrl.toString(), is('https://my.registry:55555'))
|
||||
assertThat(nullScript.commonPipelineEnvironment.getValue('containerImage').toString(), is('path/to/myImage:1.0.0'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testDockerNoPush() {
|
||||
binding.setVariable('docker', new DockerMock('test'))
|
||||
def pushParams= [:]
|
||||
helper.registerAllowedMethod('containerPushToRegistry', [Map.class], {m ->
|
||||
pushParams = m
|
||||
return
|
||||
})
|
||||
stepRule.step.buildExecute(
|
||||
script: nullScript,
|
||||
buildTool: 'docker',
|
||||
dockerImageName: 'path/to/myImage',
|
||||
dockerImageTag: 'myTag',
|
||||
dockerRegistryUrl: ''
|
||||
)
|
||||
|
||||
assertThat(pushParams.dockerBuildImage, nullValue())
|
||||
assertThat(pushParams.dockerRegistryUrl, nullValue())
|
||||
}
|
||||
|
||||
@Test
|
||||
void testKaniko() {
|
||||
def kanikoParams = [:]
|
||||
helper.registerAllowedMethod('kanikoExecute', [Map.class], {m ->
|
||||
kanikoParams = m
|
||||
return
|
||||
})
|
||||
|
||||
stepRule.step.buildExecute(
|
||||
script: nullScript,
|
||||
buildTool: 'kaniko',
|
||||
dockerImageName: 'path/to/myImage',
|
||||
dockerImageTag: 'myTag',
|
||||
dockerRegistryUrl: 'https://my.registry:55555'
|
||||
)
|
||||
|
||||
assertThat(kanikoParams.containerImageNameAndTag.toString(), is('my.registry:55555/path/to/myImage:myTag'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testKanikoNoPush() {
|
||||
def kanikoParams = [:]
|
||||
helper.registerAllowedMethod('kanikoExecute', [Map.class], {m ->
|
||||
kanikoParams = m
|
||||
return
|
||||
})
|
||||
|
||||
stepRule.step.buildExecute(
|
||||
script: nullScript,
|
||||
buildTool: 'kaniko',
|
||||
dockerImageName: 'path/to/myImage',
|
||||
dockerImageTag: 'myTag',
|
||||
dockerRegistryUrl: ''
|
||||
)
|
||||
|
||||
assertThat(kanikoParams.containerImageNameAndTag, is(''))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSwitchToKaniko() {
|
||||
shellCallRule.setReturnValue('docker ps -q > /dev/null', 1)
|
||||
def kanikoParams = [:]
|
||||
helper.registerAllowedMethod('kanikoExecute', [Map.class], {m ->
|
||||
kanikoParams = m
|
||||
return
|
||||
})
|
||||
|
||||
stepRule.step.buildExecute(
|
||||
script: nullScript,
|
||||
buildTool: 'kaniko',
|
||||
dockerImageName: 'path/to/myImage',
|
||||
dockerImageTag: 'myTag',
|
||||
dockerRegistryUrl: 'https://my.registry:55555'
|
||||
)
|
||||
|
||||
assertThat(kanikoParams.containerImageNameAndTag.toString(), is('my.registry:55555/path/to/myImage:myTag'))
|
||||
}
|
||||
|
||||
}
|
@ -1,3 +1,4 @@
|
||||
import org.junit.After
|
||||
import org.junit.Before
|
||||
import org.junit.Rule
|
||||
import org.junit.Test
|
||||
@ -43,6 +44,11 @@ class ContainerExecuteStructureTestsTest extends BasePiperTest {
|
||||
})
|
||||
}
|
||||
|
||||
@After
|
||||
void cleanup() {
|
||||
nullScript.env = [ON_K8S: null]
|
||||
}
|
||||
|
||||
@Test
|
||||
void testExecuteContainterStructureTestsDefault() throws Exception {
|
||||
helper.registerAllowedMethod('readFile', [String.class], {s ->
|
||||
|
test/groovy/ContainerPushToRegistryTest.groovy (new file, 264 lines)
@ -0,0 +1,264 @@
|
||||
#!groovy
|
||||
import org.junit.Before
|
||||
import org.junit.Rule
|
||||
import org.junit.Test
|
||||
import org.junit.rules.ExpectedException
|
||||
import org.junit.rules.RuleChain
|
||||
import util.BasePiperTest
|
||||
import util.JenkinsCredentialsRule
|
||||
import util.JenkinsDockerExecuteRule
|
||||
import util.JenkinsReadYamlRule
|
||||
import util.JenkinsShellCallRule
|
||||
import util.JenkinsStepRule
|
||||
import util.Rules
|
||||
|
||||
import static org.hamcrest.CoreMatchers.containsString
|
||||
import static org.hamcrest.CoreMatchers.hasItem
|
||||
import static org.hamcrest.CoreMatchers.is
|
||||
import static org.hamcrest.CoreMatchers.not
|
||||
import static org.junit.Assert.assertThat
|
||||
|
||||
class ContainerPushToRegistryTest extends BasePiperTest {
|
||||
private ExpectedException exception = ExpectedException.none()
|
||||
private JenkinsStepRule stepRule = new JenkinsStepRule(this)
|
||||
private JenkinsDockerExecuteRule dockerRule = new JenkinsDockerExecuteRule(this)
|
||||
private JenkinsShellCallRule shellCallRule = new JenkinsShellCallRule(this)
|
||||
|
||||
@Rule
|
||||
public RuleChain rules = Rules
|
||||
.getCommonRules(this)
|
||||
.around(new JenkinsReadYamlRule(this))
|
||||
.around(exception)
|
||||
.around(shellCallRule)
|
||||
.around(dockerRule)
|
||||
.around(new JenkinsCredentialsRule(this)
|
||||
.withCredentials('testCredentialsId', 'registryUser', '********')
|
||||
)
|
||||
.around(stepRule)
|
||||
|
||||
def dockerMockArgs = [:]
|
||||
class DockerMock {
|
||||
DockerMock(name){
|
||||
dockerMockArgs.name = name
|
||||
}
|
||||
def withRegistry(paramRegistry, paramCredentials, paramClosure){
|
||||
dockerMockArgs.paramRegistry = paramRegistry
|
||||
dockerMockArgs.paramCredentials = paramCredentials
|
||||
return paramClosure()
|
||||
}
|
||||
def withRegistry(paramRegistry, paramClosure){
|
||||
dockerMockArgs.paramRegistryAnonymous = paramRegistry.toString()
|
||||
return paramClosure()
|
||||
}
|
||||
|
||||
def image(name) {
|
||||
dockerMockArgs.name = name
|
||||
return new ContainerImageMock()
|
||||
}
|
||||
}
|
||||
|
||||
def dockerMockPushes = []
|
||||
def dockerMockPull = false
|
||||
class ContainerImageMock {
|
||||
ContainerImageMock(){}
|
||||
def push(tag){
|
||||
dockerMockPushes.add(tag)
|
||||
}
|
||||
def push(){
|
||||
push('default')
|
||||
}
|
||||
|
||||
def pull(){
|
||||
dockerMockPull = true
|
||||
}
|
||||
}
|
||||
|
||||
@Before
|
||||
void init() {
|
||||
binding.setVariable('docker', new DockerMock('test'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testNoImageProvided() {
|
||||
exception.expectMessage(containsString('Please provide a dockerImage'))
|
||||
stepRule.step.containerPushToRegistry(
|
||||
script: nullScript,
|
||||
dockerRegistryUrl: 'https://testRegistry',
|
||||
dockerCredentialsId: 'testCredentialsId',
|
||||
)
|
||||
}
|
||||
|
||||
@Test
|
||||
void testDefault() {
|
||||
stepRule.step.containerPushToRegistry(
|
||||
script: nullScript,
|
||||
dockerRegistryUrl: 'https://testRegistry',
|
||||
dockerCredentialsId: 'testCredentialsId',
|
||||
dockerImage: 'testImage:tag',
|
||||
)
|
||||
|
||||
assertThat(dockerMockArgs.paramRegistry, is('https://testRegistry'))
|
||||
assertThat(dockerMockArgs.paramCredentials, is('testCredentialsId'))
|
||||
assertThat(dockerMockArgs.name, is('testImage:tag'))
|
||||
assertThat(dockerMockPushes, hasItem('default'))
|
||||
assertThat(dockerMockPushes, not(hasItem('latest')))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testBuildImagePushLatest() {
|
||||
def dockerBuildImage = new ContainerImageMock()
|
||||
stepRule.step.containerPushToRegistry(
|
||||
script: nullScript,
|
||||
dockerRegistryUrl: 'https://testRegistry',
|
||||
dockerCredentialsId: 'testCredentialsId',
|
||||
dockerBuildImage: dockerBuildImage,
|
||||
tagLatest: true
|
||||
)
|
||||
|
||||
assertThat(dockerMockArgs.paramRegistry, is('https://testRegistry'))
|
||||
assertThat(dockerMockArgs.paramCredentials, is('testCredentialsId'))
|
||||
assertThat(dockerMockArgs.paramRegistryAnonymous, is(null))
|
||||
assertThat(dockerMockArgs.name, is('test'))
|
||||
assertThat(dockerMockPushes, hasItem('default'))
|
||||
assertThat(dockerMockPushes, hasItem('latest'))
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Test
|
||||
void testFromEnv() {
|
||||
nullScript.commonPipelineEnvironment.setValue('containerImage', 'path/testImage:tag')
|
||||
nullScript.commonPipelineEnvironment.setValue('containerRegistryUrl', 'https://testRegistry:55555')
|
||||
|
||||
stepRule.step.containerPushToRegistry(
|
||||
script: nullScript,
|
||||
dockerRegistryUrl: 'https://testRegistry',
|
||||
dockerCredentialsId: 'testCredentialsId',
|
||||
)
|
||||
|
||||
assertThat(dockerMockArgs.paramRegistryAnonymous, is('https://testRegistry:55555'))
|
||||
assertThat(dockerMockArgs.name, is('path/testImage:tag'))
|
||||
assertThat(shellCallRule.shell, hasItem('docker tag testRegistry:55555/path/testImage:tag path/testImage:tag'))
|
||||
assertThat(dockerMockPull, is(true))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testWithSourceImageAndRegistry() {
|
||||
stepRule.step.containerPushToRegistry(
|
||||
script: nullScript,
|
||||
dockerCredentialsId: 'testCredentialsId',
|
||||
dockerRegistryUrl: 'https://testRegistry',
|
||||
sourceImage: 'testSourceName:testSourceTag',
|
||||
sourceRegistryUrl: 'http://testSourceRegistry'
|
||||
)
|
||||
|
||||
assertThat(dockerMockArgs.paramRegistryAnonymous, is('http://testSourceRegistry'))
|
||||
assertThat(dockerMockArgs.name, is('testSourceName:testSourceTag'))
|
||||
assertThat(shellCallRule.shell, hasItem('docker tag testSourceRegistry/testSourceName:testSourceTag testSourceName:testSourceTag'))
|
||||
assertThat(dockerMockPull, is(true))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testWithSourceAndTarget() {
|
||||
stepRule.step.containerPushToRegistry(
|
||||
script: nullScript,
|
||||
dockerCredentialsId: 'testCredentialsId',
|
||||
dockerImage: 'testImage:tag',
|
||||
dockerRegistryUrl: 'https://testRegistry',
|
||||
sourceImage: 'testSourceName:testSourceTag',
|
||||
sourceRegistryUrl: 'http://testSourceRegistry'
|
||||
)
|
||||
|
||||
assertThat(dockerMockArgs.paramRegistryAnonymous, is('http://testSourceRegistry'))
|
||||
assertThat(dockerMockArgs.name, is('testSourceName:testSourceTag'))
|
||||
assertThat(shellCallRule.shell, hasItem('docker tag testSourceRegistry/testSourceName:testSourceTag testImage:tag'))
|
||||
assertThat(dockerMockPull, is(true))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testKubernetesMove() {
|
||||
binding.setVariable('docker', null)
|
||||
shellCallRule.setReturnValue('docker ps -q > /dev/null', 1)
|
||||
|
||||
stepRule.step.containerPushToRegistry(
|
||||
script: nullScript,
|
||||
dockerCredentialsId: 'testCredentialsId',
|
||||
dockerImage: 'testImage:tag',
|
||||
dockerRegistryUrl: 'https://my.registry:55555',
|
||||
skopeoImage: 'skopeo:latest',
|
||||
sourceImage: 'sourceImage:sourceTag',
|
||||
sourceRegistryUrl: 'https://my.source.registry:44444'
|
||||
)
|
||||
|
||||
assertThat(shellCallRule.shell, hasItem('skopeo copy --src-tls-verify=false --dest-tls-verify=false --dest-creds=\'registryUser\':\'********\' docker://my.source.registry:44444/sourceImage:sourceTag docker://my.registry:55555/testImage:tag'))
|
||||
assertThat(dockerRule.dockerParams.dockerImage, is('skopeo:latest'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testKubernetesMoveTagLatest() {
|
||||
binding.setVariable('docker', null)
|
||||
shellCallRule.setReturnValue('docker ps -q > /dev/null', 1)
|
||||
|
||||
stepRule.step.containerPushToRegistry(
|
||||
script: nullScript,
|
||||
dockerCredentialsId: 'testCredentialsId',
|
||||
dockerImage: 'testImage:tag',
|
||||
dockerRegistryUrl: 'https://my.registry:55555',
|
||||
sourceImage: 'sourceImage:sourceTag',
|
||||
sourceRegistryUrl: 'https://my.source.registry:44444',
|
||||
tagLatest: true
|
||||
)
|
||||
|
||||
assertThat(shellCallRule.shell, hasItem('skopeo copy --src-tls-verify=false --dest-tls-verify=false --dest-creds=\'registryUser\':\'********\' docker://my.source.registry:44444/sourceImage:sourceTag docker://my.registry:55555/testImage:tag'))
|
||||
assertThat(shellCallRule.shell, hasItem('skopeo copy --src-tls-verify=false --dest-tls-verify=false --dest-creds=\'registryUser\':\'********\' docker://my.source.registry:44444/sourceImage:sourceTag docker://my.registry:55555/testImage:latest'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testKubernetesSourceOnly() {
|
||||
binding.setVariable('docker', null)
|
||||
shellCallRule.setReturnValue('docker ps -q > /dev/null', 1)
|
||||
|
||||
stepRule.step.containerPushToRegistry(
|
||||
script: nullScript,
|
||||
dockerCredentialsId: 'testCredentialsId',
|
||||
dockerRegistryUrl: 'https://my.registry:55555',
|
||||
sourceImage: 'sourceImage:sourceTag',
|
||||
sourceRegistryUrl: 'https://my.source.registry:44444'
|
||||
)
|
||||
|
||||
assertThat(shellCallRule.shell, hasItem('skopeo copy --src-tls-verify=false --dest-tls-verify=false --dest-creds=\'registryUser\':\'********\' docker://my.source.registry:44444/sourceImage:sourceTag docker://my.registry:55555/sourceImage:sourceTag'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testKubernetesSourceRegistryFromEnv() {
|
||||
binding.setVariable('docker', null)
|
||||
shellCallRule.setReturnValue('docker ps -q > /dev/null', 1)
|
||||
|
||||
nullScript.commonPipelineEnvironment.setValue('containerImage', 'sourceImage:sourceTag')
|
||||
nullScript.commonPipelineEnvironment.setValue('containerRegistryUrl', 'https://my.source.registry:44444')
|
||||
|
||||
stepRule.step.containerPushToRegistry(
|
||||
script: nullScript,
|
||||
dockerCredentialsId: 'testCredentialsId',
|
||||
dockerRegistryUrl: 'https://my.registry:55555',
|
||||
sourceImage: 'sourceImage:sourceTag',
|
||||
)
|
||||
|
||||
assertThat(shellCallRule.shell, hasItem('skopeo copy --src-tls-verify=false --dest-tls-verify=false --dest-creds=\'registryUser\':\'********\' docker://my.source.registry:44444/sourceImage:sourceTag docker://my.registry:55555/sourceImage:sourceTag'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testKubernetesPushTar() {
|
||||
binding.setVariable('docker', null)
|
||||
shellCallRule.setReturnValue('docker ps -q > /dev/null', 1)
|
||||
|
||||
exception.expectMessage('Only moving images')
|
||||
stepRule.step.containerPushToRegistry(
|
||||
script: nullScript,
|
||||
dockerCredentialsId: 'testCredentialsId',
|
||||
dockerArchive: 'myImage.tar',
|
||||
dockerImage: 'testImage:tag',
|
||||
dockerRegistryUrl: 'https://my.registry:55555',
|
||||
)
|
||||
}
|
||||
}
|
@ -261,12 +261,10 @@ class DockerExecuteOnKubernetesTest extends BasePiperTest {
|
||||
assertThat(containerName, is('mavenexecute'))
|
||||
|
||||
assertThat(containersList, allOf(
|
||||
hasItem('jnlp'),
|
||||
hasItem('mavenexecute'),
|
||||
hasItem('selenium'),
|
||||
))
|
||||
assertThat(imageList, allOf(
|
||||
hasItem('s4sdk/jenkins-agent-k8s:latest'),
|
||||
hasItem('maven:3.5-jdk-8-alpine'),
|
||||
hasItem('selenium/standalone-chrome'),
|
||||
))
|
||||
@ -388,6 +386,54 @@ class DockerExecuteOnKubernetesTest extends BasePiperTest {
|
||||
assertThat(podNodeSelector, is('size:big'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testDockerExecuteOnKubernetesCustomJnlpViaEnv() {
|
||||
|
||||
nullScript.configuration = [
|
||||
general: [jenkinsKubernetes: [jnlpAgent: 'config/jnlp:latest']]
|
||||
]
|
||||
binding.variables.env.JENKINS_JNLP_IMAGE = 'env/jnlp:latest'
|
||||
stepRule.step.dockerExecuteOnKubernetes(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
dockerImage: 'maven:3.5-jdk-8-alpine',
|
||||
) { bodyExecuted = true }
|
||||
assertTrue(bodyExecuted)
|
||||
|
||||
assertThat(containersList, allOf(
|
||||
hasItem('jnlp'),
|
||||
hasItem('container-exec')
|
||||
))
|
||||
assertThat(imageList, allOf(
|
||||
hasItem('env/jnlp:latest'),
|
||||
hasItem('maven:3.5-jdk-8-alpine'),
|
||||
))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testDockerExecuteOnKubernetesCustomJnlpViaConfig() {
|
||||
|
||||
nullScript.configuration = [
|
||||
general: [jenkinsKubernetes: [jnlpAgent: 'config/jnlp:latest']]
|
||||
]
|
||||
binding.variables.env.JENKINS_JNLP_IMAGE = 'config/jnlp:latest'
|
||||
stepRule.step.dockerExecuteOnKubernetes(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
dockerImage: 'maven:3.5-jdk-8-alpine',
|
||||
) { bodyExecuted = true }
|
||||
assertTrue(bodyExecuted)
|
||||
|
||||
assertThat(containersList, allOf(
|
||||
hasItem('jnlp'),
|
||||
hasItem('container-exec')
|
||||
))
|
||||
assertThat(imageList, allOf(
|
||||
hasItem('config/jnlp:latest'),
|
||||
hasItem('maven:3.5-jdk-8-alpine'),
|
||||
))
|
||||
}
|
||||
|
||||
|
||||
private container(options, body) {
|
||||
containerName = options.name
|
||||
|
@ -33,8 +33,13 @@ class GithubPublishReleaseTest extends BasePiperTest {
|
||||
def data
|
||||
def requestList = []
|
||||
|
||||
def responseStatusLatestRelease
|
||||
|
||||
@Before
|
||||
void init() throws Exception {
|
||||
|
||||
responseStatusLatestRelease = 200
|
||||
|
||||
// register Jenkins commands with mock values
|
||||
helper.registerAllowedMethod( "deleteDir", [], null )
|
||||
helper.registerAllowedMethod("httpRequest", [], null)
|
||||
@ -57,12 +62,9 @@ class GithubPublishReleaseTest extends BasePiperTest {
|
||||
def responseRelease = '{"url":"https://api.github.com/SAP/jenkins-library/releases/27149","assets_url":"https://api.github.com/SAP/jenkins-library/releases/27149/assets","upload_url":"https://github.com/api/uploads/repos/ContinuousDelivery/piper-library/releases/27149/assets{?name,label}","html_url":"https://github.com/ContinuousDelivery/piper-library/releases/tag/test","id":27149,"tag_name":"test","target_commitish":"master","name":"v1.0.0","draft":false,"author":{"login":"XTEST2","id":6991,"avatar_url":"https://github.com/avatars/u/6991?","gravatar_id":"","url":"https://api.github.com/users/XTEST2","html_url":"https://github.com/XTEST2","followers_url":"https://api.github.com/users/XTEST2/followers","following_url":"https://api.github.com/users/XTEST2/following{/other_user}","gists_url":"https://api.github.com/users/XTEST2/gists{/gist_id}","starred_url":"https://api.github.com/users/XTEST2/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/XTEST2/subscriptions","organizations_url":"https://api.github.com/users/XTEST2/orgs","repos_url":"https://api.github.com/users/XTEST2/repos","events_url":"https://api.github.com/users/XTEST2/events{/privacy}","received_events_url":"https://api.github.com/users/XTEST2/received_events","type":"User","site_admin":false},"prerelease":false,"created_at":"2018-04-18T11:00:17Z","published_at":"2018-04-18T11:32:34Z","assets":[],"tarball_url":"https://api.github.com/SAP/jenkins-library/tarball/test","zipball_url":"https://api.github.com/SAP/jenkins-library/zipball/test","body":"Description of the release"}'
|
||||
|
||||
helper.registerAllowedMethod("httpRequest", [String.class], { s ->
|
||||
def result = [status: 404]
|
||||
def result = [:]
|
||||
requestList.push(s.toString())
|
||||
if(s.contains('/releases/latest?')) {
|
||||
result.content = responseLatestRelease
|
||||
result.status = 200
|
||||
} else if(s.contains('/issues?')) {
|
||||
if(s.contains('/issues?')) {
|
||||
result.content = responseIssues
|
||||
result.status = 200
|
||||
}
|
||||
@ -70,12 +72,16 @@ class GithubPublishReleaseTest extends BasePiperTest {
|
||||
})
|
||||
helper.registerAllowedMethod("httpRequest", [Map.class], { m ->
|
||||
def result = ''
|
||||
def status = 200
|
||||
requestList.push(m?.url?.toString())
|
||||
if(m?.url?.contains('/releases?')){
|
||||
data = new JsonSlurperClassic().parseText(m?.requestBody?.toString())
|
||||
result = responseRelease
|
||||
} else if(m.url.contains('/releases/latest?')) {
|
||||
result = responseLatestRelease
|
||||
status = responseStatusLatestRelease
|
||||
}
|
||||
return [content: result]
|
||||
return [content: result, status: status]
|
||||
})
|
||||
}
|
||||
|
||||
@ -146,6 +152,21 @@ class GithubPublishReleaseTest extends BasePiperTest {
|
||||
assertJobStatusSuccess()
|
||||
}
|
||||
|
||||
@Test
|
||||
void testNoReleaseYet() {
|
||||
responseStatusLatestRelease = 404
|
||||
|
||||
stepRule.step.githubPublishRelease(
|
||||
script: nullScript,
|
||||
githubOrg: 'TestOrg',
|
||||
githubRepo: 'TestRepo',
|
||||
githubTokenCredentialsId: 'TestCredentials',
|
||||
version: '1.2.3'
|
||||
)
|
||||
|
||||
assertThat(loggingRule.log, containsString('This is the first release - no previous releases available'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testExcludeLabels() throws Exception {
|
||||
stepRule.step.githubPublishRelease(
|
||||
|
@ -1,121 +0,0 @@
|
||||
package stages
|
||||
|
||||
import org.junit.Before
|
||||
import org.junit.Rule
|
||||
import org.junit.Test
|
||||
import org.junit.rules.ExpectedException
|
||||
import org.junit.rules.RuleChain
|
||||
import util.BasePiperTest
|
||||
import util.JenkinsLoggingRule
|
||||
import util.JenkinsReadYamlRule
|
||||
import util.JenkinsStepRule
|
||||
import util.Rules
|
||||
|
||||
import static org.hamcrest.Matchers.*
|
||||
import static org.junit.Assert.assertThat
|
||||
|
||||
class PiperPipelineStageInitTest extends BasePiperTest {
|
||||
private JenkinsStepRule jsr = new JenkinsStepRule(this)
|
||||
private JenkinsLoggingRule jlr = new JenkinsLoggingRule(this)
|
||||
private ExpectedException thrown = ExpectedException.none()
|
||||
|
||||
@Rule
|
||||
public RuleChain rules = Rules
|
||||
.getCommonRules(this)
|
||||
.around(new JenkinsReadYamlRule(this))
|
||||
.around(thrown)
|
||||
.around(jlr)
|
||||
.around(jsr)
|
||||
|
||||
private List stepsCalled = []
|
||||
|
||||
@Before
|
||||
void init() {
|
||||
binding.variables.env.STAGE_NAME = 'Init'
|
||||
binding.setVariable('scm', {})
|
||||
|
||||
helper.registerAllowedMethod('deleteDir', [], null)
|
||||
helper.registerAllowedMethod("findFiles", [Map.class], { map ->
|
||||
switch (map.glob) {
|
||||
case 'pom.xml':
|
||||
return [new File('pom.xml')].toArray()
|
||||
default:
|
||||
return [].toArray()
|
||||
}
|
||||
})
|
||||
helper.registerAllowedMethod('piperStageWrapper', [Map.class, Closure.class], {m, body ->
|
||||
assertThat(m.stageName, is('Init'))
|
||||
return body()
|
||||
})
|
||||
helper.registerAllowedMethod('checkout', [Closure.class], {c ->
|
||||
stepsCalled.add('checkout')
|
||||
return [
|
||||
GIT_COMMIT: 'abcdef12345',
|
||||
GIT_URL: 'some.url'
|
||||
]
|
||||
})
|
||||
helper.registerAllowedMethod('setupCommonPipelineEnvironment', [Map.class], {m -> stepsCalled.add('setupCommonPipelineEnvironment')})
|
||||
helper.registerAllowedMethod('piperInitRunStageConfiguration', [Map.class], {m -> stepsCalled.add('piperInitRunStageConfiguration')})
|
||||
helper.registerAllowedMethod('slackSendNotification', [Map.class], {m -> stepsCalled.add('slackSendNotification')})
|
||||
helper.registerAllowedMethod('artifactSetVersion', [Map.class], {m -> stepsCalled.add('artifactSetVersion')})
|
||||
helper.registerAllowedMethod('pipelineStashFilesBeforeBuild', [Map.class], {m -> stepsCalled.add('pipelineStashFilesBeforeBuild')})
|
||||
}
|
||||
|
||||
@Test
|
||||
void testInitNoBuildTool() {
|
||||
thrown.expectMessage('ERROR - NO VALUE AVAILABLE FOR buildTool')
|
||||
jsr.step.piperPipelineStageInit(script: nullScript, juStabUtils: utils)
|
||||
}
|
||||
|
||||
@Test
|
||||
void testInitBuildToolDoesNotMatchProject() {
|
||||
thrown.expect(hudson.AbortException)
|
||||
thrown.expectMessage(containsString("buildTool configuration 'npm' does not fit to your project"))
|
||||
jsr.step.piperPipelineStageInit(script: nullScript, juStabUtils: utils, buildTool: 'npm')
|
||||
}
|
||||
|
||||
@Test
|
||||
void testInitDefault() {
|
||||
jsr.step.piperPipelineStageInit(script: nullScript, juStabUtils: utils, buildTool: 'maven')
|
||||
|
||||
assertThat(stepsCalled, hasItems(
|
||||
'checkout',
|
||||
'setupCommonPipelineEnvironment',
|
||||
'piperInitRunStageConfiguration',
|
||||
'artifactSetVersion',
|
||||
'pipelineStashFilesBeforeBuild'
|
||||
))
|
||||
assertThat(stepsCalled, not(hasItems('slackSendNotification')))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testInitNotOnProductiveBranch() {
|
||||
binding.variables.env.BRANCH_NAME = 'anyOtherBranch'
|
||||
|
||||
jsr.step.piperPipelineStageInit(script: nullScript, juStabUtils: utils, buildTool: 'maven')
|
||||
|
||||
assertThat(stepsCalled, hasItems(
|
||||
'checkout',
|
||||
'setupCommonPipelineEnvironment',
|
||||
'piperInitRunStageConfiguration',
|
||||
'pipelineStashFilesBeforeBuild'
|
||||
))
|
||||
assertThat(stepsCalled, not(hasItems('artifactSetVersion')))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testInitWithSlackNotification() {
|
||||
nullScript.commonPipelineEnvironment.configuration = [runStep: [Init: [slackSendNotification: true]]]
|
||||
|
||||
jsr.step.piperPipelineStageInit(script: nullScript, juStabUtils: utils, buildTool: 'maven')
|
||||
|
||||
assertThat(stepsCalled, hasItems(
|
||||
'checkout',
|
||||
'setupCommonPipelineEnvironment',
|
||||
'piperInitRunStageConfiguration',
|
||||
'artifactSetVersion',
|
||||
'slackSendNotification',
|
||||
'pipelineStashFilesBeforeBuild'
|
||||
))
|
||||
}
|
||||
}
|
@ -138,7 +138,7 @@ class WhitesourceExecuteScanTest extends BasePiperTest {
|
||||
assertThat(dockerExecuteRule.dockerParams, hasEntry('stashContent', ['buildDescriptor', 'opensourceConfiguration', 'modified whitesource config d3aa80454919391024374ba46b4df082d15ab9a3']))
|
||||
|
||||
assertThat(shellRule.shell, Matchers.hasItems(
|
||||
is('curl --location --output wss-unified-agent.jar https://github.com/whitesource/unified-agent-distribution/raw/master/standAlone/wss-unified-agent.jar'),
|
||||
is('curl --location --output wss-unified-agent.jar https://github.com/whitesource/unified-agent-distribution/releases/latest/download/wss-unified-agent.jar'),
|
||||
is('./bin/java -jar wss-unified-agent.jar -c \'./wss-unified-agent.config.d3aa80454919391024374ba46b4df082d15ab9a3\' -apiKey \'testOrgToken\' -userKey \'token-0815\' -product \'testProduct\'')
|
||||
))
|
||||
|
||||
@ -182,7 +182,7 @@ class WhitesourceExecuteScanTest extends BasePiperTest {
|
||||
assertThat(dockerExecuteRule.dockerParams, hasEntry('dockerWorkspace', '/home/node'))
|
||||
assertThat(dockerExecuteRule.dockerParams, hasEntry('stashContent', ['buildDescriptor', 'opensourceConfiguration', 'modified whitesource config d3aa80454919391024374ba46b4df082d15ab9a3']))
|
||||
assertThat(shellRule.shell, Matchers.hasItems(
|
||||
is('curl --location --output wss-unified-agent.jar https://github.com/whitesource/unified-agent-distribution/raw/master/standAlone/wss-unified-agent.jar'),
|
||||
is('curl --location --output wss-unified-agent.jar https://github.com/whitesource/unified-agent-distribution/releases/latest/download/wss-unified-agent.jar'),
|
||||
is('curl --location --output jvm.tar.gz https://github.com/SAP/SapMachine/releases/download/sapmachine-11.0.2/sapmachine-jre-11.0.2_linux-x64_bin.tar.gz && tar --strip-components=1 -xzf jvm.tar.gz'),
|
||||
is('./bin/java -jar wss-unified-agent.jar -c \'./wss-unified-agent.config.d3aa80454919391024374ba46b4df082d15ab9a3\' -apiKey \'testOrgToken\' -userKey \'token-0815\' -product \'testProductName\'')
|
||||
))
|
||||
@ -223,7 +223,7 @@ class WhitesourceExecuteScanTest extends BasePiperTest {
|
||||
])
|
||||
|
||||
assertThat(shellRule.shell, Matchers.hasItems(
|
||||
is('curl --location --output wss-unified-agent.jar https://github.com/whitesource/unified-agent-distribution/raw/master/standAlone/wss-unified-agent.jar'),
|
||||
is('curl --location --output wss-unified-agent.jar https://github.com/whitesource/unified-agent-distribution/releases/latest/download/wss-unified-agent.jar'),
|
||||
is('curl --location --output jvm.tar.gz https://github.com/SAP/SapMachine/releases/download/sapmachine-11.0.2/sapmachine-jre-11.0.2_linux-x64_bin.tar.gz && tar --strip-components=1 -xzf jvm.tar.gz'),
|
||||
is('./bin/java -jar wss-unified-agent.jar -c \'./../../testConfigPath.2766cacc0cf1449dd4034385f4a9f0a6fdb755cf\' -apiKey \'b39d1328-52e2-42e3-98f0-932709daf3f0\' -userKey \'token-0815\' -product \'SHC - Piper\'')
|
||||
))
|
||||
@ -262,7 +262,7 @@ class WhitesourceExecuteScanTest extends BasePiperTest {
|
||||
assertThat(dockerExecuteRule.dockerParams, hasEntry('stashContent', ['buildDescriptor', 'opensourceConfiguration', 'modified whitesource config d3aa80454919391024374ba46b4df082d15ab9a3']))
|
||||
|
||||
assertThat(shellRule.shell, Matchers.hasItems(
|
||||
is('curl --location --output wss-unified-agent.jar https://github.com/whitesource/unified-agent-distribution/raw/master/standAlone/wss-unified-agent.jar'),
|
||||
is('curl --location --output wss-unified-agent.jar https://github.com/whitesource/unified-agent-distribution/releases/latest/download/wss-unified-agent.jar'),
|
||||
is('curl --location --output jvm.tar.gz https://github.com/SAP/SapMachine/releases/download/sapmachine-11.0.2/sapmachine-jre-11.0.2_linux-x64_bin.tar.gz && tar --strip-components=1 -xzf jvm.tar.gz'),
|
||||
is('./bin/java -jar wss-unified-agent.jar -c \'./wss-unified-agent.config.d3aa80454919391024374ba46b4df082d15ab9a3\' -apiKey \'testOrgToken\' -userKey \'token-0815\' -product \'testProductName\'')
|
||||
))
|
||||
@ -308,7 +308,7 @@ class WhitesourceExecuteScanTest extends BasePiperTest {
|
||||
assertThat(dockerExecuteRule.dockerParams, hasEntry('stashContent', ['buildDescriptor', 'opensourceConfiguration', 'modified whitesource config d3aa80454919391024374ba46b4df082d15ab9a3']))
|
||||
|
||||
assertThat(shellRule.shell, Matchers.hasItems(
|
||||
is('curl --location --output wss-unified-agent.jar https://github.com/whitesource/unified-agent-distribution/raw/master/standAlone/wss-unified-agent.jar'),
|
||||
is('curl --location --output wss-unified-agent.jar https://github.com/whitesource/unified-agent-distribution/releases/latest/download/wss-unified-agent.jar'),
|
||||
is('curl --location --output jvm.tar.gz https://github.com/SAP/SapMachine/releases/download/sapmachine-11.0.2/sapmachine-jre-11.0.2_linux-x64_bin.tar.gz && tar --strip-components=1 -xzf jvm.tar.gz'),
|
||||
is('./bin/java -jar wss-unified-agent.jar -c \'./wss-unified-agent.config.d3aa80454919391024374ba46b4df082d15ab9a3\' -apiKey \'testOrgToken\' -userKey \'token-0815\' -product \'testProductName\'')
|
||||
))
|
||||
@ -393,7 +393,7 @@ class WhitesourceExecuteScanTest extends BasePiperTest {
|
||||
assertThat(dockerExecuteRule.dockerParams, hasEntry('stashContent', ['buildDescriptor', 'opensourceConfiguration', 'modified whitesource config d3aa80454919391024374ba46b4df082d15ab9a3']))
|
||||
|
||||
assertThat(shellRule.shell, Matchers.hasItems(
|
||||
is('curl --location --output wss-unified-agent.jar https://github.com/whitesource/unified-agent-distribution/raw/master/standAlone/wss-unified-agent.jar'),
|
||||
is('curl --location --output wss-unified-agent.jar https://github.com/whitesource/unified-agent-distribution/releases/latest/download/wss-unified-agent.jar'),
|
||||
is('./bin/java -jar wss-unified-agent.jar -c \'./wss-unified-agent.config.d3aa80454919391024374ba46b4df082d15ab9a3\' -apiKey \'testOrgToken\' -userKey \'token-0815\' -product \'testProductName\'')
|
||||
))
|
||||
|
||||
@ -451,7 +451,7 @@ class WhitesourceExecuteScanTest extends BasePiperTest {
|
||||
assertThat(dockerExecuteRule.dockerParams, hasEntry('stashContent', ['buildDescriptor', 'opensourceConfiguration', 'checkmarx', 'modified whitesource config 7d1c90ed46c66061fc8ea45dd96e209bf767f038']))
|
||||
|
||||
assertThat(shellRule.shell, Matchers.hasItems(
|
||||
is('curl --location --output wss-unified-agent.jar https://github.com/whitesource/unified-agent-distribution/raw/master/standAlone/wss-unified-agent.jar'),
|
||||
is('curl --location --output wss-unified-agent.jar https://github.com/whitesource/unified-agent-distribution/releases/latest/download/wss-unified-agent.jar'),
|
||||
is('./bin/java -jar wss-unified-agent.jar -c \'./myProject/wss-unified-agent.config.7d1c90ed46c66061fc8ea45dd96e209bf767f038\' -apiKey \'testOrgToken\' -userKey \'token-0815\' -product \'testProductName\'')
|
||||
))
|
||||
|
||||
@ -508,7 +508,7 @@ class WhitesourceExecuteScanTest extends BasePiperTest {
|
||||
assertThat(dockerExecuteRule.dockerParams, hasEntry('stashContent', ['buildDescriptor', 'opensourceConfiguration', 'checkmarx', 'modified whitesource config d3aa80454919391024374ba46b4df082d15ab9a3']))
|
||||
|
||||
assertThat(shellRule.shell, Matchers.hasItems(
|
||||
is('curl --location --output wss-unified-agent.jar https://github.com/whitesource/unified-agent-distribution/raw/master/standAlone/wss-unified-agent.jar'),
|
||||
is('curl --location --output wss-unified-agent.jar https://github.com/whitesource/unified-agent-distribution/releases/latest/download/wss-unified-agent.jar'),
|
||||
is('./bin/java -jar wss-unified-agent.jar -c \'./wss-unified-agent.config.d3aa80454919391024374ba46b4df082d15ab9a3\' -apiKey \'testOrgToken\' -userKey \'token-0815\' -product \'testProductName\'')
|
||||
))
|
||||
|
||||
|
@ -111,15 +111,15 @@ class DescriptorUtilsTest extends BasePiperTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetDlangGAV() {
|
||||
void testGetDubGAV() {
|
||||
|
||||
helper.registerAllowedMethod("readJSON", [Map.class], {
|
||||
searchConfig ->
|
||||
def packageJsonFile = new File("test/resources/DescriptorUtils/dlang/${searchConfig.file}")
|
||||
def packageJsonFile = new File("test/resources/DescriptorUtils/dub/${searchConfig.file}")
|
||||
return new JsonUtils().jsonStringToGroovyObject(packageJsonFile.text)
|
||||
})
|
||||
|
||||
def gav = descriptorUtils.getDlangGAV('dub.json')
|
||||
def gav = descriptorUtils.getDubGAV('dub.json')
|
||||
|
||||
assertEquals(gav.group, 'com.sap.dlang')
|
||||
assertEquals(gav.artifact, 'hdi-deploy')
|
||||
|
97
test/groovy/com/sap/piper/DockerUtilsTest.groovy
Normal file
@ -0,0 +1,97 @@
|
||||
package com.sap.piper
|
||||
|
||||
import hudson.AbortException
|
||||
import org.junit.Before
|
||||
import org.junit.Ignore
|
||||
import org.junit.Rule
|
||||
import org.junit.Test
|
||||
import org.junit.rules.ExpectedException
|
||||
import org.junit.rules.RuleChain
|
||||
import util.BasePiperTest
|
||||
import util.JenkinsCredentialsRule
|
||||
import util.JenkinsShellCallRule
|
||||
import util.Rules
|
||||
|
||||
import static org.hamcrest.CoreMatchers.hasItem
|
||||
import static org.hamcrest.CoreMatchers.is
|
||||
import static org.junit.Assert.assertThat
|
||||
|
||||
class DockerUtilsTest extends BasePiperTest {
|
||||
|
||||
public ExpectedException exception = ExpectedException.none()
|
||||
public JenkinsShellCallRule shellCallRule = new JenkinsShellCallRule(this)
|
||||
|
||||
def dockerMockArgs = [:]
|
||||
class DockerMock {
|
||||
def withRegistry(paramRegistry, paramClosure){
|
||||
dockerMockArgs.paramRegistryAnonymous = paramRegistry.toString()
|
||||
return paramClosure()
|
||||
}
|
||||
}
|
||||
|
||||
@Rule
|
||||
public RuleChain ruleChain = Rules.getCommonRules(this)
|
||||
.around(shellCallRule)
|
||||
.around(exception)
|
||||
.around(new JenkinsCredentialsRule(this)
|
||||
.withCredentials('testCredentialsId', 'registryUser', '********')
|
||||
)
|
||||
@Before
|
||||
void init() {
|
||||
nullScript.binding.setVariable('docker', new DockerMock())
|
||||
}
|
||||
|
||||
@Test
|
||||
void testWithDockerDaemon() {
|
||||
DockerUtils dockerUtils = new DockerUtils(nullScript)
|
||||
assertThat(dockerUtils.withDockerDaemon(), is(true))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testWithoutDockerDaemon() {
|
||||
shellCallRule.setReturnValue('docker ps -q > /dev/null', 1)
|
||||
DockerUtils dockerUtils = new DockerUtils(nullScript)
|
||||
assertThat(dockerUtils.withDockerDaemon(), is(false))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testOnKubernetes() {
|
||||
nullScript.env.ON_K8S = 'true'
|
||||
DockerUtils dockerUtils = new DockerUtils(nullScript)
|
||||
assertThat(dockerUtils.onKubernetes(), is(true))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMoveImageKubernetes() {
|
||||
shellCallRule.setReturnValue('docker ps -q > /dev/null', 1)
|
||||
DockerUtils dockerUtils = new DockerUtils(nullScript)
|
||||
dockerUtils.moveImage(
|
||||
[
|
||||
registryUrl: 'https://my.source.registry:44444',
|
||||
image: 'sourceImage:sourceTag'
|
||||
],
|
||||
[
|
||||
registryUrl: 'https://my.registry:55555',
|
||||
image: 'testImage:tag',
|
||||
credentialsId: 'testCredentialsId'
|
||||
]
|
||||
)
|
||||
|
||||
assertThat(shellCallRule.shell, hasItem('skopeo copy --src-tls-verify=false --dest-tls-verify=false --dest-creds=\'registryUser\':\'********\' docker://my.source.registry:44444/sourceImage:sourceTag docker://my.registry:55555/testImage:tag'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetRegistryFromUrl() {
|
||||
DockerUtils dockerUtils = new DockerUtils(nullScript)
|
||||
assertThat(dockerUtils.getRegistryFromUrl('https://my.registry.com:55555'), is('my.registry.com:55555'))
|
||||
assertThat(dockerUtils.getRegistryFromUrl('http://my.registry.com:55555'), is('my.registry.com:55555'))
|
||||
assertThat(dockerUtils.getRegistryFromUrl('https://my.registry.com'), is('my.registry.com'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetProtocolFromUrl() {
|
||||
DockerUtils dockerUtils = new DockerUtils(nullScript)
|
||||
assertThat(dockerUtils.getProtocolFromUrl('https://my.registry.com:55555'), is('https'))
|
||||
assertThat(dockerUtils.getProtocolFromUrl('http://my.registry.com:55555'), is('http'))
|
||||
}
|
||||
}
|
@ -9,6 +9,7 @@ import org.junit.rules.RuleChain
|
||||
import util.BasePiperTest
|
||||
import util.JenkinsLoggingRule
|
||||
import util.JenkinsShellCallRule
|
||||
import util.LibraryLoadingTestExecutionListener
|
||||
import util.Rules
|
||||
|
||||
import static org.hamcrest.Matchers.*
|
||||
@ -24,6 +25,55 @@ class JenkinsUtilsTest extends BasePiperTest {
|
||||
.around(shellRule)
|
||||
.around(loggingRule)
|
||||
|
||||
JenkinsUtils jenkinsUtils
|
||||
Object currentBuildMock
|
||||
Object rawBuildMock
|
||||
Object jenkinsInstanceMock
|
||||
Object parentMock
|
||||
|
||||
Map triggerCause
|
||||
|
||||
|
||||
@Before
|
||||
void init() throws Exception {
|
||||
jenkinsUtils = new JenkinsUtils() {
|
||||
def getCurrentBuildInstance() {
|
||||
return currentBuildMock
|
||||
}
|
||||
|
||||
def getActiveJenkinsInstance() {
|
||||
return jenkinsInstanceMock
|
||||
}
|
||||
}
|
||||
LibraryLoadingTestExecutionListener.prepareObjectInterceptors(jenkinsUtils)
|
||||
|
||||
jenkinsInstanceMock = new Object()
|
||||
LibraryLoadingTestExecutionListener.prepareObjectInterceptors(jenkinsInstanceMock)
|
||||
|
||||
parentMock = new Object() {
|
||||
|
||||
}
|
||||
LibraryLoadingTestExecutionListener.prepareObjectInterceptors(parentMock)
|
||||
|
||||
rawBuildMock = new Object() {
|
||||
def getParent() {
|
||||
return parentMock
|
||||
}
|
||||
def getCause(type) {
|
||||
return triggerCause
|
||||
}
|
||||
|
||||
}
|
||||
LibraryLoadingTestExecutionListener.prepareObjectInterceptors(rawBuildMock)
|
||||
|
||||
currentBuildMock = new Object() {
|
||||
def number
|
||||
def getRawBuild() {
|
||||
return rawBuildMock
|
||||
}
|
||||
}
|
||||
LibraryLoadingTestExecutionListener.prepareObjectInterceptors(currentBuildMock)
|
||||
}
|
||||
@Test
|
||||
void testNodeAvailable() {
|
||||
def result = jenkinsUtils.nodeAvailable()
|
||||
@ -42,4 +92,21 @@ class JenkinsUtilsTest extends BasePiperTest {
|
||||
assertThat(result, is(false))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetIssueCommentTriggerAction() {
|
||||
triggerCause = [
|
||||
comment: 'this is my test comment /n /piper test whatever',
|
||||
triggerPattern: '.*/piper ([a-z]*).*'
|
||||
]
|
||||
assertThat(jenkinsUtils.getIssueCommentTriggerAction(), is('test'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetIssueCommentTriggerActionNoAction() {
|
||||
triggerCause = [
|
||||
comment: 'this is my test comment /n whatever',
|
||||
triggerPattern: '.*/piper ([a-z]*).*'
|
||||
]
|
||||
assertThat(jenkinsUtils.getIssueCommentTriggerAction(), isEmptyOrNullString())
|
||||
}
|
||||
}
|
||||
|
@ -115,8 +115,8 @@ class WhitesourceConfigurationHelperTest extends BasePiperTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
void testExtendConfigurationFileUnifiedAgentDlang() {
|
||||
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'dlang', whitesource: [configFilePath: './config',serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'DIST - name1', productToken: '1234', userKey: '0000']], "./")
|
||||
void testExtendConfigurationFileUnifiedAgentDub() {
|
||||
WhitesourceConfigurationHelper.extendUAConfigurationFile(nullScript, utils, [scanType: 'dub', whitesource: [configFilePath: './config',serviceUrl: "http://some.host.whitesource.com/api/", orgToken: 'abcd', productName: 'DIST - name1', productToken: '1234', userKey: '0000']], "./")
|
||||
assertThat(jwfr.files['./config.847f9aec2f93de9000d5fa4e6eaace2283ae6377'],
|
||||
allOf(
|
||||
containsString("apiKey=abcd"),
|
||||
@ -126,7 +126,7 @@ class WhitesourceConfigurationHelperTest extends BasePiperTest {
|
||||
)
|
||||
)
|
||||
|
||||
assertThat(jlr.log, containsString("[Whitesource] Configuration for scanType: 'dlang' is not yet hardened, please do a quality assessment of your scan results."))
|
||||
assertThat(jlr.log, containsString("[Whitesource] Configuration for scanType: 'dub' is not yet hardened, please do a quality assessment of your scan results."))
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -11,9 +11,9 @@ import util.Rules
|
||||
import static org.junit.Assert.assertEquals
|
||||
import static org.junit.Assert.assertTrue
|
||||
|
||||
class DlangArtifactVersioningTest extends BasePiperTest{
|
||||
class DubArtifactVersioningTest extends BasePiperTest{
|
||||
|
||||
JenkinsReadJsonRule readJsonRule = new JenkinsReadJsonRule(this, 'test/resources/versioning/DlangArtifactVersioning/')
|
||||
JenkinsReadJsonRule readJsonRule = new JenkinsReadJsonRule(this, 'test/resources/versioning/DubArtifactVersioning/')
|
||||
JenkinsWriteJsonRule writeJsonRule = new JenkinsWriteJsonRule(this)
|
||||
|
||||
@Rule
|
||||
@ -24,7 +24,7 @@ class DlangArtifactVersioningTest extends BasePiperTest{
|
||||
|
||||
@Test
|
||||
void testVersioning() {
|
||||
DlangArtifactVersioning av = new DlangArtifactVersioning(nullScript, [filePath: 'dub.json'])
|
||||
DubArtifactVersioning av = new DubArtifactVersioning(nullScript, [filePath: 'dub.json'])
|
||||
assertEquals('1.2.3', av.getVersion())
|
||||
av.setVersion('1.2.3-20180101')
|
||||
assertTrue(writeJsonRule.files['dub.json'].contains('1.2.3-20180101'))
|
@ -7,6 +7,10 @@ import org.junit.rules.RuleChain
|
||||
import util.*
|
||||
|
||||
import static org.hamcrest.Matchers.containsString
|
||||
import static org.hamcrest.Matchers.hasItem
|
||||
import static org.hamcrest.Matchers.hasItems
|
||||
import static org.hamcrest.Matchers.is
|
||||
import static org.hamcrest.Matchers.not
|
||||
import static org.junit.Assert.assertThat
|
||||
|
||||
class PiperPipelineStageAcceptanceTest extends BasePiperTest {
|
||||
@ -20,22 +24,121 @@ class PiperPipelineStageAcceptanceTest extends BasePiperTest {
|
||||
.around(jlr)
|
||||
.around(jsr)
|
||||
|
||||
private List stepsCalled = []
|
||||
private Map stepParameters = [:]
|
||||
|
||||
@Before
|
||||
void init() {
|
||||
binding.variables.env.STAGE_NAME = 'Acceptance'
|
||||
helper.registerAllowedMethod('piperStageWrapper', [Map.class, Closure.class], {m, body ->
|
||||
assertThat(m.stageName, is('Acceptance'))
|
||||
return body()
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('healthExecuteCheck', [Map.class], {m ->
|
||||
stepsCalled.add('healthExecuteCheck')
|
||||
stepParameters.healthExecuteCheck = m
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('cloudFoundryDeploy', [Map.class], {m ->
|
||||
stepsCalled.add('cloudFoundryDeploy')
|
||||
stepParameters.cloudFoundryDeploy = m
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('neoDeploy', [Map.class], {m ->
|
||||
stepsCalled.add('neoDeploy')
|
||||
stepParameters.neoDeploy = m
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('gaugeExecuteTests', [Map.class], {m ->
|
||||
stepsCalled.add('gaugeExecuteTests')
|
||||
stepParameters.gaugeExecuteTests = m
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('newmanExecute', [Map.class], {m ->
|
||||
stepsCalled.add('newmanExecute')
|
||||
stepParameters.newmanExecute = m
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('uiVeri5ExecuteTests', [Map.class], {m ->
|
||||
stepsCalled.add('uiVeri5ExecuteTests')
|
||||
stepParameters.uiVeri5ExecuteTests = m
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('testsPublishResults', [Map.class], {m ->
|
||||
stepsCalled.add('testsPublishResults')
|
||||
stepParameters.testsPublishResults = m
|
||||
})
|
||||
}
|
||||
|
||||
@Test
|
||||
void testStageDefault() {
|
||||
void testAcceptanceStageDefault() {
|
||||
|
||||
jsr.step.piperPipelineStageIntegration(
|
||||
jsr.step.piperPipelineStageAcceptance(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
juStabUtils: utils
|
||||
)
|
||||
assertThat(jlr.log, containsString('Stage implementation is not provided yet.'))
|
||||
assertThat(stepsCalled, not(hasItems('cloudFoundryDeploy', 'neoDeploy', 'healthExecuteCheck', 'newmanExecute', 'uiVeri5ExecuteTests', 'gaugeExecuteTests')))
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
void testAcceptanceStageCF() {
|
||||
|
||||
jsr.step.piperPipelineStageAcceptance(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
cloudFoundryDeploy: true,
|
||||
healthExecuteCheck: true
|
||||
)
|
||||
|
||||
assertThat(stepsCalled, hasItems('cloudFoundryDeploy', 'healthExecuteCheck'))
|
||||
assertThat(stepsCalled, not(hasItem('testsPublishResults')))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testAcceptanceStageNeo() {
|
||||
|
||||
jsr.step.piperPipelineStageAcceptance(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
neoDeploy: true
|
||||
)
|
||||
assertThat(stepsCalled, hasItem('neoDeploy'))
|
||||
assertThat(stepsCalled, not(hasItem('testsPublishResults')))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testAcceptanceStageGauge() {
|
||||
|
||||
jsr.step.piperPipelineStageAcceptance(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
gaugeExecuteTests: true
|
||||
)
|
||||
assertThat(stepsCalled, hasItems('gaugeExecuteTests', 'testsPublishResults'))
|
||||
assertThat(stepParameters.testsPublishResults.gauge.archive, is(true))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testAcceptanceStageNewman() {
|
||||
|
||||
jsr.step.piperPipelineStageAcceptance(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
newmanExecute: true
|
||||
)
|
||||
assertThat(stepsCalled, hasItems('newmanExecute', 'testsPublishResults'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testAcceptanceStageUiVeri5() {
|
||||
|
||||
jsr.step.piperPipelineStageAcceptance(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
uiVeri5ExecuteTests: true
|
||||
)
|
||||
assertThat(stepsCalled, hasItems('uiVeri5ExecuteTests', 'testsPublishResults'))
|
||||
}
|
||||
}
|
||||
|
@ -7,6 +7,12 @@ import org.junit.rules.RuleChain
|
||||
import util.*
|
||||
|
||||
import static org.hamcrest.Matchers.containsString
|
||||
import static org.hamcrest.Matchers.hasItems
|
||||
import static org.hamcrest.Matchers.is
|
||||
import static org.hamcrest.Matchers.not
|
||||
import static org.junit.Assert.assertThat
|
||||
|
||||
class PiperPipelineStageAdditionalUnitTestsTest extends BasePiperTest {
|
||||
@ -20,22 +26,57 @@ class PiperPipelineStageAdditionalUnitTestsTest extends BasePiperTest {
|
||||
.around(jlr)
|
||||
.around(jsr)
|
||||
|
||||
|
||||
private List stepsCalled = []
|
||||
|
||||
@Before
|
||||
void init() {
|
||||
|
||||
binding.variables.env.STAGE_NAME = 'Additional Unit Tests'
|
||||
|
||||
helper.registerAllowedMethod('piperStageWrapper', [Map.class, Closure.class], {m, body ->
|
||||
assertThat(m.stageName, is('Additional Unit Tests'))
|
||||
return body()
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('batsExecuteTests', [Map.class], {m ->
|
||||
stepsCalled.add('batsExecuteTests')
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('karmaExecuteTests', [Map.class], {m ->
|
||||
stepsCalled.add('karmaExecuteTests')
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('testsPublishResults', [Map.class], {m ->
|
||||
stepsCalled.add('testsPublishResults')
|
||||
})
|
||||
}
|
||||
|
||||
@Test
|
||||
void testStageDefault() {
|
||||
void testAdditionalUnitTestsDefault() {
|
||||
|
||||
jsr.step.piperPipelineStageIntegration(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
)
|
||||
assertThat(jlr.log, containsString('Stage implementation is not provided yet.'))
|
||||
jsr.step.piperPipelineStageAdditionalUnitTests(script: nullScript, juStabUtils: utils)
|
||||
|
||||
assertThat(stepsCalled, not(hasItems('batsExecuteTests', 'karmaExecuteTests', 'testsPublishResults')))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testAdditionalUnitTestsWithKarmaConfig() {
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration = [runStep: ['Additional Unit Tests': [karmaExecuteTests: true]]]
|
||||
|
||||
jsr.step.piperPipelineStageAdditionalUnitTests(script: nullScript, juStabUtils: utils)
|
||||
|
||||
assertThat(stepsCalled, hasItems('karmaExecuteTests', 'testsPublishResults'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testAdditionalUnitTestsWithBats() {
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration = [runStep: ['Additional Unit Tests': [batsExecuteTests: true]]]
|
||||
|
||||
jsr.step.piperPipelineStageAdditionalUnitTests(script: nullScript, juStabUtils: utils)
|
||||
|
||||
assertThat(stepsCalled, hasItems('batsExecuteTests', 'testsPublishResults'))
|
||||
}
|
||||
}
|
||||
|
@ -4,9 +4,14 @@ import org.junit.Before
|
||||
import org.junit.Rule
|
||||
import org.junit.Test
|
||||
import org.junit.rules.RuleChain
|
||||
import util.*
|
||||
import util.BasePiperTest
|
||||
import util.JenkinsLoggingRule
|
||||
import util.JenkinsReadYamlRule
|
||||
import util.JenkinsStepRule
|
||||
import util.Rules
|
||||
|
||||
import static org.hamcrest.Matchers.containsString
|
||||
import static org.hamcrest.Matchers.hasItems
|
||||
import static org.hamcrest.Matchers.is
|
||||
import static org.junit.Assert.assertThat
|
||||
|
||||
class PiperPipelineStageBuildTest extends BasePiperTest {
|
||||
@ -20,22 +25,43 @@ class PiperPipelineStageBuildTest extends BasePiperTest {
|
||||
.around(jlr)
|
||||
.around(jsr)
|
||||
|
||||
private List stepsCalled = []
|
||||
private Map stepParameters = [:]
|
||||
|
||||
@Before
|
||||
void init() {
|
||||
|
||||
binding.variables.env.STAGE_NAME = 'Build'
|
||||
|
||||
helper.registerAllowedMethod('piperStageWrapper', [Map.class, Closure.class], {m, body ->
|
||||
assertThat(m.stageName, is('Build'))
|
||||
return body()
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('buildExecute', [Map.class], {m ->
|
||||
stepsCalled.add('buildExecute')
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('pipelineStashFilesAfterBuild', [Map.class], {m ->
|
||||
stepsCalled.add('pipelineStashFilesAfterBuild')
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('checksPublishResults', [Map.class], {m ->
|
||||
stepsCalled.add('checksPublishResults')
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('testsPublishResults', [Map.class], {m ->
|
||||
stepsCalled.add('testsPublishResults')
|
||||
stepParameters.testsPublishResults = m
|
||||
})
|
||||
}
|
||||
|
||||
@Test
|
||||
void testStageDefault() {
|
||||
void testBuildDefault() {
|
||||
|
||||
jsr.step.piperPipelineStageIntegration(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
)
|
||||
assertThat(jlr.log, containsString('Stage implementation is not provided yet.'))
|
||||
jsr.step.piperPipelineStageBuild(script: nullScript, juStabUtils: utils)
|
||||
|
||||
assertThat(stepsCalled, hasItems('buildExecute', 'checksPublishResults', 'pipelineStashFilesAfterBuild', 'testsPublishResults'))
|
||||
assertThat(stepParameters.testsPublishResults.junit.updateResults, is(true))
|
||||
}
|
||||
}
|
||||
|
@ -31,7 +31,7 @@ class PiperPipelineStageComplianceTest extends BasePiperTest {
|
||||
@Test
|
||||
void testStageDefault() {
|
||||
|
||||
jsr.step.piperPipelineStageIntegration(
|
||||
jsr.step.piperPipelineStageCompliance(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
)
|
||||
|
@ -72,6 +72,10 @@ class PiperPipelineStageInitTest extends BasePiperTest {
|
||||
stepsCalled.add('pipelineStashFilesBeforeBuild')
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('slackSendNotification', [Map.class], {m ->
|
||||
stepsCalled.add('slackSendNotification')
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
@ -110,11 +114,12 @@ class PiperPipelineStageInitTest extends BasePiperTest {
|
||||
)
|
||||
|
||||
assertThat(stepsCalled, hasItems('checkout', 'setupCommonPipelineEnvironment', 'piperInitRunStageConfiguration', 'artifactSetVersion', 'pipelineStashFilesBeforeBuild'))
|
||||
assertThat(stepsCalled, not(hasItems('slackSendNotification')))
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
void testInitOverwriteDefault() {
|
||||
void testInitNotOnProductiveBranch() {
|
||||
|
||||
binding.variables.env.BRANCH_NAME = 'testBranch'
|
||||
|
||||
@ -134,17 +139,60 @@ class PiperPipelineStageInitTest extends BasePiperTest {
|
||||
void testSetScmInfoOnCommonPipelineEnvironment() {
|
||||
//currently supported formats
|
||||
def scmInfoTestList = [
|
||||
[GIT_URL: 'https://github.com/testOrg/testRepo.git', expectedSsh: 'git@github.com:testOrg/testRepo.git', expectedHttp: 'https://github.com/testOrg/testRepo.git'],
|
||||
[GIT_URL: 'https://github.com:7777/testOrg/testRepo.git', expectedSsh: 'git@github.com:testOrg/testRepo.git', expectedHttp: 'https://github.com:7777/testOrg/testRepo.git'],
|
||||
[GIT_URL: 'git@github.com:testOrg/testRepo.git', expectedSsh: 'git@github.com:testOrg/testRepo.git', expectedHttp: 'https://github.com/testOrg/testRepo.git'],
|
||||
[GIT_URL: 'ssh://git@github.com/testOrg/testRepo.git', expectedSsh: 'ssh://git@github.com/testOrg/testRepo.git', expectedHttp: 'https://github.com/testOrg/testRepo.git'],
|
||||
[GIT_URL: 'ssh://git@github.com:7777/testOrg/testRepo.git', expectedSsh: 'ssh://git@github.com:7777/testOrg/testRepo.git', expectedHttp: 'https://github.com/testOrg/testRepo.git'],
|
||||
[GIT_URL: 'https://github.com/testOrg/testRepo.git', expectedSsh: 'git@github.com:testOrg/testRepo.git', expectedHttp: 'https://github.com/testOrg/testRepo.git', expectedOrg: 'testOrg', expectedRepo: 'testRepo'],
|
||||
[GIT_URL: 'https://github.com:7777/testOrg/testRepo.git', expectedSsh: 'git@github.com:testOrg/testRepo.git', expectedHttp: 'https://github.com:7777/testOrg/testRepo.git', expectedOrg: 'testOrg', expectedRepo: 'testRepo'],
|
||||
[GIT_URL: 'git@github.com:testOrg/testRepo.git', expectedSsh: 'git@github.com:testOrg/testRepo.git', expectedHttp: 'https://github.com/testOrg/testRepo.git', expectedOrg: 'testOrg', expectedRepo: 'testRepo'],
|
||||
[GIT_URL: 'ssh://git@github.com/testOrg/testRepo.git', expectedSsh: 'ssh://git@github.com/testOrg/testRepo.git', expectedHttp: 'https://github.com/testOrg/testRepo.git', expectedOrg: 'testOrg', expectedRepo: 'testRepo'],
|
||||
[GIT_URL: 'ssh://git@github.com:7777/testOrg/testRepo.git', expectedSsh: 'ssh://git@github.com:7777/testOrg/testRepo.git', expectedHttp: 'https://github.com/testOrg/testRepo.git', expectedOrg: 'testOrg', expectedRepo: 'testRepo'],
|
||||
[GIT_URL: 'ssh://git@github.com/path/to/testOrg/testRepo.git', expectedSsh: 'ssh://git@github.com/path/to/testOrg/testRepo.git', expectedHttp: 'https://github.com/path/to/testOrg/testRepo.git', expectedOrg: 'path/to/testOrg', expectedRepo: 'testRepo'],
|
||||
[GIT_URL: 'ssh://git@github.com/testRepo.git', expectedSsh: 'ssh://git@github.com/testRepo.git', expectedHttp: 'https://github.com/testRepo.git', expectedOrg: 'N/A', expectedRepo: 'testRepo'],
|
||||
]
|
||||
|
||||
scmInfoTestList.each {scmInfoTest ->
|
||||
jsr.step.piperPipelineStageInit.setScmInfoOnCommonPipelineEnvironment(nullScript, scmInfoTest)
|
||||
println(scmInfoTest.GIT_URL)
|
||||
assertThat(nullScript.commonPipelineEnvironment.getGitSshUrl(), is(scmInfoTest.expectedSsh))
|
||||
assertThat(nullScript.commonPipelineEnvironment.getGitHttpsUrl(), is(scmInfoTest.expectedHttp))
|
||||
assertThat(nullScript.commonPipelineEnvironment.getGithubOrg(), is(scmInfoTest.expectedOrg))
|
||||
assertThat(nullScript.commonPipelineEnvironment.getGithubRepo(), is(scmInfoTest.expectedRepo))
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testPullRequestStageStepActivation() {
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration = [
|
||||
runStep: [:]
|
||||
]
|
||||
def config = [
|
||||
pullRequestStageName: 'Pull-Request Voting',
|
||||
stepMappings : [
|
||||
karma : 'karmaExecuteTests',
|
||||
whitesource: 'whitesourceExecuteScan'
|
||||
],
|
||||
labelPrefix : 'pr_'
|
||||
]
|
||||
|
||||
def actions = ['karma', 'pr_whitesource']
|
||||
jsr.step.piperPipelineStageInit.setPullRequestStageStepActivation(nullScript, config, actions)
|
||||
|
||||
assertThat(nullScript.commonPipelineEnvironment.configuration.runStep."Pull-Request Voting".karmaExecuteTests, is(true))
|
||||
assertThat(nullScript.commonPipelineEnvironment.configuration.runStep."Pull-Request Voting".whitesourceExecuteScan, is(true))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testInitWithSlackNotification() {
|
||||
nullScript.commonPipelineEnvironment.configuration = [runStep: [Init: [slackSendNotification: true]]]
|
||||
|
||||
jsr.step.piperPipelineStageInit(script: nullScript, juStabUtils: utils, buildTool: 'maven')
|
||||
|
||||
assertThat(stepsCalled, hasItems(
|
||||
'checkout',
|
||||
'setupCommonPipelineEnvironment',
|
||||
'piperInitRunStageConfiguration',
|
||||
'artifactSetVersion',
|
||||
'slackSendNotification',
|
||||
'pipelineStashFilesBeforeBuild'
|
||||
))
|
||||
}
|
||||
}
|
||||
|
@ -35,7 +35,7 @@ class PiperPipelineStageIntegrationTest extends BasePiperTest {
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
)
|
||||
assertThat(jlr.log, containsString('Stage implementation is not provided yet.'))
|
||||
assertThat(jlr.log, containsString('No default stage implementation is provided for this stage.'))
|
||||
|
||||
}
|
||||
}
|
||||
|
@ -4,9 +4,16 @@ import org.junit.Before
|
||||
import org.junit.Rule
|
||||
import org.junit.Test
|
||||
import org.junit.rules.RuleChain
|
||||
import util.*
|
||||
import util.BasePiperTest
|
||||
import util.JenkinsLoggingRule
|
||||
import util.JenkinsReadYamlRule
|
||||
import util.JenkinsStepRule
|
||||
import util.Rules
|
||||
|
||||
import static org.hamcrest.Matchers.containsString
|
||||
import static org.hamcrest.Matchers.hasItems
|
||||
import static org.hamcrest.Matchers.is
|
||||
import static org.hamcrest.Matchers.not
|
||||
import static org.hamcrest.Matchers.nullValue
|
||||
import static org.junit.Assert.assertThat
|
||||
|
||||
class PiperPipelineStagePRVotingTest extends BasePiperTest {
|
||||
@ -20,22 +27,89 @@ class PiperPipelineStagePRVotingTest extends BasePiperTest {
|
||||
.around(jlr)
|
||||
.around(jsr)
|
||||
|
||||
private List stepsCalled = []
|
||||
private Map stepParameters = [:]
|
||||
|
||||
@Before
|
||||
void init() {
|
||||
binding.variables.env.STAGE_NAME = 'Pull-Request Voting'
|
||||
|
||||
binding.variables.env = [
|
||||
STAGE_NAME: 'Pull-Request Voting',
|
||||
BRANCH_NAME: 'PR-1'
|
||||
]
|
||||
|
||||
helper.registerAllowedMethod('piperStageWrapper', [Map.class, Closure.class], {m, body ->
|
||||
assertThat(m.stageName, is('Pull-Request Voting'))
|
||||
return body()
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('buildExecute', [Map.class], {m ->
|
||||
stepsCalled.add('buildExecute')
|
||||
stepParameters.buildExecute = m
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('checksPublishResults', [Map.class], {m ->
|
||||
stepsCalled.add('checksPublishResults')
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('testsPublishResults', [Map.class], {m ->
|
||||
stepsCalled.add('testsPublishResults')
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('karmaExecuteTests', [Map.class], {m ->
|
||||
stepsCalled.add('karmaExecuteTests')
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('whitesourceExecuteScan', [Map.class], {m ->
|
||||
stepsCalled.add('whitesourceExecuteScan')
|
||||
stepParameters.whitesourceExecuteScan = m
|
||||
m.script.commonPipelineEnvironment.setValue('whitesourceProjectNames', ['ws project - PR1'])
|
||||
|
||||
})
|
||||
}
|
||||
|
||||
@Test
|
||||
void testStageDefault() {
|
||||
void testPRVotingDefault() {
|
||||
|
||||
jsr.step.piperPipelineStageIntegration(
|
||||
nullScript.commonPipelineEnvironment.configuration = [general: [buildTool: 'maven']]
|
||||
jsr.step.piperPipelineStagePRVoting(script: nullScript, juStabUtils: utils)
|
||||
|
||||
assertThat(stepsCalled, hasItems('buildExecute', 'checksPublishResults', 'testsPublishResults'))
|
||||
assertThat(stepsCalled, not(hasItems('karmaExecuteTests', 'whitesourceExecuteScan')))
|
||||
assertThat(stepParameters.buildExecute.buildTool, is('maven'))
|
||||
assertThat(stepParameters.buildExecute.dockerRegistryUrl, nullValue())
|
||||
}
|
||||
|
||||
@Test
|
||||
void testPRVotingWithCustomSteps() {
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration = [
|
||||
general: [buildTool: 'maven'],
|
||||
runStep: ['Pull-Request Voting': [karmaExecuteTests: true, whitesourceExecuteScan: true]]
|
||||
]
|
||||
|
||||
jsr.step.piperPipelineStagePRVoting(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
)
|
||||
assertThat(jlr.log, containsString('Stage implementation is not provided yet.'))
|
||||
|
||||
assertThat(stepsCalled, hasItems( 'karmaExecuteTests', 'whitesourceExecuteScan'))
|
||||
assertThat(stepParameters.whitesourceExecuteScan.productVersion, is('PR-1'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testPRVotingDocker() {
|
||||
|
||||
nullScript.commonPipelineEnvironment.configuration = [
|
||||
general: [buildTool: 'docker'],
|
||||
runStep: ['Pull-Request Voting': [karmaExecuteTests: true, whitesourceExecuteScan: true]]
|
||||
]
|
||||
|
||||
jsr.step.piperPipelineStagePRVoting(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
)
|
||||
|
||||
assertThat(stepParameters.buildExecute.dockerRegistryUrl, is(''))
|
||||
}
|
||||
}
|
||||
|
@ -31,7 +31,7 @@ class PiperPipelineStagePerformanceTest extends BasePiperTest {
|
||||
@Test
|
||||
void testStageDefault() {
|
||||
|
||||
jsr.step.piperPipelineStageIntegration(
|
||||
jsr.step.piperPipelineStagePerformance(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
)
|
||||
|
@ -1,4 +1,4 @@
|
||||
package stages
|
||||
package templates
|
||||
|
||||
import org.junit.Before
|
||||
import org.junit.Rule
|
@ -7,6 +7,12 @@ import org.junit.rules.RuleChain
|
||||
import util.*
|
||||
|
||||
import static org.hamcrest.Matchers.containsString
|
||||
import static org.hamcrest.Matchers.hasItem
|
||||
import static org.hamcrest.Matchers.hasItems
|
||||
import static org.hamcrest.Matchers.is
|
||||
import static org.hamcrest.Matchers.not
|
||||
import static org.junit.Assert.assertThat
|
||||
|
||||
class PiperPipelineStagePromoteTest extends BasePiperTest {
|
||||
@ -20,22 +26,44 @@ class PiperPipelineStagePromoteTest extends BasePiperTest {
|
||||
.around(jlr)
|
||||
.around(jsr)
|
||||
|
||||
private List stepsCalled = []
|
||||
private Map stepParameters = [:]
|
||||
|
||||
@Before
|
||||
void init() {
|
||||
binding.variables.env.STAGE_NAME = 'Promote'
|
||||
helper.registerAllowedMethod('piperStageWrapper', [Map.class, Closure.class], {m, body ->
|
||||
assertThat(m.stageName, is('Promote'))
|
||||
|
||||
return body()
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('containerPushToRegistry', [Map.class], {m ->
|
||||
stepsCalled.add('containerPushToRegistry')
|
||||
stepParameters.containerPushToRegistry = m
|
||||
})
|
||||
}
|
||||
|
||||
@Test
|
||||
void testStageDefault() {
|
||||
void testStagePromoteDefault() {
|
||||
|
||||
jsr.step.piperPipelineStageIntegration(
|
||||
jsr.step.piperPipelineStagePromote(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
)
|
||||
assertThat(jlr.log, containsString('Stage implementation is not provided yet.'))
|
||||
assertThat(stepsCalled, not(hasItems('containerPushToRegistry')))
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
void testStagePromotePushToRegistry() {
|
||||
|
||||
jsr.step.piperPipelineStagePromote(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
containerPushToRegistry: true
|
||||
)
|
||||
|
||||
assertThat(stepsCalled, hasItem('containerPushToRegistry'))
|
||||
}
|
||||
}
|
||||
|
@ -7,6 +7,12 @@ import org.junit.rules.RuleChain
|
||||
import util.*
|
||||
|
||||
import static org.hamcrest.Matchers.containsString
|
||||
import static org.hamcrest.Matchers.hasItem
|
||||
import static org.hamcrest.Matchers.hasItems
|
||||
import static org.hamcrest.Matchers.is
|
||||
import static org.hamcrest.Matchers.not
|
||||
import static org.junit.Assert.assertThat
|
||||
|
||||
class PiperPipelineStageReleaseTest extends BasePiperTest {
|
||||
@ -20,22 +26,82 @@ class PiperPipelineStageReleaseTest extends BasePiperTest {
|
||||
.around(jlr)
|
||||
.around(jsr)
|
||||
|
||||
private List stepsCalled = []
|
||||
private Map stepParameters = [:]
|
||||
|
||||
@Before
|
||||
void init() {
|
||||
binding.variables.env.STAGE_NAME = 'Release'
|
||||
helper.registerAllowedMethod('piperStageWrapper', [Map.class, Closure.class], {m, body ->
|
||||
assertThat(m.stageName, is('Release'))
|
||||
return body()
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('healthExecuteCheck', [Map.class], {m ->
|
||||
stepsCalled.add('healthExecuteCheck')
|
||||
stepParameters.healthExecuteCheck = m
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('cloudFoundryDeploy', [Map.class], {m ->
|
||||
stepsCalled.add('cloudFoundryDeploy')
|
||||
stepParameters.cloudFoundryDeploy = m
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('neoDeploy', [Map.class], {m ->
|
||||
stepsCalled.add('neoDeploy')
|
||||
stepParameters.neoDeploy = m
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('githubPublishRelease', [Map.class], {m ->
|
||||
stepsCalled.add('githubPublishRelease')
|
||||
stepParameters.githubPublishRelease = m
|
||||
})
|
||||
}
|
||||
|
||||
@Test
|
||||
void testStageDefault() {
|
||||
void testReleaseStageDefault() {
|
||||
|
||||
jsr.step.piperPipelineStageIntegration(
|
||||
jsr.step.piperPipelineStageRelease(
|
||||
script: nullScript,
|
||||
juStabUtils: utils
|
||||
)
|
||||
assertThat(stepsCalled, not(hasItems('cloudFoundryDeploy', 'neoDeploy', 'healthExecuteCheck', 'githubPublishRelease')))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testReleaseStageCF() {
|
||||
|
||||
jsr.step.piperPipelineStageRelease(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
cloudFoundryDeploy: true,
|
||||
healthExecuteCheck: true
|
||||
)
|
||||
assertThat(jlr.log, containsString('Stage implementation is not provided yet.'))
|
||||
|
||||
assertThat(stepsCalled, hasItems('cloudFoundryDeploy', 'healthExecuteCheck'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testReleaseStageNeo() {
|
||||
|
||||
jsr.step.piperPipelineStageRelease(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
neoDeploy: true
|
||||
)
|
||||
|
||||
assertThat(stepsCalled, hasItem('neoDeploy'))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testReleaseStageGitHub() {
|
||||
|
||||
jsr.step.piperPipelineStageRelease(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
githubPublishRelease: true
|
||||
)
|
||||
|
||||
assertThat(stepsCalled, hasItem('githubPublishRelease'))
|
||||
}
|
||||
}
|
||||
|
@ -7,6 +7,10 @@ import org.junit.rules.RuleChain
|
||||
import util.*
|
||||
|
||||
import static org.hamcrest.Matchers.containsString
|
||||
import static org.hamcrest.Matchers.hasItem
|
||||
import static org.hamcrest.Matchers.hasItems
|
||||
import static org.hamcrest.Matchers.is
|
||||
import static org.hamcrest.Matchers.not
|
||||
import static org.junit.Assert.assertThat
|
||||
|
||||
class PiperPipelineStageSecurityTest extends BasePiperTest {
|
||||
@ -20,22 +24,42 @@ class PiperPipelineStageSecurityTest extends BasePiperTest {
|
||||
.around(jlr)
|
||||
.around(jsr)
|
||||
|
||||
private List stepsCalled = []
|
||||
private Map stepParameters = [:]
|
||||
|
||||
@Before
|
||||
void init() {
|
||||
binding.variables.env.STAGE_NAME = 'Security'
|
||||
helper.registerAllowedMethod('piperStageWrapper', [Map.class, Closure.class], {m, body ->
|
||||
assertThat(m.stageName, is('Security'))
|
||||
return body()
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('whitesourceExecuteScan', [Map.class], {m ->
|
||||
stepsCalled.add('whitesourceExecuteScan')
|
||||
stepParameters.whitesourceExecuteScan = m
|
||||
})
|
||||
}
|
||||
|
||||
@Test
|
||||
void testStageDefault() {
|
||||
|
||||
jsr.step.piperPipelineStageIntegration(
|
||||
jsr.step.piperPipelineStageSecurity(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
)
|
||||
assertThat(jlr.log, containsString('Stage implementation is not provided yet.'))
|
||||
assertThat(stepsCalled, not(hasItems('whitesourceExecuteScan')))
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSecurityStageWhiteSource() {
|
||||
|
||||
jsr.step.piperPipelineStageSecurity(
|
||||
script: nullScript,
|
||||
juStabUtils: utils,
|
||||
whitesourceExecuteScan: true
|
||||
)
|
||||
|
||||
assertThat(stepsCalled, hasItem('whitesourceExecuteScan'))
|
||||
}
|
||||
}
|
||||
|
@ -43,6 +43,11 @@ class PiperPipelineTest extends BasePiperTest {
|
||||
helper.registerAllowedMethod('skipDefaultCheckout', [], {skipDefaultCheckout = true})
|
||||
helper.registerAllowedMethod('timestamps', [], {timestamps = true})
|
||||
|
||||
helper.registerAllowedMethod('triggers', [Closure.class], null)
|
||||
helper.registerAllowedMethod('issueCommentTrigger', [String.class], { s ->
|
||||
assertThat(s, is('.*/piper ([a-z]*).*'))
|
||||
})
|
||||
|
||||
helper.registerAllowedMethod('stages', [Closure.class], null)
|
||||
|
||||
helper.registerAllowedMethod('stage', [String.class, Closure.class], {stageName, body ->
|
||||
|
@ -22,7 +22,7 @@ import groovy.text.SimpleTemplateEngine
|
||||
'artifactType',
|
||||
/**
|
||||
* Defines the tool which is used for building the artifact.
|
||||
* @possibleValues `dlang`, `docker`, `golang`, `maven`, `mta`, `npm`, `pip`, `sbt`
|
||||
* @possibleValues `dub`, `docker`, `golang`, `maven`, `mta`, `npm`, `pip`, `sbt`
|
||||
*/
|
||||
'buildTool',
|
||||
/**
|
||||
|
@ -25,7 +25,7 @@ import groovy.transform.Field
|
||||
'failOnError',
|
||||
/**
|
||||
* Defines the format of the test result output. `junit` would be the standard for automated build environments but you could use also the option `tap`.
|
||||
* @possibleValues `tap`
|
||||
* @possibleValues `junit`, `tap`
|
||||
*/
|
||||
'outputFormat',
|
||||
/**
|
||||
@ -98,8 +98,8 @@ void call(Map parameters = [:]) {
|
||||
sh "cat 'TEST-${config.testPackage}.tap'"
|
||||
if (config.outputFormat == 'junit') {
|
||||
dockerExecute(script: script, dockerImage: config.dockerImage, dockerWorkspace: config.dockerWorkspace, stashContent: config.stashContent) {
|
||||
sh "npm install tap-xunit -g"
|
||||
sh "cat 'TEST-${config.testPackage}.tap' | tap-xunit --package='${config.testPackage}' > TEST-${config.testPackage}.xml"
|
||||
sh "NPM_CONFIG_PREFIX=~/.npm-global npm install tap-xunit -g"
|
||||
sh "cat 'TEST-${config.testPackage}.tap' | PATH=\$PATH:~/.npm-global/bin tap-xunit --package='${config.testPackage}' > TEST-${config.testPackage}.xml"
|
||||
}
|
||||
}
|
||||
}
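For context, a minimal sketch of how this step might be configured to produce JUnit output. The parameter names follow the `outputFormat` and `testPackage` keys visible above; the package value is an assumed placeholder for illustration only.

``` groovy
// Illustrative only: request JUnit output so the TAP report is converted
// via tap-xunit inside the Docker container, as implemented above.
batsExecuteTests(
    script: this,
    outputFormat: 'junit',      // 'tap' keeps the raw TAP report and skips the conversion
    testPackage: 'piper-bats'   // assumed package name used in the TEST-<package>.xml file
)
```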
|
||||
|
117
vars/buildExecute.groovy
Normal file
@ -0,0 +1,117 @@
|
||||
import com.sap.piper.DockerUtils
|
||||
import com.sap.piper.GenerateDocumentation
|
||||
import com.sap.piper.Utils
|
||||
import com.sap.piper.ConfigurationHelper
|
||||
|
||||
import groovy.text.SimpleTemplateEngine
|
||||
import groovy.transform.Field
|
||||
|
||||
import static com.sap.piper.Prerequisites.checkScript
|
||||
|
||||
@Field String STEP_NAME = getClass().getName()
|
||||
@Field Set GENERAL_CONFIG_KEYS = [
|
||||
/**
|
||||
* Defines the tool used for the build.
|
||||
* @possibleValues `docker`, `kaniko`, `maven`, `mta`, `npm`
|
||||
*/
|
||||
'buildTool',
|
||||
/** For Docker builds only (mandatory): name of the image to be built. */
|
||||
'dockerImageName',
|
||||
/** For Docker builds only: Defines the registry URL where the image should be pushed to, incl. the protocol like `https://my.registry.com`. If it is not defined, the image will not be pushed to a registry.*/
|
||||
'dockerRegistryUrl',
|
||||
]
|
||||
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus([
|
||||
|
||||
/** Only for Docker builds on the local daemon: Defines the build options passed to the `docker build` call.*/
|
||||
'containerBuildOptions',
|
||||
/** For custom build types: Defines the command to be executed within the `dockerImage` in order to execute the build. */
|
||||
'dockerCommand',
|
||||
/** For custom build types: Image to be used for builds in case they should run inside a custom Docker container */
|
||||
'dockerImage',
|
||||
/** For Docker builds only (mandatory): tag of the image to be built. */
|
||||
'dockerImageTag',
|
||||
])
|
||||
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS
|
||||
|
||||
/**
|
||||
* This step serves as a generic entry point in pipelines for building artifacts.
|
||||
*
|
||||
* You can use pre-defined `buildTool`s.
|
||||
*
|
||||
* Alternatively you can define a command via `dockerCommand` which should be executed in `dockerImage`.<br />
|
||||
* This allows you to trigger any build tool using a defined Docker container which provides the required build infrastructure.
|
||||
*
|
||||
* When using `buildTool: docker` or `buildTool: kaniko` the created container image is uploaded to a container registry.<br />
|
||||
* You need to make sure that the required credentials are provided to the step.
|
||||
*
|
||||
* For all other `buildTool`s the artifact will just be stored in the workspace and could then be `stash`ed for later use.
|
||||
*
|
||||
*/
|
||||
@GenerateDocumentation
|
||||
void call(Map parameters = [:]) {
|
||||
handlePipelineStepErrors (stepName: STEP_NAME, stepParameters: parameters) {
|
||||
final script = checkScript(this, parameters) ?: this
|
||||
def utils = parameters.juStabUtils ?: new Utils()
|
||||
// handle deprecated parameters
|
||||
// load default & individual configuration
|
||||
Map config = ConfigurationHelper.newInstance(this)
|
||||
.loadStepDefaults()
|
||||
.mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS)
|
||||
.mixinStepConfig(script.commonPipelineEnvironment, STEP_CONFIG_KEYS)
|
||||
.mixinStageConfig(script.commonPipelineEnvironment, parameters.stageName?:env.STAGE_NAME, STEP_CONFIG_KEYS)
|
||||
.mixin(parameters, PARAMETER_KEYS)
|
||||
.addIfEmpty('dockerImageTag', script.commonPipelineEnvironment.getArtifactVersion())
|
||||
.use()
|
||||
|
||||
// telemetry reporting
|
||||
utils.pushToSWA([stepParam1: config.buildTool, 'buildTool': config.buildTool], config)
|
||||
|
||||
switch(config.buildTool){
|
||||
case 'maven':
|
||||
mavenExecute script: script
|
||||
break
|
||||
case 'mta':
|
||||
mtaBuild script: script
|
||||
break
|
||||
case 'npm':
|
||||
npmExecute script: script
|
||||
break
|
||||
case ['docker', 'kaniko']:
|
||||
DockerUtils dockerUtils = new DockerUtils(script)
|
||||
if (config.buildTool == 'docker' && !dockerUtils.withDockerDaemon()) {
|
||||
config.buildTool = 'kaniko'
|
||||
echo "[${STEP_NAME}] No Docker daemon available, thus switching to Kaniko build"
|
||||
}
|
||||
|
||||
ConfigurationHelper.newInstance(this, config)
|
||||
.withMandatoryProperty('dockerImageName')
|
||||
.withMandatoryProperty('dockerImageTag')
|
||||
|
||||
def dockerImageNameAndTag = "${config.dockerImageName}:${config.dockerImageTag}"
|
||||
|
||||
if (config.buildTool == 'kaniko') {
|
||||
def containerImageNameAndTag = config.dockerRegistryUrl ? "${dockerUtils.getRegistryFromUrl(config.dockerRegistryUrl)}/${dockerImageNameAndTag}" : ''
|
||||
kanikoExecute script: script, containerImageNameAndTag: containerImageNameAndTag
|
||||
} else {
|
||||
def dockerBuildImage = docker.build(dockerImageNameAndTag, "${config.containerBuildOptions ?: ''} .")
|
||||
//only push if registry is defined
|
||||
if (config.dockerRegistryUrl) {
|
||||
containerPushToRegistry script: script, dockerBuildImage: dockerBuildImage, dockerRegistryUrl: config.dockerRegistryUrl
|
||||
}
|
||||
}
|
||||
script.commonPipelineEnvironment.setValue('containerImage', dockerImageNameAndTag)
|
||||
break
|
||||
default:
|
||||
if (config.dockerImage && config.dockerCommand) {
|
||||
dockerExecute(
|
||||
script: script,
|
||||
dockerImage: config.dockerImage,
|
||||
) {
|
||||
sh "${config.dockerCommand}"
|
||||
}
|
||||
} else {
|
||||
error "[${STEP_NAME}] buildTool not set and no dockerImage & dockerCommand provided."
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
130
vars/containerPushToRegistry.groovy
Normal file
@ -0,0 +1,130 @@
import com.sap.piper.GenerateDocumentation
import com.sap.piper.Utils
import com.sap.piper.ConfigurationHelper
import com.sap.piper.DockerUtils
import groovy.transform.Field

import static com.sap.piper.Prerequisites.checkScript

@Field String STEP_NAME = getClass().getName()
@Field Set GENERAL_CONFIG_KEYS = [
    /**
     * Defines the id of the Jenkins username/password credentials containing the credentials for the target Docker registry.
     */
    'dockerCredentialsId',
    /** Defines the registry url where the image should be pushed to, incl. the protocol like `https://my.registry.com`*/
    'dockerRegistryUrl',
]
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus([
    /** Not supported yet - Docker archive to be pushed to registry*/
    'dockerArchive',
    /** For images built locally on the Docker daemon, reference to the image object resulting from `docker.build` execution */
    'dockerBuildImage',
    /** Defines the name (incl. tag) of the target image*/
    'dockerImage',
    /**
     * Only if no Docker daemon is available on your Jenkins image: Docker image to be used for [Skopeo](https://github.com/containers/skopeo) calls.
     * Unfortunately, no proper image is known to be available.
     * A simple custom Dockerfile could look as follows: <br>
     * ```
     * FROM fedora:29
     * RUN dnf install -y skopeo
     * ```
     */
    'skopeoImage',
    /** Defines the name (incl. tag) of the source image to be pushed to a new image defined in `dockerImage`.<br>
     * This is helpful for moving images from one location to another.
     */
    'sourceImage',
    /** Defines the registry url from where the image should optionally be pulled, incl. the protocol like `https://my.registry.com`*/
    'sourceRegistryUrl',
    /** Defines if the image should be tagged as `latest`*/
    'tagLatest'
])
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS

/**
 * This step allows you to push a Docker image into a dedicated container registry.
 *
 * By default an image available via the local Docker daemon will be pushed.
 *
 * In case you want to pull an existing image from a remote container registry, a source image and source registry need to be specified.<br />
 * This makes it possible to move an image from one registry to another.
 */
@GenerateDocumentation
void call(Map parameters = [:]) {
    handlePipelineStepErrors (stepName: STEP_NAME, stepParameters: parameters) {
        final script = checkScript(this, parameters) ?: this

        // load default & individual configuration
        Map config = ConfigurationHelper.newInstance(this)
            .loadStepDefaults()
            .mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS)
            .mixinStepConfig(script.commonPipelineEnvironment, STEP_CONFIG_KEYS)
            .mixinStageConfig(script.commonPipelineEnvironment, parameters.stageName?:env.STAGE_NAME, STEP_CONFIG_KEYS)
            .mixin(parameters, PARAMETER_KEYS)
            .addIfEmpty('sourceImage', script.commonPipelineEnvironment.getValue('containerImage'))
            .addIfEmpty('sourceRegistryUrl', script.commonPipelineEnvironment.getValue('containerRegistryUrl'))
            .withMandatoryProperty('dockerCredentialsId')
            .withMandatoryProperty('dockerRegistryUrl')
            .use()

        DockerUtils dockerUtils = new DockerUtils(script)

        if (config.sourceRegistryUrl) {
            config.sourceRegistry = dockerUtils.getRegistryFromUrl(config.sourceRegistryUrl)
        }

        // telemetry reporting
        new Utils().pushToSWA([
            step: STEP_NAME
        ], config)

        if (!config.dockerImage)
            config.dockerImage = config.sourceImage

        if (dockerUtils.withDockerDaemon()) {

            //Prevent NullPointerException in case no dockerImage nor dockerBuildImage is provided
            if (!config.dockerImage && !config.dockerBuildImage) {
                error "[${STEP_NAME}] Please provide a dockerImage (either in your config.yml or via step parameter)."
            }
            config.dockerBuildImage = config.dockerBuildImage?:docker.image(config.dockerImage)

            if (config.sourceRegistry && config.sourceImage) {

                def sourceBuildImage = docker.image(config.sourceImage)
                docker.withRegistry(config.sourceRegistryUrl) {
                    sourceBuildImage.pull()
                }
                sh "docker tag ${config.sourceRegistry}/${config.sourceImage} ${config.dockerImage}"
            }

            docker.withRegistry(
                config.dockerRegistryUrl,
                config.dockerCredentialsId
            ) {
                config.dockerBuildImage.push()
                if (config.tagLatest)
                    config.dockerBuildImage.push('latest')
            }
        } else {
            //handling for Kubernetes case
            dockerExecute(
                script: script,
                dockerImage: config.skopeoImage
            ) {

                if (!config.dockerArchive && !config.dockerBuildImage) {
                    dockerUtils.moveImage([image: config.sourceImage, registryUrl: config.sourceRegistryUrl], [image: config.dockerImage, registryUrl: config.dockerRegistryUrl, credentialsId: config.dockerCredentialsId])
                    if (config.tagLatest) {
                        def latestImage = "${config.dockerImage.split(':')[0]}:latest"
                        dockerUtils.moveImage([image: config.sourceImage, registryUrl: config.sourceRegistryUrl], [image: latestImage, registryUrl: config.dockerRegistryUrl, credentialsId: config.dockerCredentialsId])
                    }
                } else {
                    error "[${STEP_NAME}] Running on Kubernetes: only moving images from one registry to another is supported."
                }
            }
        }
    }
}
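A sketch of how this step could be called to copy an image between two registries; the registry URLs, image names, and credentials id are placeholders:

```groovy
// Hypothetical call moving an image from a source to a target registry.
containerPushToRegistry script: this,
    sourceImage: 'my-app:1.0.0',
    sourceRegistryUrl: 'https://source.registry.example.com',
    dockerImage: 'my-app:1.0.0',
    dockerRegistryUrl: 'https://target.registry.example.com',
    dockerCredentialsId: 'target-registry-credentials',
    tagLatest: true
```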
@ -106,6 +106,15 @@ import hudson.AbortException
/**
 * Executes a closure inside a container in a kubernetes pod.
 * Proxy environment variables defined on the Jenkins machine are also available in the container.
 *
 * By default, the jnlp agent defined for the kubernetes-plugin will be used (see https://github.com/jenkinsci/kubernetes-plugin#pipeline-support).
 *
 * It is possible to define a custom jnlp agent image by
 *
 * 1. Defining the jnlp image via environment variable JENKINS_JNLP_IMAGE in the Kubernetes landscape
 * 2. Defining the image via config (`jenkinsKubernetes.jnlpAgent`)
 *
 * Option 1 will take precedence over option 2.
 */
@GenerateDocumentation
void call(Map parameters = [:], body) {
@ -262,10 +271,17 @@ private void unstashWorkspace(config, prefix) {
}

private List getContainerList(config) {
    def result = [[
        name: 'jnlp',
        image: config.jenkinsKubernetes.jnlpAgent
    ]]

    //If no custom jnlp agent is provided, the default jnlp agent (jenkins/jnlp-slave) as defined in the plugin is used, see https://github.com/jenkinsci/kubernetes-plugin#pipeline-support
    def result = []

    //allow definition of the jnlp image via environment variable JENKINS_JNLP_IMAGE in the Kubernetes landscape or via config as fallback
    if (env.JENKINS_JNLP_IMAGE || config.jenkinsKubernetes.jnlpAgent) {
        result.push([
            name: 'jnlp',
            image: env.JENKINS_JNLP_IMAGE ?: config.jenkinsKubernetes.jnlpAgent
        ])
    }
    config.containerMap.each { imageName, containerName ->
        def containerPullImage = config.containerPullImageFlags?.get(imageName)
        def containerSpec = [
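Assuming `jenkinsKubernetes.jnlpAgent` can be supplied the same way as other step configuration, a custom jnlp agent could be set roughly as sketched below (the image and build container are placeholders; option 1 via `JENKINS_JNLP_IMAGE` would still take precedence):

```groovy
// Sketch only: custom jnlp agent via configuration; image names are placeholders.
dockerExecuteOnKubernetes(
    script: this,
    jenkinsKubernetes: [jnlpAgent: 'jenkins/jnlp-slave:latest'],
    dockerImage: 'maven:3.6-jdk-8'
) {
    sh 'mvn --version'
}
```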
@ -98,7 +98,7 @@ void call(Map parameters = [:]) {
Map getLastRelease(config, TOKEN){
    def result = [:]

    def response = httpRequest "${config.githubApiUrl}/repos/${config.githubOrg}/${config.githubRepo}/releases/latest?access_token=${TOKEN}"
    def response = httpRequest url: "${config.githubApiUrl}/repos/${config.githubOrg}/${config.githubRepo}/releases/latest?access_token=${TOKEN}", validResponseCodes: '100:500'
    if (response.status == 200) {
        result = readJSON text: response.content
    } else {
@ -3,8 +3,7 @@ import com.cloudbees.groovy.cps.NonCPS
import com.sap.piper.GenerateDocumentation
import com.sap.piper.ConfigurationHelper
import com.sap.piper.analytics.InfluxData

import groovy.text.SimpleTemplateEngine
import groovy.text.GStringTemplateEngine
import groovy.transform.Field
import hudson.AbortException

@ -121,7 +120,7 @@ private String formatErrorMessage(Map config, error){
    stepName: config.stepName,
    stepParameters: (config.stepParameters?.verbose == true) ? config.stepParameters?.toString() : '*** to show step parameters, set verbose:true in general pipeline configuration\n*** WARNING: this may reveal sensitive information. ***'
]
return SimpleTemplateEngine
return GStringTemplateEngine
    .newInstance()
    .createTemplate(libraryResource('com.sap.piper/templates/error.log'))
    .make(binding)

@ -67,6 +67,7 @@ void call(Map parameters = [:]) {
    .mixin(parameters, PARAMETER_KEYS)
    .use()

// telemetry reporting
new Utils().pushToSWA([
    step: STEP_NAME
], config)
@ -1,6 +1,9 @@
void call(parameters) {
    pipeline {
        agent none
        triggers {
            issueCommentTrigger('.*/piper ([a-z]*).*')
        }
        options {
            skipDefaultCheckout()
            timestamps()
@ -9,7 +12,7 @@ void call(parameters) {
        stage('Init') {
            steps {
                library 'piper-lib-os'
                piperPipelineStageInit script: parameters.script, customDefaults: parameters.customDefaults
                piperPipelineStageInit script: parameters.script, customDefaults: ['com.sap.piper/pipeline/stageOrdinals.yml'].plus(parameters.customDefaults ?: [])
            }
        }
        stage('Pull-Request Voting') {
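For reference, the comment trigger pattern added above matches pull-request comments such as `/piper karma`; a quick standalone check in Groovy:

```groovy
// Illustrative check of the issueCommentTrigger pattern used above.
assert '/piper karma' ==~ '.*/piper ([a-z]*).*'
assert !('just a regular PR comment' ==~ '.*/piper ([a-z]*).*')
```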
@ -1,4 +1,5 @@
import com.sap.piper.ConfigurationHelper
import com.sap.piper.GenerateStageDocumentation
import com.sap.piper.Utils
import groovy.transform.Field

@ -7,9 +8,37 @@ import static com.sap.piper.Prerequisites.checkScript
@Field String STEP_NAME = getClass().getName()

@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus([])
@Field STAGE_STEP_KEYS = [
    /** For Cloud Foundry use-cases: Performs deployment to Cloud Foundry space/org. */
    'cloudFoundryDeploy',
    /** Performs behavior-driven tests using Gauge test framework against the deployed application/service. */
    'gaugeExecuteTests',
    /**
     * Performs health check in order to prove one aspect of operational readiness.
     * In order to be able to respond to health checks from infrastructure components (like load balancers) it is important to provide one unprotected application endpoint which allows a judgement about the health of your application.
     */
    'healthExecuteCheck',
    /** For Neo use-cases: Performs deployment to Neo landscape. */
    'neoDeploy',
    /** Performs API testing using Newman against the deployed application/service. */
    'newmanExecute',
    /** Publishes test results to Jenkins. It will automatically be active in cases tests are executed. */
    'testsPublishResults',
    /** Performs end-to-end UI testing using UIVeri5 test framework against the deployed application/service. */
    'uiVeri5ExecuteTests'
]
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus(STAGE_STEP_KEYS)
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS

/**
 * In this stage the application/service is typically deployed and automated acceptance tests are executed.<br />
 * This is to make sure that
 *
 * * new functionality is tested end-to-end
 * * there is no end-to-end regression in existing functionality
 *
 */
@GenerateStageDocumentation(defaultStageName = "Acceptance")
void call(Map parameters = [:]) {

    def script = checkScript(this, parameters) ?: this
@ -22,6 +51,12 @@ void call(Map parameters = [:]) {
        .mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS)
        .mixinStageConfig(script.commonPipelineEnvironment, stageName, STEP_CONFIG_KEYS)
        .mixin(parameters, PARAMETER_KEYS)
        .addIfEmpty('cloudFoundryDeploy', script.commonPipelineEnvironment.configuration.runStep?.get(stageName)?.cloudFoundryDeploy)
        .addIfEmpty('gaugeExecuteTests', script.commonPipelineEnvironment.configuration.runStep?.get(stageName)?.gaugeExecuteTests)
        .addIfEmpty('healthExecuteCheck', script.commonPipelineEnvironment.configuration.runStep?.get(stageName)?.healthExecuteCheck)
        .addIfEmpty('neoDeploy', script.commonPipelineEnvironment.configuration.runStep?.get(stageName)?.neoDeploy)
        .addIfEmpty('newmanExecute', script.commonPipelineEnvironment.configuration.runStep?.get(stageName)?.newmanExecute)
        .addIfEmpty('uiVeri5ExecuteTests', script.commonPipelineEnvironment.configuration.runStep?.get(stageName)?.uiVeri5ExecuteTests)
        .use()

    piperStageWrapper (script: script, stageName: stageName) {
@ -29,8 +64,51 @@ void call(Map parameters = [:]) {
        // telemetry reporting
        utils.pushToSWA([step: STEP_NAME], config)

        //ToDO: provide stage implementation
        echo "${STEP_NAME}: Stage implementation is not provided yet. You can extend the stage using the provided stage extension mechanism."

        if (config.cloudFoundryDeploy) {
            durationMeasure(script: script, measurementName: 'deploy_test_duration') {
                cloudFoundryDeploy script: script
            }
        }

        if (config.neoDeploy) {
            durationMeasure(script: script, measurementName: 'deploy_test_duration') {
                neoDeploy script: script
            }
        }

        if (config.healthExecuteCheck) {
            healthExecuteCheck script: script
        }


        def publishMap = [script: script]
        def publishResults = false

        if (config.gaugeExecuteTests) {
            durationMeasure(script: script, measurementName: 'gauge_duration') {
                publishResults = true
                gaugeExecuteTests script: script
                publishMap += [gauge: [archive: true]]
            }
        }

        if (config.newmanExecute) {
            durationMeasure(script: script, measurementName: 'newman_duration') {
                publishResults = true
                newmanExecute script: script
            }
        }

        if (config.uiVeri5ExecuteTests) {
            durationMeasure(script: script, measurementName: 'uiveri5_duration') {
                publishResults = true
                uiVeri5ExecuteTests script: script
            }
        }

        if (publishResults) {
            testsPublishResults publishMap
        }
    }
}
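The steps above only run when they are switched on; a sketch of activating some of them explicitly via stage parameters (in a real project this would typically come from the stage configuration or pull-request actions instead):

```groovy
// Hypothetical explicit activation of acceptance steps.
piperPipelineStageAcceptance script: this,
    cloudFoundryDeploy: true,
    healthExecuteCheck: true,
    newmanExecute: true
```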
@ -1,4 +1,5 @@
import com.sap.piper.ConfigurationHelper
import com.sap.piper.GenerateStageDocumentation
import com.sap.piper.Utils
import groovy.transform.Field

@ -7,9 +8,22 @@ import static com.sap.piper.Prerequisites.checkScript
@Field String STEP_NAME = getClass().getName()

@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus([])
@Field STAGE_STEP_KEYS = [
    /** Executes bats tests which are for example suitable for testing Docker images via a shell.*/
    'batsExecuteTests',
    /** Executes karma tests which are for example suitable for OPA5 testing as well as QUnit testing of SAP UI5 apps.*/
    'karmaExecuteTests',
    /** Publishes test results to Jenkins. It will automatically be active in cases tests are executed. */
    'testsPublishResults'
]
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus(STAGE_STEP_KEYS)
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS

/**
 * In this stage unit tests, which can not or should not be executed in the central build environment, are executed.<br />
 * These are for example Karma (OPA5 & QUnit) tests.
 */
@GenerateStageDocumentation(defaultStageName = 'Additional Unit Tests')
void call(Map parameters = [:]) {

    def script = checkScript(this, parameters) ?: this
@ -22,6 +36,8 @@ void call(Map parameters = [:]) {
        .mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS)
        .mixinStageConfig(script.commonPipelineEnvironment, stageName, STEP_CONFIG_KEYS)
        .mixin(parameters, PARAMETER_KEYS)
        .addIfEmpty('batsExecuteTests', script.commonPipelineEnvironment.configuration.runStep?.get(stageName)?.batsExecuteTests)
        .addIfEmpty('karmaExecuteTests', script.commonPipelineEnvironment.configuration.runStep?.get(stageName)?.karmaExecuteTests)
        .use()

    piperStageWrapper (script: script, stageName: stageName) {
@ -29,8 +45,18 @@ void call(Map parameters = [:]) {
        // telemetry reporting
        utils.pushToSWA([step: STEP_NAME], config)

        //ToDO: provide stage implementation
        echo "${STEP_NAME}: Stage implementation is not provided yet. You can extend the stage using the provided stage extension mechanism."
        if (config.batsExecuteTests) {
            durationMeasure(script: script, measurementName: 'bats_duration') {
                batsExecuteTests script: script
                testsPublishResults script: script
            }
        }

        if (config.karmaExecuteTests) {
            durationMeasure(script: script, measurementName: 'karma_duration') {
                karmaExecuteTests script: script
                testsPublishResults script: script
            }
        }
    }
}
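Following the same pattern as for the Acceptance stage, a brief hypothetical activation of karma tests in this stage:

```groovy
// Hypothetical activation of karma tests for the Additional Unit Tests stage.
piperPipelineStageAdditionalUnitTests script: this, karmaExecuteTests: true
```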
@ -1,4 +1,5 @@
import com.sap.piper.ConfigurationHelper
import com.sap.piper.GenerateStageDocumentation
import com.sap.piper.Utils
import groovy.transform.Field

@ -7,9 +8,31 @@ import static com.sap.piper.Prerequisites.checkScript
@Field String STEP_NAME = getClass().getName()

@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus([])
@Field STAGE_STEP_KEYS = [
    /** Starts build execution. This is always being executed.*/
    'buildExecute',
    /**
     * Executes stashing of files after build execution.<br />
     * Build results are stashed with stash name `buildResult`.
     *
     * **Note: Please make sure that your build artifacts are contained here since this stash is the foundation for subsequent tests and checks, e.g. deployment to a test landscape.**
     **/
    'pipelineStashFilesAfterBuild',
    /** Publishes test results to Jenkins. It will always be active. */
    'testsPublishResults',
    /** Publishes check results to Jenkins. It will always be active. */
    'checksPublishResults'
]
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus(STAGE_STEP_KEYS)
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS

/**
 * In this stage a build is executed which typically also executes tests and code checks.
 *
 * The type of build is defined using the configuration `buildTool`, see also step [buildExecute](../steps/buildExecute.md).
 *
 */
@GenerateStageDocumentation(defaultStageName = 'Build')
void call(Map parameters = [:]) {

    def script = checkScript(this, parameters) ?: this
@ -29,8 +52,13 @@ void call(Map parameters = [:]) {
        // telemetry reporting
        utils.pushToSWA([step: STEP_NAME], config)

        //ToDO: provide stage implementation
        echo "${STEP_NAME}: Stage implementation is not provided yet. You can extend the stage using the provided stage extension mechanism."
        durationMeasure(script: script, measurementName: 'build_duration') {

            buildExecute script: script
            pipelineStashFilesAfterBuild script: script

            testsPublishResults script: script, junit: [updateResults: true]
            checksPublishResults script: script
        }
    }
}
@ -1,4 +1,5 @@
import com.sap.piper.ConfigurationHelper
import com.sap.piper.GenerateStageDocumentation
import com.sap.piper.Utils
import groovy.transform.Field

@ -7,9 +8,16 @@ import static com.sap.piper.Prerequisites.checkScript
@Field String STEP_NAME = getClass().getName()

@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus([])
@Field STAGE_STEP_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus(STAGE_STEP_KEYS)
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS

/**
 * In this stage important compliance-relevant checks will be conducted.<br />
 *
 * Currently, there is no default implementation of the stage. You can expect this soon ...
 */
@GenerateStageDocumentation(defaultStageName = 'Compliance')
void call(Map parameters = [:]) {

    def script = checkScript(this, parameters) ?: this

@ -18,7 +18,8 @@ import static com.sap.piper.Prerequisites.checkScript
    'manualConfirmationTimeout'

]
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS
@Field STAGE_STEP_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus(STAGE_STEP_KEYS)
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS

/**
@ -1,4 +1,6 @@
import com.sap.piper.ConfigurationHelper
import com.sap.piper.GenerateStageDocumentation
import com.sap.piper.JenkinsUtils
import com.sap.piper.Utils
import groovy.transform.Field

@ -7,14 +9,35 @@ import static com.sap.piper.Prerequisites.checkScript
@Field String STEP_NAME = getClass().getName()

@Field Set GENERAL_CONFIG_KEYS = [
    /**
     * Defines the build tool used.
     * @possibleValues `docker`, `kaniko`, `maven`, `mta`, `npm`
     */
    'buildTool',
    /**
     * Defines the main branch for your pipeline. **Typically this is the `master` branch, which does not need to be set explicitly.** Only change this in exceptional cases.
     */
    'productiveBranch',
    /**
     * Defines the library resource containing the stash settings to be performed before and after each stage. **Caution: changing the default will break the standard behavior of the pipeline - thus only relevant when including `Init` stage into custom pipelines!**
     */
    'stashSettings',
    /**
     * Whether verbose output should be produced.
     * @possibleValues `true`, `false`
     */
    'verbose'
]
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS
@Field STAGE_STEP_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus(STAGE_STEP_KEYS)
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS

/**
 * This stage initializes the pipeline run and prepares further execution.
 *
 * It will check out your repository and perform some steps to initialize your pipeline run.
 */
@GenerateStageDocumentation(defaultStageName = 'Init')
void call(Map parameters = [:]) {

    def script = checkScript(this, parameters) ?: this
@ -54,6 +77,26 @@ void call(Map parameters = [:]) {

        piperInitRunStageConfiguration script: script, stageConfigResource: config.stageConfigResource

        // CHANGE_ID is set only for pull requests
        if (env.CHANGE_ID) {
            List prActions = []

            //get trigger action from comment like /piper action
            def jenkinsUtils = new JenkinsUtils()
            def commentTriggerAction = jenkinsUtils.getIssueCommentTriggerAction()

            if (commentTriggerAction != null) prActions.add(commentTriggerAction)

            try {
                prActions.addAll(pullRequest.getLabels().asList())
            } catch (ex) {
                echo "[${STEP_NAME}] GitHub labels could not be retrieved from Pull Request, please make sure that credentials are maintained on multi-branch job."
            }


            setPullRequestStageStepActivation(script, config, prActions)
        }

        if (env.BRANCH_NAME == config.productiveBranch) {
            if (parameters.script.commonPipelineEnvironment.configuration.runStep?.get('Init')?.slackSendNotification) {
                slackSendNotification script: script, message: "STARTED: Job <${env.BUILD_URL}|${URLDecoder.decode(env.JOB_NAME, java.nio.charset.StandardCharsets.UTF_8.name())} ${env.BUILD_DISPLAY_NAME}>", color: 'WARNING'
@ -84,7 +127,7 @@ private void checkBuildTool(config) {

private void initStashConfiguration (script, config) {
    Map stashConfiguration = readYaml(text: libraryResource(config.stashSettings))
    echo "Stash config: stashConfiguration"
    if (config.verbose) echo "Stash config: ${stashConfiguration}"
    script.commonPipelineEnvironment.configuration.stageStashes = stashConfiguration
}

@ -92,11 +135,13 @@ private void setScmInfoOnCommonPipelineEnvironment(script, scmInfo) {

    def gitUrl = scmInfo.GIT_URL

    def gitPath = ''
    if (gitUrl.startsWith('http')) {
        def httpPattern = /(https?):\/\/([^:\/]+)(?:[:\d\/]*)(.*)/
        def gitMatcher = gitUrl =~ httpPattern
        if (!gitMatcher.hasGroup() && gitMatcher.groupCount() != 3) return
        script.commonPipelineEnvironment.setGitSshUrl("git@${gitMatcher[0][2]}:${gitMatcher[0][3]}")
        gitPath = gitMatcher[0][3]
        script.commonPipelineEnvironment.setGitHttpsUrl(gitUrl)
    } else if (gitUrl.startsWith('ssh')) {
        //(.*)@([^:\/]*)(?:[:\d\/]*)(.*)
@ -105,9 +150,49 @@ private void setScmInfoOnCommonPipelineEnvironment(script, scmInfo) {
        if (!gitMatcher.hasGroup() && gitMatcher.groupCount() != 3) return
        script.commonPipelineEnvironment.setGitSshUrl(gitUrl)
        script.commonPipelineEnvironment.setGitHttpsUrl("https://${gitMatcher[0][2]}/${gitMatcher[0][3]}")
        gitPath = gitMatcher[0][3]
    }
    else if (gitUrl.indexOf('@') > 0) {
        script.commonPipelineEnvironment.setGitSshUrl(gitUrl)
        gitPath = gitUrl.split(':')[1]
        script.commonPipelineEnvironment.setGitHttpsUrl("https://${(gitUrl.split('@')[1]).replace(':', '/')}")
    }

    List gitPathParts = gitPath.split('/')
    def gitFolder = 'N/A'
    def gitRepo = 'N/A'
    switch (gitPathParts.size()) {
        case 1:
            gitRepo = gitPathParts[0].replaceAll('.git', '')
            break
        case 2:
            gitFolder = gitPathParts[0]
            gitRepo = gitPathParts[1].replaceAll('.git', '')
            break
        case { it > 3 }:
            gitRepo = gitPathParts[gitPathParts.size()-1].replaceAll('.git', '')
            gitPathParts.remove(gitPathParts.size()-1)
            gitFolder = gitPathParts.join('/')
            break
    }
    script.commonPipelineEnvironment.setGithubOrg(gitFolder)
    script.commonPipelineEnvironment.setGithubRepo(gitRepo)
}

private void setPullRequestStageStepActivation(script, config, List actions) {

    if (script.commonPipelineEnvironment.configuration.runStep == null)
        script.commonPipelineEnvironment.configuration.runStep = [:]
    if (script.commonPipelineEnvironment.configuration.runStep[config.pullRequestStageName] == null)
        script.commonPipelineEnvironment.configuration.runStep[config.pullRequestStageName] = [:]

    actions.each {action ->
        if (action.startsWith(config.labelPrefix))
            action = action.minus(config.labelPrefix)

        def stepName = config.stepMappings[action]
        if (stepName) {
            script.commonPipelineEnvironment.configuration.runStep."${config.pullRequestStageName}"."${stepName}" = true
        }
    }
}
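A small standalone sketch (not part of the commit) that mirrors the folder/repo extraction shown above for an illustrative path; note it anchors the `.git` suffix with a regex, whereas the code above uses a plain `'.git'` pattern:

```groovy
// Mirrors the org/repo split shown above; the input value is illustrative.
def gitPath = 'SAP/jenkins-library.git'
List parts = gitPath.split('/') as List
def gitFolder = parts.size() > 1 ? parts[0..-2].join('/') : 'N/A'
def gitRepo = parts[-1].replaceAll('\\.git$', '')
assert gitFolder == 'SAP'
assert gitRepo == 'jenkins-library'
```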
@ -1,4 +1,5 @@
import com.sap.piper.ConfigurationHelper
import com.sap.piper.GenerateStageDocumentation
import com.sap.piper.Utils
import groovy.transform.Field

@ -7,9 +8,15 @@ import static com.sap.piper.Prerequisites.checkScript
@Field String STEP_NAME = getClass().getName()

@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus([])
@Field STAGE_STEP_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus(STAGE_STEP_KEYS)
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS

/**
 * This stage allows you to execute project-specific integration tests.<br />
 * Typically, integration tests are very project-specific, thus they can be defined here using the [stage extension mechanism](../extensibility.md).
 */
@GenerateStageDocumentation(defaultStageName = 'Integration')
void call(Map parameters = [:]) {

    def script = checkScript(this, parameters) ?: this
@ -29,8 +36,7 @@ void call(Map parameters = [:]) {
        // telemetry reporting
        utils.pushToSWA([step: STEP_NAME], config)

        //ToDO: provide stage implementation
        echo "${STEP_NAME}: Stage implementation is not provided yet. You can extend the stage using the provided stage extension mechanism."
        echo "${STEP_NAME}: No default stage implementation is provided for this stage. You can extend the stage using the provided stage extension mechanism."

    }
}
@ -1,4 +1,5 @@
import com.sap.piper.ConfigurationHelper
import com.sap.piper.GenerateStageDocumentation
import com.sap.piper.Utils
import groovy.transform.Field

@ -6,10 +7,65 @@ import static com.sap.piper.Prerequisites.checkScript

@Field String STEP_NAME = getClass().getName()

@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus([])
@Field Set GENERAL_CONFIG_KEYS = [
    /**
     * Defines the build tool used.
     * @possibleValues `docker`, `kaniko`, `maven`, `mta`, `npm`
     */
    'buildTool'
]
@Field STAGE_STEP_KEYS = [
    /** Triggers the build execution. */
    'buildExecute',
    /** Publishes check results to Jenkins. It will always be active. */
    'checksPublishResults',
    /**
     * Executes karma tests. For example suitable for OPA5 testing as well as QUnit testing of SAP UI5 apps.<br />
     * This step is not active by default. It can be activated by:
     *
     * * using pull request comments or pull request labels (see [Advanced Pull-Request Voting](#advanced-pull-request-voting)).
     * * explicit activation via stage configuration.
     */
    'karmaExecuteTests',
    /** Publishes test results to Jenkins. It will always be active. */
    'testsPublishResults',
    /** Executes a WhiteSource scan.
     * This step is not active by default. It can be activated by:
     *
     * * using pull request comments or pull request labels (see [Advanced Pull-Request Voting](#advanced-pull-request-voting)).
     * * explicit activation via stage configuration.
     */
    'whitesourceExecuteScan'
]
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus(STAGE_STEP_KEYS)
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS

/**
 * This stage is executed for every pull-request.<br />
 * For non-Docker builds it will execute the respective build (including unit tests, static checks, ...).
 *
 * !!! note "Build Tool not in the list?"
 *
 * For build tools which are currently not in the list a custom `dockerImage` can be used with a custom `dockerCommand` as per step [buildExecute](../steps/buildExecute.md).
 *
 * For `buildTool: docker` a local Docker build will be executed in case a Docker daemon is available; if not, `buildTool: 'kaniko'` will be used instead.
 *
 * ## Advanced Pull-Request Voting
 *
 * It is possible to trigger dedicated tests/checks via:
 *
 * * pull request comments
 * * pull request labels
 *
 * The following steps are currently supported:
 *
 * | step name | comment | pull-request label |
 * | --------- | ------- | ------------------ |
 * | karmaExecuteTests | `/piper karma` | `pr_karma`
 * | whitesourceExecuteScan | `/piper whitesource` | `pr_whitesource`
 *
 */
@GenerateStageDocumentation(defaultStageName = 'Pull-Request Voting')
void call(Map parameters = [:]) {

    def script = checkScript(this, parameters) ?: this
@ -22,6 +78,8 @@ void call(Map parameters = [:]) {
        .mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS)
        .mixinStageConfig(script.commonPipelineEnvironment, stageName, STEP_CONFIG_KEYS)
        .mixin(parameters, PARAMETER_KEYS)
        .addIfEmpty('karmaExecuteTests', script.commonPipelineEnvironment.configuration.runStep?.get(stageName)?.karmaExecuteTests)
        .addIfEmpty('whitesourceExecuteScan', script.commonPipelineEnvironment.configuration.runStep?.get(stageName)?.whitesourceExecuteScan)
        .use()

    piperStageWrapper (script: script, stageName: stageName) {
@ -29,8 +87,28 @@ void call(Map parameters = [:]) {
        // telemetry reporting
        utils.pushToSWA([step: STEP_NAME], config)

        //ToDO: provide stage implementation
        echo "${STEP_NAME}: Stage implementation is not provided yet. You can extend the stage using the provided stage extension mechanism."
        durationMeasure(script: script, measurementName: 'voter_duration') {

            //prevent push to registry in case of docker/kaniko
            def dockerRegistryUrl = null
            if (config.buildTool in ['docker', 'kaniko']) {
                dockerRegistryUrl = ''
            }

            buildExecute script: script, buildTool: config.buildTool, dockerRegistryUrl: dockerRegistryUrl

            //needs to run right after build, otherwise we may face "ERROR: Test reports were found but none of them are new"
            testsPublishResults script: script
            checksPublishResults script: script

            if (config.karmaExecuteTests) {
                karmaExecuteTests script: script
                testsPublishResults script: script
            }

            if (config.whitesourceExecuteScan) {
                whitesourceExecuteScan script: script, productVersion: env.BRANCH_NAME
            }
        }
    }
}
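In practice the optional checks are switched on via the `/piper karma` comment or the `pr_karma`/`pr_whitesource` labels listed in the table above; for illustration, the equivalent direct activation via stage parameters could look like this (buildTool value is a placeholder):

```groovy
// Hypothetical direct activation of the optional voting checks.
piperPipelineStagePRVoting script: this,
    buildTool: 'maven',
    karmaExecuteTests: true,
    whitesourceExecuteScan: true
```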
@ -1,4 +1,5 @@
import com.sap.piper.ConfigurationHelper
import com.sap.piper.GenerateStageDocumentation
import com.sap.piper.Utils
import groovy.transform.Field

@ -7,9 +8,16 @@ import static com.sap.piper.Prerequisites.checkScript
@Field String STEP_NAME = getClass().getName()

@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus([])
@Field STAGE_STEP_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus(STAGE_STEP_KEYS)
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS

/**
 * In this stage important performance-relevant checks will be conducted.<br />
 *
 * Currently, there is no default implementation of the stage. You can expect this soon ...
 */
@GenerateStageDocumentation(defaultStageName = 'Performance')
void call(Map parameters = [:]) {

    def script = checkScript(this, parameters) ?: this

@ -8,7 +8,8 @@ import static com.sap.piper.Prerequisites.checkScript
@Field String STEP_NAME = getClass().getName()

@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS
@Field STAGE_STEP_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus(STAGE_STEP_KEYS)
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS

/**
@ -1,4 +1,5 @@
import com.sap.piper.ConfigurationHelper
import com.sap.piper.GenerateStageDocumentation
import com.sap.piper.Utils
import groovy.transform.Field

@ -7,9 +8,18 @@ import static com.sap.piper.Prerequisites.checkScript
@Field String STEP_NAME = getClass().getName()

@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus([])
@Field STAGE_STEP_KEYS = [
    /** For Docker builds: pushes the Docker image to a container registry. */
    'containerPushToRegistry',
]
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus(STAGE_STEP_KEYS)
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS

/**
 * This stage is responsible for promoting build artifacts to an artifact repository / container registry from where they can be used in production deployments.<br />
 *
 */
@GenerateStageDocumentation(defaultStageName = 'Promote')
void call(Map parameters = [:]) {

    def script = checkScript(this, parameters) ?: this
@ -22,6 +32,7 @@ void call(Map parameters = [:]) {
        .mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS)
        .mixinStageConfig(script.commonPipelineEnvironment, stageName, STEP_CONFIG_KEYS)
        .mixin(parameters, PARAMETER_KEYS)
        .addIfEmpty('containerPushToRegistry', script.commonPipelineEnvironment.configuration.runStep?.get(stageName)?.containerPushToRegistry)
        .use()

    piperStageWrapper (script: script, stageName: stageName) {
@ -29,8 +40,10 @@ void call(Map parameters = [:]) {
        // telemetry reporting
        utils.pushToSWA([step: STEP_NAME], config)

        //ToDO: provide stage implementation
        echo "${STEP_NAME}: Stage implementation is not provided yet. You can extend the stage using the provided stage extension mechanism."

        durationMeasure(script: script, measurementName: 'promote_duration') {
            if(config.containerPushToRegistry) {
                containerPushToRegistry script: script
            }
        }
    }
}
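The Promote stage relies on `buildExecute` having stored the built image name in the common pipeline environment (`containerImage`), which `containerPushToRegistry` then picks up as its default `sourceImage`; a sketch of the minimal extra configuration the push still needs (registry URL and credentials id are placeholders):

```groovy
// Hypothetical Promote-time push; sourceImage defaults to the 'containerImage'
// value set by buildExecute earlier in the pipeline.
containerPushToRegistry script: this,
    dockerRegistryUrl: 'https://prod.registry.example.com',
    dockerCredentialsId: 'prod-registry-credentials'
```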
@ -1,4 +1,5 @@
import com.sap.piper.ConfigurationHelper
import com.sap.piper.GenerateStageDocumentation
import com.sap.piper.Utils
import groovy.transform.Field

@ -7,9 +8,23 @@ import static com.sap.piper.Prerequisites.checkScript
@Field String STEP_NAME = getClass().getName()

@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus([])
@Field STAGE_STEP_KEYS = [
    /** For Cloud Foundry use-cases: Performs deployment to Cloud Foundry space/org. */
    'cloudFoundryDeploy',
    /** Performs health check in order to prove that deployment was successful. */
    'healthExecuteCheck',
    /** For Neo use-cases: Performs deployment to Neo landscape. */
    'neoDeploy',
    /** Publishes release information to GitHub. */
    'githubPublishRelease',
]
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus(STAGE_STEP_KEYS)
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS

/**
 * This stage is responsible for releasing/deploying artifacts into your productive landscape.<br />
 */
@GenerateStageDocumentation(defaultStageName = 'Release')
void call(Map parameters = [:]) {

    def script = checkScript(this, parameters) ?: this
@ -22,6 +37,10 @@ void call(Map parameters = [:]) {
        .mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS)
        .mixinStageConfig(script.commonPipelineEnvironment, stageName, STEP_CONFIG_KEYS)
        .mixin(parameters, PARAMETER_KEYS)
        .addIfEmpty('cloudFoundryDeploy', script.commonPipelineEnvironment.configuration.runStep?.get(stageName)?.cloudFoundryDeploy)
        .addIfEmpty('githubPublishRelease', script.commonPipelineEnvironment.configuration.runStep?.get(stageName)?.githubPublishRelease)
        .addIfEmpty('healthExecuteCheck', script.commonPipelineEnvironment.configuration.runStep?.get(stageName)?.healthExecuteCheck)
        .addIfEmpty('neoDeploy', script.commonPipelineEnvironment.configuration.runStep?.get(stageName)?.neoDeploy)
        .use()

    piperStageWrapper (script: script, stageName: stageName) {
@ -29,8 +48,25 @@ void call(Map parameters = [:]) {
        // telemetry reporting
        utils.pushToSWA([step: STEP_NAME], config)

        //ToDO: provide stage implementation
        echo "${STEP_NAME}: Stage implementation is not provided yet. You can extend the stage using the provided stage extension mechanism."
        if (config.cloudFoundryDeploy) {
            durationMeasure(script: script, measurementName: 'deploy_release_cf_duration') {
                cloudFoundryDeploy script: script
            }
        }

        if (config.neoDeploy) {
            durationMeasure(script: script, measurementName: 'deploy_release_neo_duration') {
                neoDeploy script: script
            }
        }

        if (config.healthExecuteCheck) {
            healthExecuteCheck script: script
        }

        if (config.githubPublishRelease) {
            githubPublishRelease script: script
        }

    }
}
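As with the other stages, the release steps are opt-in; a brief hypothetical activation of a Cloud Foundry deployment plus GitHub release via stage parameters:

```groovy
// Hypothetical activation of release steps for this stage.
piperPipelineStageRelease script: this,
    cloudFoundryDeploy: true,
    githubPublishRelease: true
```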
@ -1,4 +1,5 @@
import com.sap.piper.ConfigurationHelper
import com.sap.piper.GenerateStageDocumentation
import com.sap.piper.Utils
import groovy.transform.Field

@ -7,9 +8,18 @@ import static com.sap.piper.Prerequisites.checkScript
@Field String STEP_NAME = getClass().getName()

@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus([])
@Field STAGE_STEP_KEYS = [
    /** Executes a WhiteSource scan */
    'whitesourceExecuteScan'
]
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus(STAGE_STEP_KEYS)
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS

/**
 * In this stage important security-relevant checks will be conducted.<br />
 * This is to achieve a decent level of security for your application.
 */
@GenerateStageDocumentation(defaultStageName = 'Security')
void call(Map parameters = [:]) {

    def script = checkScript(this, parameters) ?: this
@ -22,6 +32,7 @@ void call(Map parameters = [:]) {
        .mixinGeneralConfig(script.commonPipelineEnvironment, GENERAL_CONFIG_KEYS)
        .mixinStageConfig(script.commonPipelineEnvironment, stageName, STEP_CONFIG_KEYS)
        .mixin(parameters, PARAMETER_KEYS)
        .addIfEmpty('whitesourceExecuteScan', script.commonPipelineEnvironment.configuration.runStep?.get(stageName)?.whitesourceExecuteScan)
        .use()

    piperStageWrapper (script: script, stageName: stageName) {
@ -29,8 +40,10 @@ void call(Map parameters = [:]) {
        // telemetry reporting
        utils.pushToSWA([step: STEP_NAME], config)

        //ToDO: provide stage implementation
        echo "${STEP_NAME}: Stage implementation is not provided yet. You can extend the stage using the provided stage extension mechanism."

        if (config.whitesourceExecuteScan) {
            durationMeasure(script: script, measurementName: 'whitesource_duration') {
                whitesourceExecuteScan script: script
            }
        }
    }
}
@ -435,7 +435,7 @@ private resolveProjectIdentifiers(script, descriptorUtils, config) {
        case 'golang':
            gav = descriptorUtils.getGoGAV(config.buildDescriptorFile, new URI(script.commonPipelineEnvironment.getGitHttpsUrl()))
            break
        case 'dlang':
        case 'dub':
            break
        case 'maven':
            gav = descriptorUtils.getMavenGAV(config.buildDescriptorFile)