import com.sap.piper.ConfigurationLoader
import com.sap.piper.ConfigurationMerger
import com.sap.piper.DefaultValueCache
import com.sap.piper.analytics.InfluxData

import groovy.json.JsonOutput

class commonPipelineEnvironment implements Serializable {
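
    // Shared, serializable state container for a single pipeline run: build/artifact metadata, git and
    // GitHub information, resolved configuration, and free-form values that steps pass to each other.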

    //Project identifier which might be used to distinguish resources which are available globally, e.g. for locking
    def projectName

    //stores the version of the artifact which is built during the pipeline run
    def artifactVersion
    def originalArtifactVersion

    //stores the build tool if it is inferred automatically, e.g. in the SAP Cloud SDK pipeline
    String buildTool

    //stores the current buildResult
    String buildResult = 'SUCCESS'

    //stores the gitCommitId as well as additional git information for the build during the pipeline run
    String gitCommitId
    String gitCommitMessage
    String gitSshUrl
    String gitHttpsUrl
    String gitBranch

    String xsDeploymentId

    //GitHub specific information
    String githubOrg
    String githubRepo

    //stores properties for a pipeline which builds an artifact and then bundles it into a container
    private Map appContainerProperties = [:]

    Map configuration = [:]
    Map containerProperties = [:]
    Map defaultConfiguration = [:]

    // Location of the file from where the configuration was parsed. See setupCommonPipelineEnvironment.groovy
    // Useful for making sure that the piper binary uses the same file when called from Jenkins.
    String configurationFile = ''

    String mtarFilePath = ""

    String abapAddonDescriptor

    private Map valueMap = [:]

    void setValue(String property, value) {
        valueMap[property] = value
    }

    def getValue(String property) {
        return valueMap.get(property)
    }
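
    // Illustrative usage (names are examples, not taken from this file): a step can stash an arbitrary
    // value on the shared environment and another step can read it back later:
    //   script.commonPipelineEnvironment.setValue('deployTarget', [org: 'myOrg', space: 'dev'])
    //   def target = script.commonPipelineEnvironment.getValue('deployTarget')
    // Values stored this way are persisted under .pipeline/commonPipelineEnvironment/custom/ by
    // writeToDisk() further down.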

    String changeDocumentId

    def reset() {
        projectName = null

        abapAddonDescriptor = null

        appContainerProperties = [:]
        artifactVersion = null
        originalArtifactVersion = null

        buildTool = null

        configuration = [:]
        containerProperties = [:]

        gitCommitId = null
        gitCommitMessage = null
        gitSshUrl = null
        gitHttpsUrl = null
        gitBranch = null

        githubOrg = null
        githubRepo = null

        mtarFilePath = null
        valueMap = [:]

        changeDocumentId = null

        InfluxData.reset()
    }

    def setAppContainerProperty(property, value) {
        appContainerProperties[property] = value
    }

    def getAppContainerProperty(property) {
        return appContainerProperties[property]
    }

    def setContainerProperty(property, value) {
        containerProperties[property] = value
    }

    def getContainerProperty(property) {
        return containerProperties[property]
    }
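
    // The methods below are thin wrappers around com.sap.piper.analytics.InfluxData. A sketch of how a
    // pipeline might record a custom field (key and value are illustrative):
    //   script.commonPipelineEnvironment.setInfluxCustomDataEntry('buildQuality', 'Release')
    // This adds the field 'buildQuality' to the 'jenkins_custom_data' measurement, which is typically
    // consumed later by the influxWriteData step.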

    // goes into measurement jenkins_custom_data
    def setInfluxCustomDataEntry(key, value) {
        InfluxData.addField('jenkins_custom_data', key, value)
    }

    // goes into measurement jenkins_custom_data
    @Deprecated // not used in library
    def getInfluxCustomData() {
        return InfluxData.getInstance().getFields().jenkins_custom_data
    }

    // goes into measurement jenkins_custom_data
    def setInfluxCustomDataTagsEntry(key, value) {
        InfluxData.addTag('jenkins_custom_data', key, value)
    }

    // goes into measurement jenkins_custom_data
    @Deprecated // not used in library
    def getInfluxCustomDataTags() {
        return InfluxData.getInstance().getTags().jenkins_custom_data
    }

    void setInfluxCustomDataMapEntry(measurement, field, value) {
        InfluxData.addField(measurement, field, value)
    }

    @Deprecated // not used in library
    def getInfluxCustomDataMap() {
        return InfluxData.getInstance().getFields()
    }

    def setInfluxCustomDataMapTagsEntry(measurement, tag, value) {
        InfluxData.addTag(measurement, tag, value)
    }

    @Deprecated // not used in library
    def getInfluxCustomDataMapTags() {
        return InfluxData.getInstance().getTags()
    }

    @Deprecated // not used in library
    def setInfluxStepData(key, value) {
        InfluxData.addField('step_data', key, value)
    }

    @Deprecated // not used in library
    def getInfluxStepData(key) {
        return InfluxData.getInstance().getFields()['step_data'][key]
    }

    @Deprecated // not used in library
    def setInfluxPipelineData(key, value) {
        InfluxData.addField('pipeline_data', key, value)
    }

    @Deprecated // not used in library
    def setPipelineMeasurement(key, value){
        setInfluxPipelineData(key, value)
    }

    @Deprecated // not used in library
    def getPipelineMeasurement(key) {
        return InfluxData.getInstance().getFields()['pipeline_data'][key]
    }

    Map getStepConfiguration(stepName, stageName = env.STAGE_NAME, includeDefaults = true) {
        Map defaults = [:]
        if (includeDefaults) {
            defaults = ConfigurationLoader.defaultGeneralConfiguration()
            defaults = ConfigurationMerger.merge(ConfigurationLoader.defaultStepConfiguration(null, stepName), null, defaults)
            defaults = ConfigurationMerger.merge(ConfigurationLoader.defaultStageConfiguration(null, stageName), null, defaults)
        }
        Map config = ConfigurationMerger.merge(configuration.get('general') ?: [:] as Map, null, defaults)
        config = ConfigurationMerger.merge(configuration.get('steps')?.get(stepName) ?: [:], null, config)
        config = ConfigurationMerger.merge(configuration.get('stages')?.get(stageName) ?: [:], null, config)
        return config
    }
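
    // Illustrative call (step and stage names are examples only): resolve the effective configuration
    // for a step within the current stage, including the library defaults:
    //   Map cfg = script.commonPipelineEnvironment.getStepConfiguration('mavenExecute', 'Build')
    // Precedence, lowest to highest: default general < default step < default stage < project general
    // < project step < project stage configuration.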

    def files = [
        [filename: '.pipeline/commonPipelineEnvironment/artifactVersion', property: 'artifactVersion'],
        [filename: '.pipeline/commonPipelineEnvironment/buildTool', property: 'buildTool'],
        [filename: '.pipeline/commonPipelineEnvironment/originalArtifactVersion', property: 'originalArtifactVersion'],
        [filename: '.pipeline/commonPipelineEnvironment/github/owner', property: 'githubOrg'],
        [filename: '.pipeline/commonPipelineEnvironment/github/repository', property: 'githubRepo'],
        [filename: '.pipeline/commonPipelineEnvironment/git/branch', property: 'gitBranch'],
        [filename: '.pipeline/commonPipelineEnvironment/git/commitId', property: 'gitCommitId'],
        [filename: '.pipeline/commonPipelineEnvironment/git/commitMessage', property: 'gitCommitMessage'],
        [filename: '.pipeline/commonPipelineEnvironment/mtarFilePath', property: 'mtarFilePath'],
        [filename: '.pipeline/commonPipelineEnvironment/abap/addonDescriptor', property: 'abapAddonDescriptor'],
    ]
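
    // The list above defines which top-level properties are mirrored as plain files under
    // .pipeline/commonPipelineEnvironment/ — presumably the same directory the piper (Go) binary uses
    // to share state when it is called from Jenkins (compare the configurationFile comment above).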

    void writeToDisk(script) {
        files.each({f ->
            writeValueToFile(script, f.filename, this[f.property])
        })

        containerProperties.each({key, value ->
            writeValueToFile(script, ".pipeline/commonPipelineEnvironment/container/${key}", value)
        })

        valueMap.each({key, value ->
            writeValueToFile(script, ".pipeline/commonPipelineEnvironment/custom/${key}", value)
        })
    }
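
    // Sketch of the on-disk layout writeToDisk produces (paths fixed by 'files' above, values illustrative):
    //   .pipeline/commonPipelineEnvironment/artifactVersion       -> 1.0.0-20200101000000
    //   .pipeline/commonPipelineEnvironment/custom/someFlag.json  -> true
    //   .pipeline/commonPipelineEnvironment/container/image.json  -> {"name":"my-image"}
    // Plain strings are written verbatim; any other value gets a '.json' suffix and is serialized via
    // JsonOutput (see writeValueToFile below). Existing files are left untouched.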

    void writeValueToFile(script, String filename, value){
        if (value){
            if (!(value in CharSequence)) filename += '.json'
            if (script.fileExists(filename)) return
            if (!(value in CharSequence)) value = groovy.json.JsonOutput.toJson(value)
            script.writeFile file: filename, text: value
        }
    }

    void readFromDisk(script) {
        files.each({f ->
            if (script.fileExists(f.filename)) {
                this[f.property] = script.readFile(f.filename)
            }
        })

        def customValues = script.findFiles(glob: '.pipeline/commonPipelineEnvironment/custom/*')
        customValues.each({f ->
            def fileContent = script.readFile(f.getPath())
            def fileName = f.getName()
            def param = fileName.split('/')[fileName.split('\\/').size()-1]
            if (param.endsWith(".json")){
                param = param.replace(".json","")
                valueMap[param] = getJSONValue(script, fileContent)
            }else{
                valueMap[param] = fileContent
            }
        })

        def containerValues = script.findFiles(glob: '.pipeline/commonPipelineEnvironment/container/*')
        containerValues.each({f ->
            def fileContent = script.readFile(f.getPath())
            def fileName = f.getName()
            def param = fileName.split('/')[fileName.split('\\/').size()-1]
            if (param.endsWith(".json")){
                param = param.replace(".json","")
                containerProperties[param] = getJSONValue(script, fileContent)
            }else{
                containerProperties[param] = fileContent
            }
        })
    }
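
    // readFromDisk is the inverse of writeToDisk: the top-level properties come back as strings, while
    // '.json' files under custom/ and container/ are parsed again through getJSONValue below, so maps,
    // lists, booleans and numbers round-trip; everything else stays a plain string.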

    List getCustomDefaults() {
        DefaultValueCache.getInstance().getCustomDefaults()
    }

    def getJSONValue(Script script, String text) {
        try {
            return script.readJSON(text: text)
        } catch (net.sf.json.JSONException ex) {
            // JSON reader cannot handle simple objects like bool, numbers, ...
            // as such readJSON cannot read what writeJSON created in such cases
            if (text in ['true', 'false']) {
                return text.toBoolean()
            }
            if (text ==~ /[\d]+/) {
                return text.toInteger()
            }
            if (text.contains('.')) {
                return text.toFloat()
            }
            // no handling of strings since we expect strings in a non-json file
            // see handling of *.json above
            throw ex
        }
    }
}