1
0
mirror of https://github.com/SAP/jenkins-library.git synced 2025-02-07 13:42:23 +02:00

influxWriteData - support Influx tags (#420)

* influxWriteData - support Influx tags

In order to better query data in Influx, tags need to be written.
This change allows filling tag data via the Influx plugin.
This commit is contained in:
Oliver Nocon 2019-01-17 15:42:03 +01:00 committed by GitHub
parent 58a1d9eb15
commit 98139bb498
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 151 additions and 35 deletions

View File

@ -16,7 +16,7 @@ You basically need three components:
It will create following files for you and archive them into your build:
* `jenkins_data.json`: This file gives you build-specific information, like e.g. build result, stage where the build failed
* `pipeline_data.json`: This file gives you detailed information about your pipeline, e.g. stage durations, steps executed, ...
* `influx_data.json`: This file gives you detailed information about your pipeline, e.g. stage durations, steps executed, ...
## Prerequisites
@ -83,17 +83,33 @@ influxDBServer=jenkins
| parameter | mandatory | default | possible values |
| ----------|-----------|---------|-----------------|
| script | yes | | |
| artifactVersion | yes | commonPipelineEnvironment.getArtifactVersion() | |
| influxServer | no | `jenkins` | |
| influxPrefix | no | `null` | |
|script|yes|||
|artifactVersion|no|`commonPipelineEnvironment.getArtifactVersion()`||
|customData|no|`commonPipelineEnvironment.getInfluxCustomData()`||
|customDataMap|no|`commonPipelineEnvironment.getInfluxCustomDataMap()`||
|customDataMapTags|no|`commonPipelineEnvironment.getInfluxCustomDataMapTags()`||
|customDataTags|no|`commonPipelineEnvironment.getInfluxCustomDataTags()`||
|influxPrefix|no|||
|influxServer|no|||
|wrapInNode|no|`false`||
## Step configuration
The following parameters can also be specified as step parameters using the global configuration file:
We recommend defining the values of step parameters via the [config.yml file](../configuration.md).
- `influxServer`
- `influxPrefix`
In following sections the configuration is possible:
| parameter | general | step | stage |
| ----------|-----------|---------|-----------------|
|script||||
|artifactVersion||X|X|
|customData||X|X|
|customDataMap||X|X|
|customDataMapTags||X|X|
|customDataTags||X|X|
|influxPrefix||X|X|
|influxServer||X|X|
|wrapInNode||X|X|
## Example

View File

@ -100,6 +100,13 @@ class ConfigurationHelper implements Serializable {
return this
}
// Registers `value` as a fallback for `key`: it is applied only when the
// configuration currently holds no value (or an explicit null) for that key.
// Returns this helper to keep the fluent builder chain intact.
ConfigurationHelper addIfNull(key, value){
    if (config[key] != null) {
        return this
    }
    config[key] = value
    return this
}
@NonCPS // required because we have a closure in the
// method body that cannot be CPS transformed
Map use(){

View File

@ -10,7 +10,12 @@ import util.JenkinsStepRule
import util.JenkinsReadYamlRule
import util.Rules
import static org.hamcrest.Matchers.allOf
import static org.hamcrest.Matchers.containsString
import static org.hamcrest.Matchers.hasKey
import static org.hamcrest.Matchers.hasValue
import static org.hamcrest.Matchers.is
import static org.hamcrest.Matchers.isEmptyOrNullString
import static org.junit.Assert.assertThat
import static org.junit.Assert.assertTrue
import static org.junit.Assert.assertEquals
@ -59,15 +64,18 @@ class InfluxWriteDataTest extends BasePiperTest {
nullScript.commonPipelineEnvironment.setArtifactVersion('1.2.3')
jsr.step.influxWriteData(script: nullScript)
assertTrue(loggingRule.log.contains('Artifact version: 1.2.3'))
assertThat(loggingRule.log, containsString('Artifact version: 1.2.3'))
assertEquals('testInflux', stepMap.selectedTarget)
assertEquals(null, stepMap.customPrefix)
assertEquals([:], stepMap.customData)
assertEquals([pipeline_data: [:], step_data: [:]], stepMap.customDataMap)
assertThat(stepMap.selectedTarget, is('testInflux'))
assertThat(stepMap.customPrefix, isEmptyOrNullString())
assertTrue(fileMap.containsKey('jenkins_data.json'))
assertTrue(fileMap.containsKey('pipeline_data.json'))
assertThat(stepMap.customData, isEmptyOrNullString())
assertThat(stepMap.customDataMap, is([pipeline_data: [:], step_data: [:]]))
assertThat(fileMap, hasKey('jenkins_data.json'))
assertThat(fileMap, hasKey('influx_data.json'))
assertThat(fileMap, hasKey('jenkins_data_tags.json'))
assertThat(fileMap, hasKey('influx_data_tags.json'))
assertJobStatusSuccess()
}
@ -81,7 +89,7 @@ class InfluxWriteDataTest extends BasePiperTest {
assertEquals(0, stepMap.size())
assertTrue(fileMap.containsKey('jenkins_data.json'))
assertTrue(fileMap.containsKey('pipeline_data.json'))
assertTrue(fileMap.containsKey('influx_data.json'))
assertJobStatusSuccess()
}
@ -116,4 +124,41 @@ class InfluxWriteDataTest extends BasePiperTest {
assertThat(nodeCalled, is(true))
}
@Test
void testInfluxCustomData() {
    // Custom data/tags passed explicitly as step parameters must reach the
    // Influx plugin (captured in stepMap) unchanged.
    nullScript.commonPipelineEnvironment.setArtifactVersion('1.2.3')

    def stepParameters = [
        //juStabUtils: utils,
        script: nullScript,
        influxServer: 'myInstance',
        customData: [key1: 'test1'],
        customDataTags: [tag1: 'testTag1'],
        customDataMap: [test_data: [key1: 'keyValue1']],
        customDataMapTags: [test_data: [tag1: 'tagValue1']]
    ]
    jsr.step.influxWriteData(stepParameters)

    assertThat(stepMap.customDataMapTags, hasKey('test_data'))
    assertThat(stepMap.customDataMap, hasKey('test_data'))
    assertThat(stepMap.customDataTags, allOf(hasKey('tag1'), hasValue('testTag1')))
    assertThat(stepMap.customData, allOf(hasKey('key1'), hasValue('test1')))
}
@Test
void testInfluxCustomDataFromCPE() {
    // Custom data/tags written into the commonPipelineEnvironment (not passed
    // as step parameters) must be picked up as defaults by the step.
    def cpe = nullScript.commonPipelineEnvironment
    cpe.reset()
    cpe.setArtifactVersion('1.2.3')
    cpe.setInfluxCustomDataTagsEntry('tag1', 'testTag1')
    cpe.setInfluxCustomDataMapEntry('test_data', 'key1', 'keyValue1')
    cpe.setInfluxCustomDataMapTagsEntry('test_data', 'tag1', 'tagValue1')

    jsr.step.influxWriteData(
        //juStabUtils: utils,
        script: nullScript,
        influxServer: 'myInstance'
    )

    assertThat(stepMap.customDataMapTags, hasKey('test_data'))
    assertThat(stepMap.customDataMap, hasKey('test_data'))
    assertThat(stepMap.customDataTags, allOf(hasKey('tag1'), hasValue('testTag1')))
    assertThat(stepMap.customData, isEmptyOrNullString())
}
}

View File

@ -25,8 +25,12 @@ class commonPipelineEnvironment implements Serializable {
//each Map in influxCustomDataMap represents a measurement in Influx. Additional measurements can be added as a new Map entry of influxCustomDataMap
private Map influxCustomDataMap = [pipeline_data: [:], step_data: [:]]
//each Map in influxCustomDataMapTags represents tags for certain measurement in Influx. Tags are required in Influx for easier querying data
private Map influxCustomDataMapTags = [pipeline_data: [:]]
//influxCustomData represents measurement jenkins_custom_data in Influx. Metrics can be written into this map
private Map influxCustomData = [:]
//influxCustomDataTags represents tags in Influx. Tags are required in Influx for easier querying data
private Map influxCustomDataTags = [:]
String mtarFilePath
@ -49,7 +53,9 @@ class commonPipelineEnvironment implements Serializable {
githubRepo = null
influxCustomData = [:]
influxCustomDataTags = [:]
influxCustomDataMap = [pipeline_data: [:], step_data: [:]]
influxCustomDataMapTags = [pipeline_data: [:]]
mtarFilePath = null
@ -76,26 +82,56 @@ class commonPipelineEnvironment implements Serializable {
return configProperties[property]
}
// goes into measurement jenkins_data
// Stores a single field/value pair in the custom data written to Influx
// (backing map: influxCustomData).
def setInfluxCustomDataEntry(field, value) {
influxCustomData[field] = value
}
// goes into measurement jenkins_data
// Returns the map holding the custom Influx fields set via setInfluxCustomDataEntry.
def getInfluxCustomData() {
return influxCustomData
}
// goes into measurement jenkins_data
// Stores a single Influx tag (tag name -> value); tags allow easier
// querying/filtering of the measurement data in Influx.
def setInfluxCustomDataTagsEntry(tag, value) {
influxCustomDataTags[tag] = value
}
// goes into measurement jenkins_data
// Returns the map holding the custom Influx tags set via setInfluxCustomDataTagsEntry.
def getInfluxCustomDataTags() {
return influxCustomDataTags
}
// Records `value` under `field` for the given Influx measurement.
// A missing measurement bucket (or a Groovy-falsy one, i.e. an empty map)
// is replaced with a fresh map first — same truthiness semantics as before.
void setInfluxCustomDataMapEntry(measurement, field, value) {
    def bucket = influxCustomDataMap[measurement] ?: [:]
    influxCustomDataMap[measurement] = bucket
    bucket[field] = value
}
// Returns the map of measurements (measurement name -> field map)
// written to Influx; see setInfluxCustomDataMapEntry.
def getInfluxCustomDataMap() {
return influxCustomDataMap
}
def setInfluxStepData (dataKey, value) {
influxCustomDataMap.step_data[dataKey] = value
// Records `value` under `tag` as an Influx tag for the given measurement.
// A missing tag bucket (or a Groovy-falsy one, i.e. an empty map) is
// replaced with a fresh map first — same truthiness semantics as before.
def setInfluxCustomDataMapTagsEntry(measurement, tag, value) {
    def tagBucket = influxCustomDataMapTags[measurement] ?: [:]
    influxCustomDataMapTags[measurement] = tagBucket
    tagBucket[tag] = value
}
def getInfluxStepData (dataKey) {
return influxCustomDataMap.step_data[dataKey]
def getInfluxCustomDataMapTags() {
return influxCustomDataMapTags
}
def setPipelineMeasurement (measurementName, value) {
influxCustomDataMap.pipeline_data[measurementName] = value
// Convenience wrapper: writes a field into the step_data measurement.
def setInfluxStepData(key, value) {
setInfluxCustomDataMapEntry('step_data', key, value)
}
// Convenience wrapper: reads a previously stored field from the step_data measurement.
def getInfluxStepData(key) {
return influxCustomDataMap.step_data[key]
}
def getPipelineMeasurement (measurementName) {
return influxCustomDataMap.pipeline_data[measurementName]
// Convenience wrapper: writes a field into the pipeline_data measurement.
def setPipelineMeasurement(key, value) {
setInfluxCustomDataMapEntry('pipeline_data', key, value)
}
// Convenience wrapper: reads a previously stored field from the pipeline_data measurement.
def getPipelineMeasurement(key) {
return influxCustomDataMap.pipeline_data[key]
}
}

View File

@ -11,14 +11,17 @@ import groovy.transform.Field
@Field def STEP_NAME = getClass().getName()
@Field Set GENERAL_CONFIG_KEYS = []
@Field Set STEP_CONFIG_KEYS = [
@Field Set STEP_CONFIG_KEYS = GENERAL_CONFIG_KEYS.plus([
'artifactVersion',
'customData',
'customDataTags',
'customDataMap',
'customDataMapTags',
'influxServer',
'influxPrefix',
'wrapInNode'
]
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS.plus([
'artifactVersion'
])
@Field Set PARAMETER_KEYS = STEP_CONFIG_KEYS
void call(Map parameters = [:]) {
handlePipelineStepErrors (stepName: STEP_NAME, stepParameters: parameters, allowBuildFailure: true) {
@ -37,6 +40,10 @@ void call(Map parameters = [:]) {
artifactVersion: script.commonPipelineEnvironment.getArtifactVersion()
])
.mixin(parameters, PARAMETER_KEYS)
.addIfNull('customData', script.commonPipelineEnvironment.getInfluxCustomData())
.addIfNull('customDataTags', script.commonPipelineEnvironment.getInfluxCustomDataTags())
.addIfNull('customDataMap', script.commonPipelineEnvironment.getInfluxCustomDataMap())
.addIfNull('customDataMapTags', script.commonPipelineEnvironment.getInfluxCustomDataMapTags())
.use()
new Utils().pushToSWA([step: STEP_NAME,
@ -52,8 +59,10 @@ void call(Map parameters = [:]) {
Artifact version: ${config.artifactVersion}
Influx server: ${config.influxServer}
Influx prefix: ${config.influxPrefix}
InfluxDB data: ${script.commonPipelineEnvironment.getInfluxCustomData()}
InfluxDB data map: ${script.commonPipelineEnvironment.getInfluxCustomDataMap()}
InfluxDB data: ${config.customData}
InfluxDB data tags: ${config.customDataTags}
InfluxDB data map: ${config.customDataMap}
InfluxDB data map tags: ${config.customDataMapTags}
[${STEP_NAME}]----------------------------------------------------------"""
if(config.wrapInNode){
@ -76,15 +85,18 @@ private void writeToInflux(config, script){
$class: 'InfluxDbPublisher',
selectedTarget: config.influxServer,
customPrefix: config.influxPrefix,
customData: script.commonPipelineEnvironment.getInfluxCustomData(),
customDataMap: script.commonPipelineEnvironment.getInfluxCustomDataMap()
customData: config.customData.size()>0 ? config.customData : null,
customDataTags: config.customDataTags.size()>0 ? config.customDataTags : null,
customDataMap: config.customDataMap.size()>0 ? config.customDataMap : null,
customDataMapTags: config.customDataMapTags.size()>0 ? config.customDataMapTags : null
])
}
//write results into JSON files for archiving - also beneficial when no InfluxDB server is available yet
def jsonUtils = new JsonUtils()
writeFile file: 'jenkins_data.json', text: jsonUtils.getPrettyJsonString(script.commonPipelineEnvironment.getInfluxCustomData())
writeFile file: 'pipeline_data.json', text: jsonUtils.getPrettyJsonString(script.commonPipelineEnvironment.getInfluxCustomDataMap())
writeFile file: 'jenkins_data.json', text: jsonUtils.getPrettyJsonString(config.customData)
writeFile file: 'influx_data.json', text: jsonUtils.getPrettyJsonString(config.customDataMap)
writeFile file: 'jenkins_data_tags.json', text: jsonUtils.getPrettyJsonString(config.customDataTags)
writeFile file: 'influx_data_tags.json', text: jsonUtils.getPrettyJsonString(config.customDataMapTags)
archiveArtifacts artifacts: '*data.json', allowEmptyArchive: true
}