Mirror of https://github.com/SAP/jenkins-library.git (synced 2024-11-28 08:49:44 +02:00)
fix(influx): correct data type of influx measurements (#2171)
* update data type of influx measurements
* Update checkmarx.yaml
* pick changes from #1885 for testing
* update generated code
* update to new datatype
* adjust to type changes
* change back to string type
* Update fortifyExecuteScan.go
* add typo to be backward compatible
* change type to int for files_scanned and lines_of_code_scanned
* add typo
* add measurements to whitesource
* update generated sources
* adjust test cases

Co-authored-by: Oliver Nocon <33484802+OliverNocon@users.noreply.github.com>
Commit f999925788 (parent 67ef1f21c7)
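The commit applies one mechanical pattern across the Checkmarx, Protecode and WhiteSource steps: influx measurement fields that hold counts change from string to int, so numeric results are assigned directly instead of being stringified first. A minimal, self-contained sketch of that pattern (illustrative only; simplified type and variable names that are not taken from the repository):

    // Illustrative sketch only (not code from this commit): the influx "fields"
    // structs move from string to int, so counts are stored without strconv.Itoa.
    package main

    import (
        "fmt"
        "strconv"
    )

    type fieldsBefore struct{ high_issues string } // old: numeric results kept as strings
    type fieldsAfter struct{ high_issues int }     // new: numeric results kept as ints

    func main() {
        // stand-in for the parsed scan result; the real steps use map[string]interface{}
        results := map[string]map[string]int{"High": {"Issues": 5}}

        before := fieldsBefore{high_issues: strconv.Itoa(results["High"]["Issues"])}
        after := fieldsAfter{high_issues: results["High"]["Issues"]}

        fmt.Printf("before: %q, after: %d\n", before.high_issues, after.high_issues) // before: "5", after: 5
    }

Per the commit message ("add typo to be backward compatible"), the misspelled *_not_false_postive field names introduced below appear to be intentional, keeping the new fields compatible with the names already emitted and consumed elsewhere.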
@@ -289,34 +289,34 @@ func pollScanStatus(sys checkmarx.System, scan checkmarx.Scan) error {
 }
 
 func reportToInflux(results map[string]interface{}, influx *checkmarxExecuteScanInflux) {
-    influx.checkmarx_data.fields.high_issues = strconv.Itoa(results["High"].(map[string]int)["Issues"])
-    influx.checkmarx_data.fields.high_not_false_positive = strconv.Itoa(results["High"].(map[string]int)["NotFalsePositive"])
-    influx.checkmarx_data.fields.high_not_exploitable = strconv.Itoa(results["High"].(map[string]int)["NotExploitable"])
-    influx.checkmarx_data.fields.high_confirmed = strconv.Itoa(results["High"].(map[string]int)["Confirmed"])
-    influx.checkmarx_data.fields.high_urgent = strconv.Itoa(results["High"].(map[string]int)["Urgent"])
-    influx.checkmarx_data.fields.high_proposed_not_exploitable = strconv.Itoa(results["High"].(map[string]int)["ProposedNotExploitable"])
-    influx.checkmarx_data.fields.high_to_verify = strconv.Itoa(results["High"].(map[string]int)["ToVerify"])
-    influx.checkmarx_data.fields.medium_issues = strconv.Itoa(results["Medium"].(map[string]int)["Issues"])
-    influx.checkmarx_data.fields.medium_not_false_positive = strconv.Itoa(results["Medium"].(map[string]int)["NotFalsePositive"])
-    influx.checkmarx_data.fields.medium_not_exploitable = strconv.Itoa(results["Medium"].(map[string]int)["NotExploitable"])
-    influx.checkmarx_data.fields.medium_confirmed = strconv.Itoa(results["Medium"].(map[string]int)["Confirmed"])
-    influx.checkmarx_data.fields.medium_urgent = strconv.Itoa(results["Medium"].(map[string]int)["Urgent"])
-    influx.checkmarx_data.fields.medium_proposed_not_exploitable = strconv.Itoa(results["Medium"].(map[string]int)["ProposedNotExploitable"])
-    influx.checkmarx_data.fields.medium_to_verify = strconv.Itoa(results["Medium"].(map[string]int)["ToVerify"])
-    influx.checkmarx_data.fields.low_issues = strconv.Itoa(results["Low"].(map[string]int)["Issues"])
-    influx.checkmarx_data.fields.low_not_false_positive = strconv.Itoa(results["Low"].(map[string]int)["NotFalsePositive"])
-    influx.checkmarx_data.fields.low_not_exploitable = strconv.Itoa(results["Low"].(map[string]int)["NotExploitable"])
-    influx.checkmarx_data.fields.low_confirmed = strconv.Itoa(results["Low"].(map[string]int)["Confirmed"])
-    influx.checkmarx_data.fields.low_urgent = strconv.Itoa(results["Low"].(map[string]int)["Urgent"])
-    influx.checkmarx_data.fields.low_proposed_not_exploitable = strconv.Itoa(results["Low"].(map[string]int)["ProposedNotExploitable"])
-    influx.checkmarx_data.fields.low_to_verify = strconv.Itoa(results["Low"].(map[string]int)["ToVerify"])
-    influx.checkmarx_data.fields.information_issues = strconv.Itoa(results["Information"].(map[string]int)["Issues"])
-    influx.checkmarx_data.fields.information_not_false_positive = strconv.Itoa(results["Information"].(map[string]int)["NotFalsePositive"])
-    influx.checkmarx_data.fields.information_not_exploitable = strconv.Itoa(results["Information"].(map[string]int)["NotExploitable"])
-    influx.checkmarx_data.fields.information_confirmed = strconv.Itoa(results["Information"].(map[string]int)["Confirmed"])
-    influx.checkmarx_data.fields.information_urgent = strconv.Itoa(results["Information"].(map[string]int)["Urgent"])
-    influx.checkmarx_data.fields.information_proposed_not_exploitable = strconv.Itoa(results["Information"].(map[string]int)["ProposedNotExploitable"])
-    influx.checkmarx_data.fields.information_to_verify = strconv.Itoa(results["Information"].(map[string]int)["ToVerify"])
+    influx.checkmarx_data.fields.high_issues = results["High"].(map[string]int)["Issues"]
+    influx.checkmarx_data.fields.high_not_false_postive = results["High"].(map[string]int)["NotFalsePositive"]
+    influx.checkmarx_data.fields.high_not_exploitable = results["High"].(map[string]int)["NotExploitable"]
+    influx.checkmarx_data.fields.high_confirmed = results["High"].(map[string]int)["Confirmed"]
+    influx.checkmarx_data.fields.high_urgent = results["High"].(map[string]int)["Urgent"]
+    influx.checkmarx_data.fields.high_proposed_not_exploitable = results["High"].(map[string]int)["ProposedNotExploitable"]
+    influx.checkmarx_data.fields.high_to_verify = results["High"].(map[string]int)["ToVerify"]
+    influx.checkmarx_data.fields.medium_issues = results["Medium"].(map[string]int)["Issues"]
+    influx.checkmarx_data.fields.medium_not_false_postive = results["Medium"].(map[string]int)["NotFalsePositive"]
+    influx.checkmarx_data.fields.medium_not_exploitable = results["Medium"].(map[string]int)["NotExploitable"]
+    influx.checkmarx_data.fields.medium_confirmed = results["Medium"].(map[string]int)["Confirmed"]
+    influx.checkmarx_data.fields.medium_urgent = results["Medium"].(map[string]int)["Urgent"]
+    influx.checkmarx_data.fields.medium_proposed_not_exploitable = results["Medium"].(map[string]int)["ProposedNotExploitable"]
+    influx.checkmarx_data.fields.medium_to_verify = results["Medium"].(map[string]int)["ToVerify"]
+    influx.checkmarx_data.fields.low_issues = results["Low"].(map[string]int)["Issues"]
+    influx.checkmarx_data.fields.low_not_false_postive = results["Low"].(map[string]int)["NotFalsePositive"]
+    influx.checkmarx_data.fields.low_not_exploitable = results["Low"].(map[string]int)["NotExploitable"]
+    influx.checkmarx_data.fields.low_confirmed = results["Low"].(map[string]int)["Confirmed"]
+    influx.checkmarx_data.fields.low_urgent = results["Low"].(map[string]int)["Urgent"]
+    influx.checkmarx_data.fields.low_proposed_not_exploitable = results["Low"].(map[string]int)["ProposedNotExploitable"]
+    influx.checkmarx_data.fields.low_to_verify = results["Low"].(map[string]int)["ToVerify"]
+    influx.checkmarx_data.fields.information_issues = results["Information"].(map[string]int)["Issues"]
+    influx.checkmarx_data.fields.information_not_false_postive = results["Information"].(map[string]int)["NotFalsePositive"]
+    influx.checkmarx_data.fields.information_not_exploitable = results["Information"].(map[string]int)["NotExploitable"]
+    influx.checkmarx_data.fields.information_confirmed = results["Information"].(map[string]int)["Confirmed"]
+    influx.checkmarx_data.fields.information_urgent = results["Information"].(map[string]int)["Urgent"]
+    influx.checkmarx_data.fields.information_proposed_not_exploitable = results["Information"].(map[string]int)["ProposedNotExploitable"]
+    influx.checkmarx_data.fields.information_to_verify = results["Information"].(map[string]int)["ToVerify"]
     influx.checkmarx_data.fields.initiator_name = results["InitiatorName"].(string)
     influx.checkmarx_data.fields.owner = results["Owner"].(string)
     influx.checkmarx_data.fields.scan_id = results["ScanId"].(string)
@@ -326,8 +326,8 @@ func reportToInflux(results map[string]interface{}, influx *checkmarxExecuteScan
     influx.checkmarx_data.fields.team_full_path_on_report_date = results["TeamFullPathOnReportDate"].(string)
     influx.checkmarx_data.fields.scan_start = results["ScanStart"].(string)
     influx.checkmarx_data.fields.scan_time = results["ScanTime"].(string)
-    influx.checkmarx_data.fields.lines_of_code_scanned = results["LinesOfCodeScanned"].(string)
-    influx.checkmarx_data.fields.files_scanned = results["FilesScanned"].(string)
+    influx.checkmarx_data.fields.lines_of_code_scanned = results["LinesOfCodeScanned"].(int)
+    influx.checkmarx_data.fields.files_scanned = results["FilesScanned"].(int)
     influx.checkmarx_data.fields.checkmarx_version = results["CheckmarxVersion"].(string)
     influx.checkmarx_data.fields.scan_type = results["ScanType"].(string)
     influx.checkmarx_data.fields.preset = results["Preset"].(string)
@@ -44,34 +44,36 @@ type checkmarxExecuteScanOptions struct {
 type checkmarxExecuteScanInflux struct {
     checkmarx_data struct {
         fields struct {
-            high_issues string
-            high_not_false_positive string
-            high_not_exploitable string
-            high_confirmed string
-            high_urgent string
-            high_proposed_not_exploitable string
-            high_to_verify string
-            medium_issues string
-            medium_not_false_positive string
-            medium_not_exploitable string
-            medium_confirmed string
-            medium_urgent string
-            medium_proposed_not_exploitable string
-            medium_to_verify string
-            low_issues string
-            low_not_false_positive string
-            low_not_exploitable string
-            low_confirmed string
-            low_urgent string
-            low_proposed_not_exploitable string
-            low_to_verify string
-            information_issues string
-            information_not_false_positive string
-            information_not_exploitable string
-            information_confirmed string
-            information_urgent string
-            information_proposed_not_exploitable string
-            information_to_verify string
+            high_issues int
+            high_not_false_postive int
+            high_not_exploitable int
+            high_confirmed int
+            high_urgent int
+            high_proposed_not_exploitable int
+            high_to_verify int
+            medium_issues int
+            medium_not_false_postive int
+            medium_not_exploitable int
+            medium_confirmed int
+            medium_urgent int
+            medium_proposed_not_exploitable int
+            medium_to_verify int
+            low_issues int
+            low_not_false_postive int
+            low_not_exploitable int
+            low_confirmed int
+            low_urgent int
+            low_proposed_not_exploitable int
+            low_to_verify int
+            information_issues int
+            information_not_false_postive int
+            information_not_exploitable int
+            information_confirmed int
+            information_urgent int
+            information_proposed_not_exploitable int
+            information_to_verify int
+            lines_of_code_scanned int
+            files_scanned int
             initiator_name string
             owner string
             scan_id string
@@ -81,8 +83,6 @@ type checkmarxExecuteScanInflux struct {
             team_full_path_on_report_date string
             scan_start string
             scan_time string
-            lines_of_code_scanned string
-            files_scanned string
             checkmarx_version string
             scan_type string
             preset string
@@ -102,33 +102,35 @@ func (i *checkmarxExecuteScanInflux) persist(path, resourceName string) {
         value interface{}
     }{
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "high_issues", value: i.checkmarx_data.fields.high_issues},
-        {valType: config.InfluxField, measurement: "checkmarx_data", name: "high_not_false_positive", value: i.checkmarx_data.fields.high_not_false_positive},
+        {valType: config.InfluxField, measurement: "checkmarx_data", name: "high_not_false_postive", value: i.checkmarx_data.fields.high_not_false_postive},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "high_not_exploitable", value: i.checkmarx_data.fields.high_not_exploitable},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "high_confirmed", value: i.checkmarx_data.fields.high_confirmed},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "high_urgent", value: i.checkmarx_data.fields.high_urgent},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "high_proposed_not_exploitable", value: i.checkmarx_data.fields.high_proposed_not_exploitable},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "high_to_verify", value: i.checkmarx_data.fields.high_to_verify},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "medium_issues", value: i.checkmarx_data.fields.medium_issues},
-        {valType: config.InfluxField, measurement: "checkmarx_data", name: "medium_not_false_positive", value: i.checkmarx_data.fields.medium_not_false_positive},
+        {valType: config.InfluxField, measurement: "checkmarx_data", name: "medium_not_false_postive", value: i.checkmarx_data.fields.medium_not_false_postive},
        {valType: config.InfluxField, measurement: "checkmarx_data", name: "medium_not_exploitable", value: i.checkmarx_data.fields.medium_not_exploitable},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "medium_confirmed", value: i.checkmarx_data.fields.medium_confirmed},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "medium_urgent", value: i.checkmarx_data.fields.medium_urgent},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "medium_proposed_not_exploitable", value: i.checkmarx_data.fields.medium_proposed_not_exploitable},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "medium_to_verify", value: i.checkmarx_data.fields.medium_to_verify},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "low_issues", value: i.checkmarx_data.fields.low_issues},
-        {valType: config.InfluxField, measurement: "checkmarx_data", name: "low_not_false_positive", value: i.checkmarx_data.fields.low_not_false_positive},
+        {valType: config.InfluxField, measurement: "checkmarx_data", name: "low_not_false_postive", value: i.checkmarx_data.fields.low_not_false_postive},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "low_not_exploitable", value: i.checkmarx_data.fields.low_not_exploitable},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "low_confirmed", value: i.checkmarx_data.fields.low_confirmed},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "low_urgent", value: i.checkmarx_data.fields.low_urgent},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "low_proposed_not_exploitable", value: i.checkmarx_data.fields.low_proposed_not_exploitable},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "low_to_verify", value: i.checkmarx_data.fields.low_to_verify},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "information_issues", value: i.checkmarx_data.fields.information_issues},
-        {valType: config.InfluxField, measurement: "checkmarx_data", name: "information_not_false_positive", value: i.checkmarx_data.fields.information_not_false_positive},
+        {valType: config.InfluxField, measurement: "checkmarx_data", name: "information_not_false_postive", value: i.checkmarx_data.fields.information_not_false_postive},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "information_not_exploitable", value: i.checkmarx_data.fields.information_not_exploitable},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "information_confirmed", value: i.checkmarx_data.fields.information_confirmed},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "information_urgent", value: i.checkmarx_data.fields.information_urgent},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "information_proposed_not_exploitable", value: i.checkmarx_data.fields.information_proposed_not_exploitable},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "information_to_verify", value: i.checkmarx_data.fields.information_to_verify},
+        {valType: config.InfluxField, measurement: "checkmarx_data", name: "lines_of_code_scanned", value: i.checkmarx_data.fields.lines_of_code_scanned},
+        {valType: config.InfluxField, measurement: "checkmarx_data", name: "files_scanned", value: i.checkmarx_data.fields.files_scanned},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "initiator_name", value: i.checkmarx_data.fields.initiator_name},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "owner", value: i.checkmarx_data.fields.owner},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "scan_id", value: i.checkmarx_data.fields.scan_id},
@@ -138,8 +140,6 @@ func (i *checkmarxExecuteScanInflux) persist(path, resourceName string) {
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "team_full_path_on_report_date", value: i.checkmarx_data.fields.team_full_path_on_report_date},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "scan_start", value: i.checkmarx_data.fields.scan_start},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "scan_time", value: i.checkmarx_data.fields.scan_time},
-        {valType: config.InfluxField, measurement: "checkmarx_data", name: "lines_of_code_scanned", value: i.checkmarx_data.fields.lines_of_code_scanned},
-        {valType: config.InfluxField, measurement: "checkmarx_data", name: "files_scanned", value: i.checkmarx_data.fields.files_scanned},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "checkmarx_version", value: i.checkmarx_data.fields.checkmarx_version},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "scan_type", value: i.checkmarx_data.fields.scan_type},
         {valType: config.InfluxField, measurement: "checkmarx_data", name: "preset", value: i.checkmarx_data.fields.preset},
@@ -491,7 +491,7 @@ func checkmarxExecuteScanMetadata() config.StepData {
                 Name: "influx",
                 Type: "influx",
                 Parameters: []map[string]interface{}{
-                    {"Name": "checkmarx_data"}, {"fields": []map[string]string{{"name": "high_issues"}, {"name": "high_not_false_positive"}, {"name": "high_not_exploitable"}, {"name": "high_confirmed"}, {"name": "high_urgent"}, {"name": "high_proposed_not_exploitable"}, {"name": "high_to_verify"}, {"name": "medium_issues"}, {"name": "medium_not_false_positive"}, {"name": "medium_not_exploitable"}, {"name": "medium_confirmed"}, {"name": "medium_urgent"}, {"name": "medium_proposed_not_exploitable"}, {"name": "medium_to_verify"}, {"name": "low_issues"}, {"name": "low_not_false_positive"}, {"name": "low_not_exploitable"}, {"name": "low_confirmed"}, {"name": "low_urgent"}, {"name": "low_proposed_not_exploitable"}, {"name": "low_to_verify"}, {"name": "information_issues"}, {"name": "information_not_false_positive"}, {"name": "information_not_exploitable"}, {"name": "information_confirmed"}, {"name": "information_urgent"}, {"name": "information_proposed_not_exploitable"}, {"name": "information_to_verify"}, {"name": "initiator_name"}, {"name": "owner"}, {"name": "scan_id"}, {"name": "project_id"}, {"name": "projectName"}, {"name": "team"}, {"name": "team_full_path_on_report_date"}, {"name": "scan_start"}, {"name": "scan_time"}, {"name": "lines_of_code_scanned"}, {"name": "files_scanned"}, {"name": "checkmarx_version"}, {"name": "scan_type"}, {"name": "preset"}, {"name": "deep_link"}, {"name": "report_creation_time"}}},
+                    {"Name": "checkmarx_data"}, {"fields": []map[string]string{{"name": "high_issues"}, {"name": "high_not_false_postive"}, {"name": "high_not_exploitable"}, {"name": "high_confirmed"}, {"name": "high_urgent"}, {"name": "high_proposed_not_exploitable"}, {"name": "high_to_verify"}, {"name": "medium_issues"}, {"name": "medium_not_false_postive"}, {"name": "medium_not_exploitable"}, {"name": "medium_confirmed"}, {"name": "medium_urgent"}, {"name": "medium_proposed_not_exploitable"}, {"name": "medium_to_verify"}, {"name": "low_issues"}, {"name": "low_not_false_postive"}, {"name": "low_not_exploitable"}, {"name": "low_confirmed"}, {"name": "low_urgent"}, {"name": "low_proposed_not_exploitable"}, {"name": "low_to_verify"}, {"name": "information_issues"}, {"name": "information_not_false_postive"}, {"name": "information_not_exploitable"}, {"name": "information_confirmed"}, {"name": "information_urgent"}, {"name": "information_proposed_not_exploitable"}, {"name": "information_to_verify"}, {"name": "lines_of_code_scanned"}, {"name": "files_scanned"}, {"name": "initiator_name"}, {"name": "owner"}, {"name": "scan_id"}, {"name": "project_id"}, {"name": "projectName"}, {"name": "team"}, {"name": "team_full_path_on_report_date"}, {"name": "scan_start"}, {"name": "scan_time"}, {"name": "checkmarx_version"}, {"name": "scan_type"}, {"name": "preset"}, {"name": "deep_link"}, {"name": "report_creation_time"}}},
                 },
             },
         },
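For orientation, the generated persist() functions (shown above for checkmarx_data and below for protecode_data and whitesource_data) hand every measurement field to piperenv.SetResourceParameter under a path built from measurement, value type and field name. A small self-contained sketch of that path construction, assuming the influx value type resolves to "field" (simplified, not repository code):

    // Sketch (not repository code): shows how the generated persist() functions
    // compose the resource-parameter path for each influx metric, mirroring
    // filepath.Join(metric.measurement, fmt.Sprintf("%vs", metric.valType), metric.name).
    package main

    import (
        "fmt"
        "path/filepath"
    )

    func main() {
        metrics := []struct {
            measurement string
            valType     string
            name        string
            value       interface{}
        }{
            {measurement: "checkmarx_data", valType: "field", name: "high_issues", value: 5},
            {measurement: "whitesource_data", valType: "field", name: "policy_violations", value: 2},
        }
        for _, metric := range metrics {
            path := filepath.Join(metric.measurement, fmt.Sprintf("%vs", metric.valType), metric.name)
            fmt.Printf("%s = %v\n", path, metric.value) // e.g. checkmarx_data/fields/high_issues = 5
        }
    }

With the fields now typed as int, the values written this way stay numeric end to end instead of being pre-formatted strings.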
@@ -214,12 +214,12 @@ func executeProtecodeScan(influx *protecodeExecuteScanInflux, client protecode.P
 }
 
 func setInfluxData(influx *protecodeExecuteScanInflux, result map[string]int) {
-    influx.protecode_data.fields.historical_vulnerabilities = fmt.Sprintf("%v", result["historical_vulnerabilities"])
-    influx.protecode_data.fields.triaged_vulnerabilities = fmt.Sprintf("%v", result["triaged_vulnerabilities"])
-    influx.protecode_data.fields.excluded_vulnerabilities = fmt.Sprintf("%v", result["excluded_vulnerabilities"])
-    influx.protecode_data.fields.minor_vulnerabilities = fmt.Sprintf("%v", result["minor_vulnerabilities"])
-    influx.protecode_data.fields.major_vulnerabilities = fmt.Sprintf("%v", result["major_vulnerabilities"])
-    influx.protecode_data.fields.vulnerabilities = fmt.Sprintf("%v", result["vulnerabilities"])
+    influx.protecode_data.fields.historical_vulnerabilities = result["historical_vulnerabilities"]
+    influx.protecode_data.fields.triaged_vulnerabilities = result["triaged_vulnerabilities"]
+    influx.protecode_data.fields.excluded_vulnerabilities = result["excluded_vulnerabilities"]
+    influx.protecode_data.fields.minor_vulnerabilities = result["minor_vulnerabilities"]
+    influx.protecode_data.fields.major_vulnerabilities = result["major_vulnerabilities"]
+    influx.protecode_data.fields.vulnerabilities = result["vulnerabilities"]
 }
 
 func createClient(config *protecodeExecuteScanOptions) protecode.Protecode {
@@ -39,12 +39,12 @@ type protecodeExecuteScanOptions struct {
 type protecodeExecuteScanInflux struct {
     protecode_data struct {
         fields struct {
-            historical_vulnerabilities string
-            triaged_vulnerabilities string
-            excluded_vulnerabilities string
-            minor_vulnerabilities string
-            major_vulnerabilities string
-            vulnerabilities string
+            excluded_vulnerabilities int
+            historical_vulnerabilities int
+            major_vulnerabilities int
+            minor_vulnerabilities int
+            triaged_vulnerabilities int
+            vulnerabilities int
         }
         tags struct {
         }
@@ -58,11 +58,11 @@ func (i *protecodeExecuteScanInflux) persist(path, resourceName string) {
         name string
         value interface{}
     }{
-        {valType: config.InfluxField, measurement: "protecode_data", name: "historical_vulnerabilities", value: i.protecode_data.fields.historical_vulnerabilities},
-        {valType: config.InfluxField, measurement: "protecode_data", name: "triaged_vulnerabilities", value: i.protecode_data.fields.triaged_vulnerabilities},
         {valType: config.InfluxField, measurement: "protecode_data", name: "excluded_vulnerabilities", value: i.protecode_data.fields.excluded_vulnerabilities},
-        {valType: config.InfluxField, measurement: "protecode_data", name: "minor_vulnerabilities", value: i.protecode_data.fields.minor_vulnerabilities},
+        {valType: config.InfluxField, measurement: "protecode_data", name: "historical_vulnerabilities", value: i.protecode_data.fields.historical_vulnerabilities},
         {valType: config.InfluxField, measurement: "protecode_data", name: "major_vulnerabilities", value: i.protecode_data.fields.major_vulnerabilities},
+        {valType: config.InfluxField, measurement: "protecode_data", name: "minor_vulnerabilities", value: i.protecode_data.fields.minor_vulnerabilities},
+        {valType: config.InfluxField, measurement: "protecode_data", name: "triaged_vulnerabilities", value: i.protecode_data.fields.triaged_vulnerabilities},
         {valType: config.InfluxField, measurement: "protecode_data", name: "vulnerabilities", value: i.protecode_data.fields.vulnerabilities},
     }
 
@@ -382,7 +382,7 @@ func protecodeExecuteScanMetadata() config.StepData {
                 Name: "influx",
                 Type: "influx",
                 Parameters: []map[string]interface{}{
-                    {"Name": "protecode_data"}, {"fields": []map[string]string{{"name": "historical_vulnerabilities"}, {"name": "triaged_vulnerabilities"}, {"name": "excluded_vulnerabilities"}, {"name": "minor_vulnerabilities"}, {"name": "major_vulnerabilities"}, {"name": "vulnerabilities"}}},
+                    {"Name": "protecode_data"}, {"fields": []map[string]string{{"name": "excluded_vulnerabilities"}, {"name": "historical_vulnerabilities"}, {"name": "major_vulnerabilities"}, {"name": "minor_vulnerabilities"}, {"name": "triaged_vulnerabilities"}, {"name": "vulnerabilities"}}},
                 },
             },
         },
@@ -340,11 +340,11 @@ func TestExecuteProtecodeScan(t *testing.T) {
         // test
         executeProtecodeScan(influxData, pc, &config, "dummy", writeReportToFileMock)
         // assert
-        assert.Equal(t, "1125", influxData.protecode_data.fields.historical_vulnerabilities)
-        assert.Equal(t, "0", influxData.protecode_data.fields.triaged_vulnerabilities)
-        assert.Equal(t, "1", influxData.protecode_data.fields.excluded_vulnerabilities)
-        assert.Equal(t, "142", influxData.protecode_data.fields.major_vulnerabilities)
-        assert.Equal(t, "226", influxData.protecode_data.fields.vulnerabilities)
+        assert.Equal(t, 1125, influxData.protecode_data.fields.historical_vulnerabilities)
+        assert.Equal(t, 0, influxData.protecode_data.fields.triaged_vulnerabilities)
+        assert.Equal(t, 1, influxData.protecode_data.fields.excluded_vulnerabilities)
+        assert.Equal(t, 142, influxData.protecode_data.fields.major_vulnerabilities)
+        assert.Equal(t, 226, influxData.protecode_data.fields.vulnerabilities)
     }
 }
 
@@ -112,17 +112,17 @@ func newWhitesourceScan(config *ScanOptions) *ws.Scan {
     }
 }
 
-func whitesourceExecuteScan(config ScanOptions, _ *telemetry.CustomData, commonPipelineEnvironment *whitesourceExecuteScanCommonPipelineEnvironment) {
+func whitesourceExecuteScan(config ScanOptions, _ *telemetry.CustomData, commonPipelineEnvironment *whitesourceExecuteScanCommonPipelineEnvironment, influx *whitesourceExecuteScanInflux) {
     utils := newWhitesourceUtils(&config)
     scan := newWhitesourceScan(&config)
     sys := ws.NewSystem(config.ServiceURL, config.OrgToken, config.UserToken, time.Duration(config.Timeout)*time.Second)
-    err := runWhitesourceExecuteScan(&config, scan, utils, sys, commonPipelineEnvironment)
+    err := runWhitesourceExecuteScan(&config, scan, utils, sys, commonPipelineEnvironment, influx)
     if err != nil {
         log.Entry().WithError(err).Fatal("step execution failed")
     }
 }
 
-func runWhitesourceExecuteScan(config *ScanOptions, scan *ws.Scan, utils whitesourceUtils, sys whitesource, commonPipelineEnvironment *whitesourceExecuteScanCommonPipelineEnvironment) error {
+func runWhitesourceExecuteScan(config *ScanOptions, scan *ws.Scan, utils whitesourceUtils, sys whitesource, commonPipelineEnvironment *whitesourceExecuteScanCommonPipelineEnvironment, influx *whitesourceExecuteScanInflux) error {
     if err := resolveAggregateProjectName(config, scan, sys); err != nil {
         return err
     }
@@ -143,14 +143,14 @@ func runWhitesourceExecuteScan(config *ScanOptions, scan *ws.Scan, utils whiteso
             return fmt.Errorf("failed to aggregate version wide vulnerabilities: %w", err)
         }
     } else {
-        if err := runWhitesourceScan(config, scan, utils, sys, commonPipelineEnvironment); err != nil {
+        if err := runWhitesourceScan(config, scan, utils, sys, commonPipelineEnvironment, influx); err != nil {
            return fmt.Errorf("failed to execute WhiteSource scan: %w", err)
         }
     }
     return nil
 }
 
-func runWhitesourceScan(config *ScanOptions, scan *ws.Scan, utils whitesourceUtils, sys whitesource, commonPipelineEnvironment *whitesourceExecuteScanCommonPipelineEnvironment) error {
+func runWhitesourceScan(config *ScanOptions, scan *ws.Scan, utils whitesourceUtils, sys whitesource, commonPipelineEnvironment *whitesourceExecuteScanCommonPipelineEnvironment, influx *whitesourceExecuteScanInflux) error {
     // Download Docker image for container scan
     // ToDo: move it to improve testability
     if config.BuildTool == "docker" {
@@ -188,7 +188,7 @@ func runWhitesourceScan(config *ScanOptions, scan *ws.Scan, utils whitesourceUti
     }
     log.Entry().Info("-----------------------------------------------------")
 
-    paths, err := checkAndReportScanResults(config, scan, utils, sys)
+    paths, err := checkAndReportScanResults(config, scan, utils, sys, influx)
     piperutils.PersistReportsAndLinks("whitesourceExecuteScan", "", paths, nil)
     persistScannedProjects(config, scan, commonPipelineEnvironment)
     if err != nil {
@@ -197,7 +197,7 @@ func runWhitesourceScan(config *ScanOptions, scan *ws.Scan, utils whitesourceUti
     return nil
 }
 
-func checkAndReportScanResults(config *ScanOptions, scan *ws.Scan, utils whitesourceUtils, sys whitesource) ([]piperutils.Path, error) {
+func checkAndReportScanResults(config *ScanOptions, scan *ws.Scan, utils whitesourceUtils, sys whitesource, influx *whitesourceExecuteScanInflux) ([]piperutils.Path, error) {
     reportPaths := []piperutils.Path{}
     if !config.Reporting && !config.SecurityVulnerabilities {
         return reportPaths, nil
@@ -220,14 +220,14 @@ func checkAndReportScanResults(config *ScanOptions, scan *ws.Scan, utils whiteso
 
     checkErrors := []string{}
 
-    rPath, err := checkPolicyViolations(config, scan, sys, utils, reportPaths)
+    rPath, err := checkPolicyViolations(config, scan, sys, utils, reportPaths, influx)
     if err != nil {
         checkErrors = append(checkErrors, fmt.Sprint(err))
     }
     reportPaths = append(reportPaths, rPath)
 
     if config.SecurityVulnerabilities {
-        rPaths, err := checkSecurityViolations(config, scan, sys, utils)
+        rPaths, err := checkSecurityViolations(config, scan, sys, utils, influx)
         reportPaths = append(reportPaths, rPaths...)
         if err != nil {
             checkErrors = append(checkErrors, fmt.Sprint(err))
@@ -452,7 +452,7 @@ func executeScan(config *ScanOptions, scan *ws.Scan, utils whitesourceUtils) err
     return nil
 }
 
-func checkPolicyViolations(config *ScanOptions, scan *ws.Scan, sys whitesource, utils whitesourceUtils, reportPaths []piperutils.Path) (piperutils.Path, error) {
+func checkPolicyViolations(config *ScanOptions, scan *ws.Scan, sys whitesource, utils whitesourceUtils, reportPaths []piperutils.Path, influx *whitesourceExecuteScanInflux) (piperutils.Path, error) {
 
     policyViolationCount := 0
     for _, project := range scan.ScannedProjects() {
@@ -490,13 +490,14 @@ func checkPolicyViolations(config *ScanOptions, scan *ws.Scan, sys whitesource,
 
     if policyViolationCount > 0 {
         log.SetErrorCategory(log.ErrorCompliance)
+        influx.whitesource_data.fields.policy_violations = policyViolationCount
         return policyReport, fmt.Errorf("%v policy violation(s) found", policyViolationCount)
     }
 
     return policyReport, nil
 }
 
-func checkSecurityViolations(config *ScanOptions, scan *ws.Scan, sys whitesource, utils whitesourceUtils) ([]piperutils.Path, error) {
+func checkSecurityViolations(config *ScanOptions, scan *ws.Scan, sys whitesource, utils whitesourceUtils, influx *whitesourceExecuteScanInflux) ([]piperutils.Path, error) {
     var reportPaths []piperutils.Path
     // Check for security vulnerabilities and fail the build if cvssSeverityLimit threshold is crossed
     // convert config.CvssSeverityLimit to float64
@@ -511,7 +512,7 @@ func checkSecurityViolations(config *ScanOptions, scan *ws.Scan, sys whitesource
         project := ws.Project{Name: config.ProjectName, Token: config.ProjectToken}
         // ToDo: see if HTML report generation is really required here
         // we anyway need to do some refactoring here since config.ProjectToken != "" essentially indicates an aggregated project
-        if _, _, err := checkProjectSecurityViolations(cvssSeverityLimit, project, sys); err != nil {
+        if _, _, err := checkProjectSecurityViolations(cvssSeverityLimit, project, sys, influx); err != nil {
            return reportPaths, err
         }
     } else {
@@ -520,7 +521,7 @@ func checkSecurityViolations(config *ScanOptions, scan *ws.Scan, sys whitesource
         allAlerts := []ws.Alert{}
         for _, project := range scan.ScannedProjects() {
             // collect errors and aggregate vulnerabilities from all projects
-            if vulCount, alerts, err := checkProjectSecurityViolations(cvssSeverityLimit, project, sys); err != nil {
+            if vulCount, alerts, err := checkProjectSecurityViolations(cvssSeverityLimit, project, sys, influx); err != nil {
                 allAlerts = append(allAlerts, alerts...)
                 vulnerabilitiesCount += vulCount
                 errorsOccured = append(errorsOccured, fmt.Sprint(err))
@@ -544,7 +545,7 @@ func checkSecurityViolations(config *ScanOptions, scan *ws.Scan, sys whitesource
 }
 
 // checkSecurityViolations checks security violations and returns an error if the configured severity limit is crossed.
-func checkProjectSecurityViolations(cvssSeverityLimit float64, project ws.Project, sys whitesource) (int, []ws.Alert, error) {
+func checkProjectSecurityViolations(cvssSeverityLimit float64, project ws.Project, sys whitesource, influx *whitesourceExecuteScanInflux) (int, []ws.Alert, error) {
     // get project alerts (vulnerabilities)
     alerts, err := sys.GetProjectAlertsByType(project.Token, "SECURITY_VULNERABILITY")
     if err != nil {
@@ -552,6 +553,9 @@ func checkProjectSecurityViolations(cvssSeverityLimit float64, project ws.Projec
     }
 
     severeVulnerabilities, nonSevereVulnerabilities := countSecurityVulnerabilities(&alerts, cvssSeverityLimit)
+    influx.whitesource_data.fields.minor_vulnerabilities = nonSevereVulnerabilities
+    influx.whitesource_data.fields.major_vulnerabilities = severeVulnerabilities
+    influx.whitesource_data.fields.vulnerabilities = nonSevereVulnerabilities + severeVulnerabilities
     if nonSevereVulnerabilities > 0 {
         log.Entry().Warnf("WARNING: %v Open Source Software Security vulnerabilities with "+
             "CVSS score below threshold %.1f detected in project %s.", nonSevereVulnerabilities,
@@ -560,7 +564,6 @@ func checkProjectSecurityViolations(cvssSeverityLimit float64, project ws.Projec
         log.Entry().Infof("No Open Source Software Security vulnerabilities detected in project %s",
             project.Name)
     }
-
     // https://github.com/SAP/jenkins-library/blob/master/vars/whitesourceExecuteScan.groovy#L558
     if severeVulnerabilities > 0 {
         return severeVulnerabilities, alerts, fmt.Errorf("%v Open Source Software Security vulnerabilities with CVSS score greater "+
@@ -88,6 +88,45 @@ func (p *whitesourceExecuteScanCommonPipelineEnvironment) persist(path, resource
     }
 }
 
+type whitesourceExecuteScanInflux struct {
+    whitesource_data struct {
+        fields struct {
+            vulnerabilities int
+            major_vulnerabilities int
+            minor_vulnerabilities int
+            policy_violations int
+        }
+        tags struct {
+        }
+    }
+}
+
+func (i *whitesourceExecuteScanInflux) persist(path, resourceName string) {
+    measurementContent := []struct {
+        measurement string
+        valType string
+        name string
+        value interface{}
+    }{
+        {valType: config.InfluxField, measurement: "whitesource_data", name: "vulnerabilities", value: i.whitesource_data.fields.vulnerabilities},
+        {valType: config.InfluxField, measurement: "whitesource_data", name: "major_vulnerabilities", value: i.whitesource_data.fields.major_vulnerabilities},
+        {valType: config.InfluxField, measurement: "whitesource_data", name: "minor_vulnerabilities", value: i.whitesource_data.fields.minor_vulnerabilities},
+        {valType: config.InfluxField, measurement: "whitesource_data", name: "policy_violations", value: i.whitesource_data.fields.policy_violations},
+    }
+
+    errCount := 0
+    for _, metric := range measurementContent {
+        err := piperenv.SetResourceParameter(path, resourceName, filepath.Join(metric.measurement, fmt.Sprintf("%vs", metric.valType), metric.name), metric.value)
+        if err != nil {
+            log.Entry().WithError(err).Error("Error persisting influx environment.")
+            errCount++
+        }
+    }
+    if errCount > 0 {
+        log.Entry().Fatal("failed to persist Influx environment")
+    }
+}
+
 // WhitesourceExecuteScanCommand Execute a WhiteSource scan
 func WhitesourceExecuteScanCommand() *cobra.Command {
     const STEP_NAME = "whitesourceExecuteScan"
@@ -96,6 +135,7 @@ func WhitesourceExecuteScanCommand() *cobra.Command {
     var stepConfig whitesourceExecuteScanOptions
     var startTime time.Time
     var commonPipelineEnvironment whitesourceExecuteScanCommonPipelineEnvironment
+    var influx whitesourceExecuteScanInflux
 
     var createWhitesourceExecuteScanCmd = &cobra.Command{
         Use: STEP_NAME,
@@ -140,6 +180,7 @@ The step uses the so-called WhiteSource Unified Agent. For details please refer
             handler := func() {
                 config.RemoveVaultSecretFiles()
                 commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
+                influx.persist(GeneralConfig.EnvRootPath, "influx")
                 telemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
                 telemetryData.ErrorCategory = log.GetErrorCategory().String()
                 telemetry.Send(&telemetryData)
@@ -147,7 +188,7 @@ The step uses the so-called WhiteSource Unified Agent. For details please refer
             log.DeferExitHandler(handler)
             defer handler()
             telemetry.Initialize(GeneralConfig.NoTelemetry, STEP_NAME)
-            whitesourceExecuteScan(stepConfig, &telemetryData, &commonPipelineEnvironment)
+            whitesourceExecuteScan(stepConfig, &telemetryData, &commonPipelineEnvironment, &influx)
             telemetryData.ErrorCode = "0"
             log.Entry().Info("SUCCESS")
         },
@@ -604,6 +645,13 @@ func whitesourceExecuteScanMetadata() config.StepData {
                     {"Name": "custom/whitesourceProjectNames"},
                 },
             },
+            {
+                Name: "influx",
+                Type: "influx",
+                Parameters: []map[string]interface{}{
+                    {"Name": "whitesource_data"}, {"fields": []map[string]string{{"name": "vulnerabilities"}, {"name": "major_vulnerabilities"}, {"name": "minor_vulnerabilities"}, {"name": "policy_violations"}}},
+                },
+            },
         },
     },
 },
@@ -79,8 +79,9 @@ func TestRunWhitesourceExecuteScan(t *testing.T) {
         systemMock := ws.NewSystemMock("ignored")
         scan := newWhitesourceScan(&config)
         cpe := whitesourceExecuteScanCommonPipelineEnvironment{}
+        influx := whitesourceExecuteScanInflux{}
         // test
-        err := runWhitesourceExecuteScan(&config, scan, utilsMock, systemMock, &cpe)
+        err := runWhitesourceExecuteScan(&config, scan, utilsMock, systemMock, &cpe, &influx)
         // assert
         assert.EqualError(t, err, "no project with token 'no-such-project-token' found in Whitesource")
         assert.Equal(t, "", config.ProjectName)
@@ -106,8 +107,9 @@ func TestRunWhitesourceExecuteScan(t *testing.T) {
         systemMock.Alerts = []ws.Alert{}
         scan := newWhitesourceScan(&config)
         cpe := whitesourceExecuteScanCommonPipelineEnvironment{}
+        influx := whitesourceExecuteScanInflux{}
         // test
-        err := runWhitesourceExecuteScan(&config, scan, utilsMock, systemMock, &cpe)
+        err := runWhitesourceExecuteScan(&config, scan, utilsMock, systemMock, &cpe, &influx)
         // assert
         assert.NoError(t, err)
         // Retrieved project name is stored in scan.AggregateProjectName, but not in config.ProjectName
@@ -140,8 +142,9 @@ func TestCheckAndReportScanResults(t *testing.T) {
         scan := newWhitesourceScan(config)
         utils := newWhitesourceUtilsMock()
         system := ws.NewSystemMock(time.Now().Format(ws.DateTimeLayout))
+        influx := whitesourceExecuteScanInflux{}
         // test
-        _, err := checkAndReportScanResults(config, scan, utils, system)
+        _, err := checkAndReportScanResults(config, scan, utils, system, &influx)
         // assert
         assert.NoError(t, err)
         vPath := filepath.Join(ws.ReportsDirectory, "mock-project-vulnerability-report.txt")
@@ -158,8 +161,9 @@ func TestCheckAndReportScanResults(t *testing.T) {
         scan := newWhitesourceScan(config)
         utils := newWhitesourceUtilsMock()
         system := ws.NewSystemMock(time.Now().Format(ws.DateTimeLayout))
+        influx := whitesourceExecuteScanInflux{}
         // test
-        _, err := checkAndReportScanResults(config, scan, utils, system)
+        _, err := checkAndReportScanResults(config, scan, utils, system, &influx)
         // assert
         assert.EqualError(t, err, "failed to parse parameter cvssSeverityLimit (invalid) as floating point number: strconv.ParseFloat: parsing \"invalid\": invalid syntax")
     })
@@ -175,8 +179,9 @@ func TestCheckAndReportScanResults(t *testing.T) {
         scan := newWhitesourceScan(config)
         utils := newWhitesourceUtilsMock()
         system := ws.NewSystemMock(time.Now().Format(ws.DateTimeLayout))
+        influx := whitesourceExecuteScanInflux{}
         // test
-        _, err := checkAndReportScanResults(config, scan, utils, system)
+        _, err := checkAndReportScanResults(config, scan, utils, system, &influx)
         // assert
         assert.NoError(t, err)
     })
@@ -193,8 +198,9 @@ func TestCheckAndReportScanResults(t *testing.T) {
         scan := newWhitesourceScan(config)
         utils := newWhitesourceUtilsMock()
         system := ws.NewSystemMock(time.Now().Format(ws.DateTimeLayout))
+        influx := whitesourceExecuteScanInflux{}
         // test
-        _, err := checkAndReportScanResults(config, scan, utils, system)
+        _, err := checkAndReportScanResults(config, scan, utils, system, &influx)
         // assert
         assert.EqualError(t, err, "1 Open Source Software Security vulnerabilities with CVSS score greater or equal to 4.0 detected in project mock-project - 1")
     })
@@ -363,8 +369,9 @@ func TestCheckPolicyViolations(t *testing.T) {
             {Target: filepath.Join("whitesource", "report1.pdf")},
             {Target: filepath.Join("whitesource", "report2.pdf")},
         }
+        influx := whitesourceExecuteScanInflux{}
 
-        path, err := checkPolicyViolations(&config, scan, systemMock, utilsMock, reportPaths)
+        path, err := checkPolicyViolations(&config, scan, systemMock, utilsMock, reportPaths, &influx)
         assert.NoError(t, err)
         assert.Equal(t, filepath.Join(ws.ReportsDirectory, "whitesource-ip.json"), path.Target)
 
@@ -382,8 +389,9 @@ func TestCheckPolicyViolations(t *testing.T) {
         systemMock.Alerts = []ws.Alert{}
         utilsMock := newWhitesourceUtilsMock()
         reportPaths := []piperutils.Path{}
+        influx := whitesourceExecuteScanInflux{}
 
-        path, err := checkPolicyViolations(&config, scan, systemMock, utilsMock, reportPaths)
+        path, err := checkPolicyViolations(&config, scan, systemMock, utilsMock, reportPaths, &influx)
         assert.NoError(t, err)
 
         fileContent, _ := utilsMock.FileRead(path.Target)
@@ -405,8 +413,9 @@ func TestCheckPolicyViolations(t *testing.T) {
             {Target: "report1.pdf"},
             {Target: "report2.pdf"},
         }
+        influx := whitesourceExecuteScanInflux{}
 
-        path, err := checkPolicyViolations(&config, scan, systemMock, utilsMock, reportPaths)
+        path, err := checkPolicyViolations(&config, scan, systemMock, utilsMock, reportPaths, &influx)
         assert.Contains(t, fmt.Sprint(err), "2 policy violation(s) found")
 
         fileContent, _ := utilsMock.FileRead(path.Target)
@@ -423,8 +432,9 @@ func TestCheckPolicyViolations(t *testing.T) {
         systemMock.AlertError = fmt.Errorf("failed to read alerts")
         utilsMock := newWhitesourceUtilsMock()
         reportPaths := []piperutils.Path{}
+        influx := whitesourceExecuteScanInflux{}
 
-        _, err := checkPolicyViolations(&config, scan, systemMock, utilsMock, reportPaths)
+        _, err := checkPolicyViolations(&config, scan, systemMock, utilsMock, reportPaths, &influx)
         assert.Contains(t, fmt.Sprint(err), "failed to retrieve project policy alerts from WhiteSource")
     })
 
@@ -437,8 +447,9 @@ func TestCheckPolicyViolations(t *testing.T) {
         utilsMock := newWhitesourceUtilsMock()
         utilsMock.FileWriteError = fmt.Errorf("failed to write file")
         reportPaths := []piperutils.Path{}
+        influx := whitesourceExecuteScanInflux{}
 
-        _, err := checkPolicyViolations(&config, scan, systemMock, utilsMock, reportPaths)
+        _, err := checkPolicyViolations(&config, scan, systemMock, utilsMock, reportPaths, &influx)
         assert.Contains(t, fmt.Sprint(err), "failed to write policy violation report:")
     })
 }
@@ -457,8 +468,9 @@ func TestCheckSecurityViolations(t *testing.T) {
             {Vulnerability: ws.Vulnerability{Name: "vul1", CVSS3Score: 6.0}},
         }
         utilsMock := newWhitesourceUtilsMock()
+        influx := whitesourceExecuteScanInflux{}
 
-        reportPaths, err := checkSecurityViolations(&config, scan, systemMock, utilsMock)
+        reportPaths, err := checkSecurityViolations(&config, scan, systemMock, utilsMock, &influx)
         assert.NoError(t, err)
         fileContent, err := utilsMock.FileRead(reportPaths[0].Target)
         assert.NoError(t, err)
@@ -476,8 +488,9 @@ func TestCheckSecurityViolations(t *testing.T) {
             {Vulnerability: ws.Vulnerability{Name: "vul1", CVSS3Score: 6.0}},
         }
         utilsMock := newWhitesourceUtilsMock()
+        influx := whitesourceExecuteScanInflux{}
 
-        reportPaths, err := checkSecurityViolations(&config, scan, systemMock, utilsMock)
+        reportPaths, err := checkSecurityViolations(&config, scan, systemMock, utilsMock, &influx)
         assert.NoError(t, err)
         assert.Equal(t, 0, len(reportPaths))
     })
@@ -487,8 +500,9 @@ func TestCheckSecurityViolations(t *testing.T) {
         scan := newWhitesourceScan(&config)
         systemMock := ws.NewSystemMock("ignored")
         utilsMock := newWhitesourceUtilsMock()
+        influx := whitesourceExecuteScanInflux{}
 
-        _, err := checkSecurityViolations(&config, scan, systemMock, utilsMock)
+        _, err := checkSecurityViolations(&config, scan, systemMock, utilsMock, &influx)
         assert.Contains(t, fmt.Sprint(err), "failed to parse parameter cvssSeverityLimit")
 
     })
@@ -504,8 +518,9 @@ func TestCheckSecurityViolations(t *testing.T) {
             {Vulnerability: ws.Vulnerability{Name: "vul1", CVSS3Score: 6.0}},
         }
         utilsMock := newWhitesourceUtilsMock()
+        influx := whitesourceExecuteScanInflux{}
 
-        reportPaths, err := checkSecurityViolations(&config, scan, systemMock, utilsMock)
+        reportPaths, err := checkSecurityViolations(&config, scan, systemMock, utilsMock, &influx)
         assert.Contains(t, fmt.Sprint(err), "1 Open Source Software Security vulnerabilities")
         fileContent, err := utilsMock.FileRead(reportPaths[0].Target)
         assert.NoError(t, err)
@@ -523,8 +538,9 @@ func TestCheckSecurityViolations(t *testing.T) {
             {Vulnerability: ws.Vulnerability{Name: "vul1", CVSS3Score: 6.0}},
         }
         utilsMock := newWhitesourceUtilsMock()
+        influx := whitesourceExecuteScanInflux{}
 
-        reportPaths, err := checkSecurityViolations(&config, scan, systemMock, utilsMock)
+        reportPaths, err := checkSecurityViolations(&config, scan, systemMock, utilsMock, &influx)
         assert.Contains(t, fmt.Sprint(err), "1 Open Source Software Security vulnerabilities")
         assert.Equal(t, 0, len(reportPaths))
     })
@@ -536,8 +552,9 @@ func TestCheckProjectSecurityViolations(t *testing.T) {
     t.Run("success - no alerts", func(t *testing.T) {
         systemMock := ws.NewSystemMock("ignored")
         systemMock.Alerts = []ws.Alert{}
+        influx := whitesourceExecuteScanInflux{}
 
-        severeVulnerabilities, alerts, err := checkProjectSecurityViolations(7.0, project, systemMock)
+        severeVulnerabilities, alerts, err := checkProjectSecurityViolations(7.0, project, systemMock, &influx)
         assert.NoError(t, err)
         assert.Equal(t, 0, severeVulnerabilities)
         assert.Equal(t, 0, len(alerts))
@@ -549,8 +566,9 @@ func TestCheckProjectSecurityViolations(t *testing.T) {
             {Vulnerability: ws.Vulnerability{CVSS3Score: 7}},
             {Vulnerability: ws.Vulnerability{CVSS3Score: 6}},
         }
+        influx := whitesourceExecuteScanInflux{}
 
-        severeVulnerabilities, alerts, err := checkProjectSecurityViolations(7.0, project, systemMock)
+        severeVulnerabilities, alerts, err := checkProjectSecurityViolations(7.0, project, systemMock, &influx)
         assert.Contains(t, fmt.Sprint(err), "1 Open Source Software Security vulnerabilities")
         assert.Equal(t, 1, severeVulnerabilities)
         assert.Equal(t, 2, len(alerts))
@@ -559,7 +577,9 @@ func TestCheckProjectSecurityViolations(t *testing.T) {
     t.Run("error - WhiteSource failure", func(t *testing.T) {
         systemMock := ws.NewSystemMock("ignored")
         systemMock.AlertError = fmt.Errorf("failed to read alerts")
-        _, _, err := checkProjectSecurityViolations(7.0, project, systemMock)
+        influx := whitesourceExecuteScanInflux{}
+
+        _, _, err := checkProjectSecurityViolations(7.0, project, systemMock, &influx)
         assert.Contains(t, fmt.Sprint(err), "failed to retrieve project alerts from WhiteSource")
     })
 
@@ -146,8 +146,8 @@ type DetailedResult struct {
     ScanStart string `xml:"ScanStart,attr"`
     Preset string `xml:"Preset,attr"`
     ScanTime string `xml:"ScanTime,attr"`
-    LinesOfCodeScanned string `xml:"LinesOfCodeScanned,attr"`
-    FilesScanned string `xml:"FilesScanned,attr"`
+    LinesOfCodeScanned int `xml:"LinesOfCodeScanned,attr"`
+    FilesScanned int `xml:"FilesScanned,attr"`
     ReportCreationTime string `xml:"ReportCreationTime,attr"`
     Team string `xml:"Team,attr"`
    CheckmarxVersion string `xml:"CheckmarxVersion,attr"`
@@ -247,33 +247,65 @@ spec:
         - name: checkmarx_data
           fields:
             - name: high_issues
-            - name: high_not_false_positive
+              type: int
+            - name: high_not_false_postive
+              type: int
             - name: high_not_exploitable
+              type: int
             - name: high_confirmed
+              type: int
             - name: high_urgent
+              type: int
             - name: high_proposed_not_exploitable
+              type: int
             - name: high_to_verify
+              type: int
             - name: medium_issues
-            - name: medium_not_false_positive
+              type: int
+            - name: medium_not_false_postive
+              type: int
             - name: medium_not_exploitable
+              type: int
             - name: medium_confirmed
+              type: int
             - name: medium_urgent
+              type: int
             - name: medium_proposed_not_exploitable
+              type: int
             - name: medium_to_verify
+              type: int
             - name: low_issues
-            - name: low_not_false_positive
+              type: int
+            - name: low_not_false_postive
+              type: int
             - name: low_not_exploitable
+              type: int
             - name: low_confirmed
+              type: int
             - name: low_urgent
+              type: int
             - name: low_proposed_not_exploitable
+              type: int
             - name: low_to_verify
+              type: int
             - name: information_issues
-            - name: information_not_false_positive
+              type: int
+            - name: information_not_false_postive
+              type: int
             - name: information_not_exploitable
+              type: int
             - name: information_confirmed
+              type: int
             - name: information_urgent
+              type: int
             - name: information_proposed_not_exploitable
+              type: int
             - name: information_to_verify
+              type: int
+            - name: lines_of_code_scanned
+              type: int
+            - name: files_scanned
+              type: int
             - name: initiator_name
             - name: owner
             - name: scan_id
@@ -283,8 +315,6 @@ spec:
             - name: team_full_path_on_report_date
             - name: scan_start
             - name: scan_time
-            - name: lines_of_code_scanned
-            - name: files_scanned
             - name: checkmarx_version
             - name: scan_type
             - name: preset
@@ -221,9 +221,15 @@ spec:
       params:
         - name: protecode_data
           fields:
-            - name: historical_vulnerabilities
-            - name: triaged_vulnerabilities
             - name: excluded_vulnerabilities
-            - name: minor_vulnerabilities
+              type: int
+            - name: historical_vulnerabilities
+              type: int
             - name: major_vulnerabilities
+              type: int
+            - name: minor_vulnerabilities
+              type: int
+            - name: triaged_vulnerabilities
+              type: int
             - name: vulnerabilities
+              type: int
@@ -471,6 +471,19 @@ spec:
       params:
         - name: custom/whitesourceProjectNames
           type: "[]string"
+    - name: influx
+      type: influx
+      params:
+        - name: whitesource_data
+          fields:
+            - name: vulnerabilities
+              type: int
+            - name: major_vulnerabilities
+              type: int
+            - name: minor_vulnerabilities
+              type: int
+            - name: policy_violations
+              type: int
   containers:
     - image: buildpack-deps:stretch-curl
       workingDir: /tmp
@@ -5,6 +5,7 @@ import com.sap.piper.JenkinsUtils
 import com.sap.piper.MapUtils
 import com.sap.piper.PiperGoUtils
 import com.sap.piper.Utils
+import com.sap.piper.analytics.InfluxData
 import groovy.transform.Field
 
 import static com.sap.piper.Prerequisites.checkScript
@@ -65,13 +66,17 @@ void call(Map parameters = [:], String stepName, String metadataFile, List crede
         dockerWrapper(script, stepName, config) {
             handleErrorDetails(stepName) {
                 script.commonPipelineEnvironment.writeToDisk(script)
-                try {
-                    credentialWrapper(config, credentialInfo) {
-                        sh "${piperGoPath} ${stepName}${defaultConfigArgs}${customConfigArg}"
+                try{
+                    try {
+                        credentialWrapper(config, credentialInfo) {
+                            sh "${piperGoPath} ${stepName}${defaultConfigArgs}${customConfigArg}"
+                        }
+                    } finally {
+                        jenkinsUtils.handleStepResults(stepName, failOnMissingReports, failOnMissingLinks)
+                        script.commonPipelineEnvironment.readFromDisk(script)
                     }
-                } finally {
-                    jenkinsUtils.handleStepResults(stepName, failOnMissingReports, failOnMissingLinks)
-                    script.commonPipelineEnvironment.readFromDisk(script)
+                }finally{
+                    InfluxData.readFromDisk(script)
                 }
             }
         }