	fix(protecode): respect failOnSevereVulnerabilities parameter (#1969)
* fix redundant type issues
* cleanup
* extract report function for protecode package
* use speaking status constant for API results
* remove unconsidered return value
* correct switch statement
* handle severe vulnerabilities
* Apply suggestions from code review

  Co-authored-by: Stephan Aßmus <stephan.assmus@sap.com>

* correct test name
* return errors from WriteReport function
* expose ReportData struct
* set Error Category
* refactor constant visibility
* change type name
* describe type
* change type name
* fail after report generation
* do not fail on report write errors
* add error as return value
* fix typo
* use require statements
* assert major vulnerabilities

Co-authored-by: Stephan Aßmus <stephan.assmus@sap.com>
committed by GitHub
parent e84c417555
commit b59bac7892
			| @@ -20,51 +20,38 @@ import ( | ||||
| 	"github.com/SAP/jenkins-library/pkg/telemetry" | ||||
| ) | ||||
|  | ||||
| type protecodeData struct { | ||||
| 	Target                      string           `json:"target,omitempty"` | ||||
| 	Mandatory                   bool             `json:"mandatory,omitempty"` | ||||
| 	ProductID                   string           `json:"productID,omitempty"` | ||||
| 	ServerURL                   string           `json:"serverUrl,omitempty"` | ||||
| 	FailOnSevereVulnerabilities bool             `json:"failOnSevereVulnerabilities,omitempty"` | ||||
| 	ExcludeCVEs                 string           `json:"excludeCVEs,omitempty"` | ||||
| 	Count                       string           `json:"count,omitempty"` | ||||
| 	Cvss2GreaterOrEqualSeven    string           `json:"cvss2GreaterOrEqualSeven,omitempty"` | ||||
| 	Cvss3GreaterOrEqualSeven    string           `json:"cvss3GreaterOrEqualSeven,omitempty"` | ||||
| 	ExcludedVulnerabilities     string           `json:"excludedVulnerabilities,omitempty"` | ||||
| 	TriagedVulnerabilities      string           `json:"triagedVulnerabilities,omitempty"` | ||||
| 	HistoricalVulnerabilities   string           `json:"historicalVulnerabilities,omitempty"` | ||||
| 	Vulnerabilities             []protecode.Vuln `json:"Vulnerabilities,omitempty"` | ||||
| } | ||||
| const ( | ||||
| 	webReportPath  = "%s/products/%v/" | ||||
| 	scanResultFile = "protecodescan_vulns.json" | ||||
| 	stepResultFile = "protecodeExecuteScan.json" | ||||
| ) | ||||
|  | ||||
| var reportPath = "./" | ||||
| var cachePath = "./cache" | ||||
| var cacheProtecodeImagePath = "/protecode/Image" | ||||
| var cacheProtecodePath = "/protecode" | ||||
|  | ||||
| func protecodeExecuteScan(config protecodeExecuteScanOptions, telemetryData *telemetry.CustomData, influx *protecodeExecuteScanInflux) error { | ||||
| func protecodeExecuteScan(config protecodeExecuteScanOptions, telemetryData *telemetry.CustomData, influx *protecodeExecuteScanInflux) { | ||||
| 	c := command.Command{} | ||||
| 	// reroute command output to loging framework | ||||
| 	c.Stdout(log.Writer()) | ||||
| 	c.Stderr(log.Writer()) | ||||
|  | ||||
| 	dClient := createDockerClient(&config) | ||||
| 	return runProtecodeScan(&config, influx, dClient) | ||||
| 	if err := runProtecodeScan(&config, influx, dClient); err != nil { | ||||
| 		log.Entry().WithError(err).Fatal("Execution failed") | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func runProtecodeScan(config *protecodeExecuteScanOptions, influx *protecodeExecuteScanInflux, dClient piperDocker.Download) error { | ||||
|  | ||||
| 	correctDockerConfigEnvVar(config) | ||||
|  | ||||
| 	var fileName, filePath string | ||||
| 	//create client for sending api request | ||||
| 	log.Entry().Debug("Create protecode client") | ||||
| 	client := createClient(config) | ||||
|  | ||||
| 	if len(config.FetchURL) <= 0 { | ||||
|  | ||||
| 		log.Entry().Debugf("Get docker image: %v, %v, %v, %v", config.ScanImage, config.DockerRegistryURL, config.FilePath, config.IncludeLayers) | ||||
| 		fileName, filePath = getDockerImage(dClient, config) | ||||
|  | ||||
| 		if len(config.FilePath) <= 0 { | ||||
| 			(*config).FilePath = filePath | ||||
| 			log.Entry().Debugf("Filepath for upload image: %v", config.FilePath) | ||||
| @@ -72,16 +59,13 @@ func runProtecodeScan(config *protecodeExecuteScanOptions, influx *protecodeExec | ||||
| 	} | ||||
|  | ||||
| 	log.Entry().Debug("Execute protecode scan") | ||||
| 	parsedResult := executeProtecodeScan(client, config, fileName, writeReportToFile) | ||||
|  | ||||
| 	log.Entry().Debug("Write influx data") | ||||
| 	setInfluxData(influx, parsedResult) | ||||
| 	if err := executeProtecodeScan(influx, client, config, fileName, writeReportToFile); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	defer os.Remove(config.FilePath) | ||||
|  | ||||
| 	deletePath := filepath.Join(cachePath, cacheProtecodePath) | ||||
| 	err := os.RemoveAll(deletePath) | ||||
| 	if err != nil { | ||||
| 	if err := os.RemoveAll(filepath.Join(cachePath, cacheProtecodePath)); err != nil { | ||||
| 		log.Entry().Warnf("Error during cleanup folder %v", err) | ||||
| 	} | ||||
|  | ||||
| @@ -158,9 +142,7 @@ func tarImage(config *protecodeExecuteScanOptions) (*os.File, string) { | ||||
| 	return nil, config.ScanImage | ||||
| } | ||||
|  | ||||
| func executeProtecodeScan(client protecode.Protecode, config *protecodeExecuteScanOptions, fileName string, writeReportToFile func(resp io.ReadCloser, reportFileName string) error) map[string]int { | ||||
|  | ||||
| 	var parsedResult map[string]int = make(map[string]int) | ||||
| func executeProtecodeScan(influx *protecodeExecuteScanInflux, client protecode.Protecode, config *protecodeExecuteScanOptions, fileName string, writeReportToFile func(resp io.ReadCloser, reportFileName string) error) error { | ||||
| 	//load existing product by filename | ||||
| 	log.Entry().Debugf("Load existing product Group:%v Reuse:%v", config.Group, config.ReuseExisting) | ||||
| 	productID := client.LoadExistingProduct(config.Group, config.ReuseExisting) | ||||
| @@ -168,59 +150,73 @@ func executeProtecodeScan(client protecode.Protecode, config *protecodeExecuteSc | ||||
| 	// check if no existing is found or reuse existing is false | ||||
| 	productID = uploadScanOrDeclareFetch(*config, productID, client, fileName) | ||||
| 	if productID <= 0 { | ||||
| 		log.Entry().Fatalf("The product id is not valid (product id %v <= zero)", productID) | ||||
| 		return fmt.Errorf("the product id is not valid '%d'", productID) | ||||
| 	} | ||||
| 	//pollForResult | ||||
| 	log.Entry().Debugf("Poll for scan result %v", productID) | ||||
| 	result := client.PollForResult(productID, config.TimeoutMinutes) | ||||
|  | ||||
| 	// write results to file | ||||
| 	jsonData, _ := json.Marshal(result) | ||||
| 	filePath := filepath.Join(reportPath, "protecodescan_vulns.json") | ||||
| 	ioutil.WriteFile(filePath, jsonData, 0644) | ||||
| 	ioutil.WriteFile(filepath.Join(reportPath, scanResultFile), jsonData, 0644) | ||||
|  | ||||
| 	//check if result is ok else notify | ||||
| 	if len(result.Result.Status) > 0 && result.Result.Status == "F" { | ||||
| 		log.Entry().Fatalf("Please check the log and protecode backend for more details. URL: %v/products/%v", config.ServerURL, productID) | ||||
| 	if protecode.HasFailed(result) { | ||||
| 		log.SetErrorCategory(log.ErrorService) | ||||
| 		return fmt.Errorf("protecode scan failed: %v/products/%v", config.ServerURL, productID) | ||||
| 	} | ||||
|  | ||||
| 	//loadReport | ||||
| 	log.Entry().Debugf("Load report %v for %v", config.ReportFileName, productID) | ||||
| 	resp := client.LoadReport(config.ReportFileName, productID) | ||||
|  | ||||
| 	//save report to filesystem | ||||
| 	err := writeReportToFile(*resp, config.ReportFileName) | ||||
| 	if err != nil { | ||||
| 		return parsedResult | ||||
| 	if err := writeReportToFile(*resp, config.ReportFileName); err != nil { | ||||
| 		log.Entry().Warningf("failed to write report: %s", err) | ||||
| 	} | ||||
| 	//clean scan from server | ||||
| 	log.Entry().Debugf("Delete scan %v for %v", config.CleanupMode, productID) | ||||
| 	client.DeleteScan(config.CleanupMode, productID) | ||||
|  | ||||
| 	//count vulnerabilities | ||||
| 	log.Entry().Debug("Parse scan reult") | ||||
| 	log.Entry().Debug("Parse scan result") | ||||
| 	parsedResult, vulns := client.ParseResultForInflux(result.Result, config.ExcludeCVEs) | ||||
|  | ||||
| 	log.Entry().Debug("Write report to filesystem") | ||||
| 	writeReportDataToJSONFile(config, parsedResult, productID, vulns, ioutil.WriteFile) | ||||
| 	if err := protecode.WriteReport( | ||||
| 		protecode.ReportData{ | ||||
| 			ServerURL:                   config.ServerURL, | ||||
| 			FailOnSevereVulnerabilities: config.FailOnSevereVulnerabilities, | ||||
| 			ExcludeCVEs:                 config.ExcludeCVEs, | ||||
| 			Target:                      config.ReportFileName, | ||||
| 			Vulnerabilities:             vulns, | ||||
| 			ProductID:                   fmt.Sprintf("%v", productID), | ||||
| 		}, reportPath, stepResultFile, parsedResult, ioutil.WriteFile); err != nil { | ||||
| 		log.Entry().Warningf("failed to write report: %v", err) | ||||
| 	} | ||||
|  | ||||
| 	log.Entry().Debug("Write influx data") | ||||
| 	setInfluxData(influx, parsedResult) | ||||
|  | ||||
| 	// write reports JSON | ||||
| 	reports := []StepResults.Path{ | ||||
| 		{Target: config.ReportFileName, Mandatory: true}, | ||||
| 		{Target: "protecodeExecuteScan.json", Mandatory: true}, | ||||
| 		{Target: "protecodescan_vulns.json", Mandatory: true}, | ||||
| 		{Target: stepResultFile, Mandatory: true}, | ||||
| 		{Target: scanResultFile, Mandatory: true}, | ||||
| 	} | ||||
| 	// write links JSON | ||||
| 	links := []StepResults.Path{ | ||||
| 		{Name: "Protecode WebUI", Target: fmt.Sprintf("%s/products/%v/", config.ServerURL, productID)}, | ||||
| 		{Name: "Protecode WebUI", Target: fmt.Sprintf(webReportPath, config.ServerURL, productID)}, | ||||
| 		{Name: "Protecode Report", Target: path.Join("artifact", config.ReportFileName), Scope: "job"}, | ||||
| 	} | ||||
|  | ||||
| 	StepResults.PersistReportsAndLinks("protecodeExecuteScan", "", reports, links) | ||||
|  | ||||
| 	return parsedResult | ||||
| 	if config.FailOnSevereVulnerabilities && protecode.HasSevereVulnerabilities(result.Result, config.ExcludeCVEs) { | ||||
| 		log.SetErrorCategory(log.ErrorCompliance) | ||||
| 		return fmt.Errorf("the product is not compliant") | ||||
| 	} | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func setInfluxData(influx *protecodeExecuteScanInflux, result map[string]int) { | ||||
|  | ||||
| 	influx.protecode_data.fields.historical_vulnerabilities = fmt.Sprintf("%v", result["historical_vulnerabilities"]) | ||||
| 	influx.protecode_data.fields.triaged_vulnerabilities = fmt.Sprintf("%v", result["triaged_vulnerabilities"]) | ||||
| 	influx.protecode_data.fields.excluded_vulnerabilities = fmt.Sprintf("%v", result["excluded_vulnerabilities"]) | ||||
| @@ -229,32 +225,6 @@ func setInfluxData(influx *protecodeExecuteScanInflux, result map[string]int) { | ||||
| 	influx.protecode_data.fields.vulnerabilities = fmt.Sprintf("%v", result["vulnerabilities"]) | ||||
| } | ||||
|  | ||||
| func writeReportDataToJSONFile(config *protecodeExecuteScanOptions, result map[string]int, productID int, vulns []protecode.Vuln, writeToFile func(f string, d []byte, p os.FileMode) error) { | ||||
|  | ||||
| 	protecodeData := protecodeData{} | ||||
| 	protecodeData.ServerURL = config.ServerURL | ||||
| 	protecodeData.FailOnSevereVulnerabilities = config.FailOnSevereVulnerabilities | ||||
| 	protecodeData.ExcludeCVEs = config.ExcludeCVEs | ||||
| 	protecodeData.Target = config.ReportFileName | ||||
| 	protecodeData.Mandatory = true | ||||
| 	protecodeData.ProductID = fmt.Sprintf("%v", productID) | ||||
| 	protecodeData.Count = fmt.Sprintf("%v", result["count"]) | ||||
| 	protecodeData.Cvss2GreaterOrEqualSeven = fmt.Sprintf("%v", result["cvss2GreaterOrEqualSeven"]) | ||||
| 	protecodeData.Cvss3GreaterOrEqualSeven = fmt.Sprintf("%v", result["cvss3GreaterOrEqualSeven"]) | ||||
| 	protecodeData.ExcludedVulnerabilities = fmt.Sprintf("%v", result["excluded_vulnerabilities"]) | ||||
| 	protecodeData.TriagedVulnerabilities = fmt.Sprintf("%v", result["triaged_vulnerabilities"]) | ||||
| 	protecodeData.HistoricalVulnerabilities = fmt.Sprintf("%v", result["historical_vulnerabilities"]) | ||||
| 	protecodeData.Vulnerabilities = vulns | ||||
|  | ||||
| 	jsonData, _ := json.Marshal(protecodeData) | ||||
|  | ||||
| 	log.Entry().Infof("Protecode scan info, %v of which %v had a CVSS v2 score >= 7.0 and %v had a CVSS v3 score >= 7.0.\n %v vulnerabilities were excluded via configuration (%v) and %v vulnerabilities were triaged via the webUI.\nIn addition %v historical vulnerabilities were spotted. \n\n Vulnerabilities: %v", | ||||
| 		protecodeData.Count, protecodeData.Cvss2GreaterOrEqualSeven, protecodeData.Cvss3GreaterOrEqualSeven, protecodeData.ExcludedVulnerabilities, protecodeData.ExcludeCVEs, protecodeData.TriagedVulnerabilities, protecodeData.HistoricalVulnerabilities, protecodeData.Vulnerabilities) | ||||
|  | ||||
| 	filePath := filepath.Join(reportPath, "protecodeExecuteScan.json") | ||||
| 	writeToFile(filePath, jsonData, 0644) | ||||
| } | ||||
|  | ||||
| func createClient(config *protecodeExecuteScanOptions) protecode.Protecode { | ||||
|  | ||||
| 	var duration time.Duration = time.Duration(time.Minute * 1) | ||||
| @@ -282,6 +252,7 @@ func createClient(config *protecodeExecuteScanOptions) protecode.Protecode { | ||||
|  | ||||
| 	return pc | ||||
| } | ||||
|  | ||||
| func createDockerClient(config *protecodeExecuteScanOptions) piperDocker.Download { | ||||
|  | ||||
| 	dClientOptions := piperDocker.ClientOptions{ImageName: config.ScanImage, RegistryURL: config.DockerRegistryURL, LocalPath: config.FilePath, IncludeLayers: config.IncludeLayers} | ||||
| @@ -292,14 +263,12 @@ func createDockerClient(config *protecodeExecuteScanOptions) piperDocker.Downloa | ||||
| } | ||||
|  | ||||
| func uploadScanOrDeclareFetch(config protecodeExecuteScanOptions, productID int, client protecode.Protecode, fileName string) int { | ||||
|  | ||||
| 	//check if the LoadExistingProduct) before returns an valid product id, than scip this | ||||
| 	if !hasExisting(productID, config.ReuseExisting) { | ||||
| 		if len(config.FetchURL) > 0 { | ||||
| 			log.Entry().Debugf("Declare fetch url %v", config.FetchURL) | ||||
| 			resultData := client.DeclareFetchURL(config.CleanupMode, config.Group, config.FetchURL) | ||||
| 			productID = resultData.Result.ProductID | ||||
|  | ||||
| 		} else { | ||||
| 			log.Entry().Debugf("Upload file path: %v", config.FilePath) | ||||
| 			if len(config.FilePath) <= 0 { | ||||
| @@ -319,7 +288,6 @@ func uploadScanOrDeclareFetch(config protecodeExecuteScanOptions, productID int, | ||||
| 			productID = resultData.Result.ProductID | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return productID | ||||
| } | ||||
|  | ||||
|   | ||||
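The core of the fix is visible at the end of `executeProtecodeScan` above: instead of aborting via `log.Entry().Fatalf`, the function now returns an error tagged with `log.ErrorCompliance`, and only when `config.FailOnSevereVulnerabilities` is enabled; reports and influx data are written before that check. The following is a minimal sketch of that control flow using simplified stand-in types (not the actual Piper step code):

```go
package main

import (
	"fmt"
	"log"
)

// simplified stand-ins for the step's configuration and parsed scan result (illustrative only)
type scanConfig struct {
	FailOnSevereVulnerabilities bool
	ExcludeCVEs                 string
}

type scanResult struct {
	severeFindings int
}

// hasSevere mimics the role of protecode.HasSevereVulnerabilities on the simplified result type.
func hasSevere(r scanResult, excludeCVEs string) bool {
	return r.severeFindings > 0
}

// finishScan mirrors the new failure behavior: report generation would happen first,
// and a compliance error is returned only when the parameter is enabled.
func finishScan(cfg scanConfig, result scanResult) error {
	// report writing and influx data are produced even when the step ultimately fails
	if cfg.FailOnSevereVulnerabilities && hasSevere(result, cfg.ExcludeCVEs) {
		return fmt.Errorf("the product is not compliant")
	}
	return nil
}

func main() {
	// with the parameter enabled, severe findings turn into a returned error,
	// which the step entry point converts into a fatal log message
	if err := finishScan(scanConfig{FailOnSevereVulnerabilities: true}, scanResult{severeFindings: 2}); err != nil {
		log.Fatalf("Execution failed: %v", err)
	}
}
```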
| @@ -86,19 +86,13 @@ func (c *DockerClientMock) TarImage(writer io.Writer, image pkgutil.Image) error | ||||
| } | ||||
|  | ||||
| func TestRunProtecodeScan(t *testing.T) { | ||||
|  | ||||
| 	requestURI := "" | ||||
| 	dir, err := ioutil.TempDir("", "t") | ||||
| 	if err != nil { | ||||
| 		t.Fatal("Failed to create temporary directory") | ||||
| 	} | ||||
| 	require.NoError(t, err, "Failed to create temporary directory") | ||||
| 	// clean up tmp dir | ||||
| 	defer func() { _ = os.RemoveAll(dir) }() | ||||
|  | ||||
| 	testFile, err := ioutil.TempFile(dir, "t.tar") | ||||
| 	if err != nil { | ||||
| 		t.FailNow() | ||||
| 	} | ||||
| 	require.NoError(t, err) | ||||
| 	fileName := filepath.Base(testFile.Name()) | ||||
| 	path := strings.ReplaceAll(testFile.Name(), fileName, "") | ||||
|  | ||||
| @@ -109,9 +103,7 @@ func TestRunProtecodeScan(t *testing.T) { | ||||
| 		if requestURI == "/api/product/4486/" || requestURI == "/api/product/4711/" { | ||||
| 			violations := filepath.Join("testdata/TestProtecode", "protecode_result_violations.json") | ||||
| 			byteContent, err := ioutil.ReadFile(violations) | ||||
| 			if err != nil { | ||||
| 				t.Fatalf("failed reading %v", violations) | ||||
| 			} | ||||
| 			require.NoErrorf(t, err, "failed reading %v", violations) | ||||
| 			response := protecode.ResultData{Result: protecode.Result{ProductID: 4711, ReportURL: requestURI}} | ||||
| 			err = json.Unmarshal(byteContent, &response) | ||||
|  | ||||
| @@ -120,9 +112,7 @@ func TestRunProtecodeScan(t *testing.T) { | ||||
| 		} else if requestURI == "/api/fetch/" { | ||||
| 			violations := filepath.Join("testdata/TestProtecode", "protecode_result_violations.json") | ||||
| 			byteContent, err := ioutil.ReadFile(violations) | ||||
| 			if err != nil { | ||||
| 				t.Fatalf("failed reading %v", violations) | ||||
| 			} | ||||
| 			require.NoErrorf(t, err, "failed reading %v", violations) | ||||
| 			response := protecode.ResultData{Result: protecode.Result{ProductID: 4486, ReportURL: requestURI}} | ||||
| 			err = json.Unmarshal(byteContent, &response) | ||||
|  | ||||
| @@ -176,7 +166,6 @@ func TestHandleArtifactVersion(t *testing.T) { | ||||
| 		version string | ||||
| 		want    string | ||||
| 	}{ | ||||
|  | ||||
| 		{"1.0.0-20200131085038+eeb7c1033339bfd404d21ec5e7dc05c80e9e985e", "1"}, | ||||
| 		{"2.20.20-20200131085038+eeb7c1033339bfd404d21ec5e7dc05c80e9e985e", "2"}, | ||||
| 		{"3.20.20-20200131085038+eeb7c1033339bfd404d21ec5e7dc05c80e9e985e", "3"}, | ||||
| @@ -187,7 +176,6 @@ func TestHandleArtifactVersion(t *testing.T) { | ||||
| 	} | ||||
|  | ||||
| 	for _, c := range cases { | ||||
|  | ||||
| 		got := handleArtifactVersion(c.version) | ||||
| 		assert.Equal(t, c.want, got) | ||||
| 	} | ||||
| @@ -227,36 +215,10 @@ func TestCreateDockerClient(t *testing.T) { | ||||
| 	} | ||||
| } | ||||
|  | ||||
| var fileContent string | ||||
|  | ||||
| func writeToFileMock(f string, d []byte, p os.FileMode) error { | ||||
| 	fileContent = string(d) | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func TestWriteReportDataToJSONFile(t *testing.T) { | ||||
|  | ||||
| 	expected := "{\"target\":\"REPORTFILENAME\",\"mandatory\":true,\"productID\":\"4711\",\"serverUrl\":\"DUMMYURL\",\"count\":\"0\",\"cvss2GreaterOrEqualSeven\":\"4\",\"cvss3GreaterOrEqualSeven\":\"3\",\"excludedVulnerabilities\":\"2\",\"triagedVulnerabilities\":\"0\",\"historicalVulnerabilities\":\"1\",\"Vulnerabilities\":[{\"cve\":\"Vulnerability\",\"cvss\":2.5,\"cvss3_score\":\"5.5\"}]}" | ||||
|  | ||||
| 	var parsedResult map[string]int = make(map[string]int) | ||||
| 	parsedResult["historical_vulnerabilities"] = 1 | ||||
| 	parsedResult["excluded_vulnerabilities"] = 2 | ||||
| 	parsedResult["cvss3GreaterOrEqualSeven"] = 3 | ||||
| 	parsedResult["cvss2GreaterOrEqualSeven"] = 4 | ||||
| 	parsedResult["vulnerabilities"] = 5 | ||||
|  | ||||
| 	config := protecodeExecuteScanOptions{ServerURL: "DUMMYURL", ReportFileName: "REPORTFILENAME"} | ||||
|  | ||||
| 	writeReportDataToJSONFile(&config, parsedResult, 4711, []protecode.Vuln{{"Vulnerability", 2.5, "5.5"}}, writeToFileMock) | ||||
| 	assert.Equal(t, fileContent, expected, "content should be not empty") | ||||
| } | ||||
|  | ||||
| func TestUploadScanOrDeclareFetch(t *testing.T) { | ||||
|  | ||||
| 	// init | ||||
| 	testFile, err := ioutil.TempFile("", "testFileUpload") | ||||
| 	if err != nil { | ||||
| 		t.FailNow() | ||||
| 	} | ||||
| 	require.NoError(t, err) | ||||
| 	defer os.RemoveAll(testFile.Name()) // clean up | ||||
| 	fileName := filepath.Base(testFile.Name()) | ||||
| 	path := strings.ReplaceAll(testFile.Name(), fileName, "") | ||||
| @@ -302,10 +264,10 @@ func TestUploadScanOrDeclareFetch(t *testing.T) { | ||||
| 	} | ||||
|  | ||||
| 	for _, c := range cases { | ||||
|  | ||||
| 		// test | ||||
| 		config := protecodeExecuteScanOptions{ReuseExisting: c.reuse, CleanupMode: c.clean, Group: c.group, FetchURL: c.fetchURL, FilePath: c.filePath} | ||||
| 		got := uploadScanOrDeclareFetch(config, 0, pc, fileName) | ||||
|  | ||||
| 		// assert | ||||
| 		assert.Equal(t, c.want, got) | ||||
| 	} | ||||
| } | ||||
| @@ -317,9 +279,7 @@ func writeReportToFileMock(resp io.ReadCloser, reportFileName string) error { | ||||
| func TestExecuteProtecodeScan(t *testing.T) { | ||||
| 	testDataFile := filepath.Join("testdata", "TestProtecode", "protecode_result_violations.json") | ||||
| 	violationsAbsPath, err := filepath.Abs(testDataFile) | ||||
| 	if err != nil { | ||||
| 		t.Fatalf("failed to obtain absolute path to test data with violations: %v", err) | ||||
| 	} | ||||
| 	require.NoErrorf(t, err, "failed to obtain absolute path to test data with violations: %v", err) | ||||
|  | ||||
| 	requestURI := "" | ||||
| 	server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { | ||||
| @@ -328,9 +288,7 @@ func TestExecuteProtecodeScan(t *testing.T) { | ||||
|  | ||||
| 		if requestURI == "/api/product/4711/" { | ||||
| 			byteContent, err := ioutil.ReadFile(violationsAbsPath) | ||||
| 			if err != nil { | ||||
| 				t.Fatalf("failed reading %v", violationsAbsPath) | ||||
| 			} | ||||
| 			require.NoErrorf(t, err, "failed reading %v", violationsAbsPath) | ||||
| 			response := protecode.ResultData{} | ||||
| 			err = json.Unmarshal(byteContent, &response) | ||||
|  | ||||
| @@ -364,43 +322,33 @@ func TestExecuteProtecodeScan(t *testing.T) { | ||||
| 	} | ||||
|  | ||||
| 	resetDir, err := os.Getwd() | ||||
| 	if err != nil { | ||||
| 		t.Fatalf("Failed to get current directory: %v", err) | ||||
| 	} | ||||
| 	require.NoErrorf(t, err, "Failed to get current directory: %v", err) | ||||
| 	defer func() { _ = os.Chdir(resetDir) }() | ||||
|  | ||||
| 	for _, c := range cases { | ||||
|  | ||||
| 		// init | ||||
| 		dir, err := ioutil.TempDir("", "t") | ||||
| 		if err != nil { | ||||
| 			t.Fatalf("Failed to create temporary directory: %v", err) | ||||
| 		} | ||||
|  | ||||
| 		require.NoErrorf(t, err, "Failed to create temporary directory: %v", err) | ||||
| 		// clean up tmp dir | ||||
| 		defer func() { _ = os.RemoveAll(dir) }() | ||||
|  | ||||
| 		// change into tmp dir and write test data | ||||
| 		err = os.Chdir(dir) | ||||
| 		if err != nil { | ||||
| 			t.Fatalf("Failed to change into temporary directory: %v", err) | ||||
| 		} | ||||
|  | ||||
| 		require.NoErrorf(t, err, "Failed to change into temporary directory: %v", err) | ||||
| 		reportPath = dir | ||||
| 		config := protecodeExecuteScanOptions{ReuseExisting: c.reuse, CleanupMode: c.clean, Group: c.group, FetchURL: c.fetchURL, TimeoutMinutes: "3", ExcludeCVEs: "CVE-2018-1, CVE-2017-1000382", ReportFileName: "./cache/report-file.txt"} | ||||
|  | ||||
| 		got := executeProtecodeScan(pc, &config, "dummy", writeReportToFileMock) | ||||
|  | ||||
| 		assert.Equal(t, 1125, got["historical_vulnerabilities"]) | ||||
| 		assert.Equal(t, 0, got["triaged_vulnerabilities"]) | ||||
| 		assert.Equal(t, 1, got["excluded_vulnerabilities"]) | ||||
| 		assert.Equal(t, 129, got["cvss3GreaterOrEqualSeven"]) | ||||
| 		assert.Equal(t, 13, got["cvss2GreaterOrEqualSeven"]) | ||||
| 		assert.Equal(t, 226, got["vulnerabilities"]) | ||||
| 		influxData := &protecodeExecuteScanInflux{} | ||||
| 		// test | ||||
| 		executeProtecodeScan(influxData, pc, &config, "dummy", writeReportToFileMock) | ||||
| 		// assert | ||||
| 		assert.Equal(t, "1125", influxData.protecode_data.fields.historical_vulnerabilities) | ||||
| 		assert.Equal(t, "0", influxData.protecode_data.fields.triaged_vulnerabilities) | ||||
| 		assert.Equal(t, "1", influxData.protecode_data.fields.excluded_vulnerabilities) | ||||
| 		assert.Equal(t, "142", influxData.protecode_data.fields.major_vulnerabilities) | ||||
| 		assert.Equal(t, "226", influxData.protecode_data.fields.vulnerabilities) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestCorrectDockerConfigEnvVar(t *testing.T) { | ||||
|  | ||||
| 	t.Run("with credentials", func(t *testing.T) { | ||||
| 		// init | ||||
| 		testDirectory, _ := ioutil.TempDir(".", "") | ||||
|   | ||||
							
								
								
									
pkg/protecode/analysis.go (new file, 40 lines added)
							| @@ -0,0 +1,40 @@ | ||||
| package protecode | ||||
|  | ||||
| import "strconv" | ||||
|  | ||||
| const ( | ||||
| 	vulnerabilitySeverityThreshold = 7.0 | ||||
| ) | ||||
|  | ||||
| //HasFailed checks the return status of the provided result | ||||
| func HasFailed(result ResultData) bool { | ||||
| 	//TODO: check this in PollForResult and return error once | ||||
| 	return len(result.Result.Status) > 0 && result.Result.Status == statusFailed | ||||
| } | ||||
|  | ||||
| //HasSevereVulnerabilities checks if any non-historic, non-triaged, non-excluded vulnerability has a CVSS score above the defined threshold | ||||
| func HasSevereVulnerabilities(result Result, excludeCVEs string) bool { | ||||
| 	for _, component := range result.Components { | ||||
| 		for _, vulnerability := range component.Vulns { | ||||
| 			if isSevere(vulnerability) && | ||||
| 				!isTriaged(vulnerability) && | ||||
| 				!isExcluded(vulnerability, excludeCVEs) && | ||||
| 				isExact(vulnerability) { | ||||
| 				return true | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| 	return false | ||||
| } | ||||
|  | ||||
| func isSevere(vulnerability Vulnerability) bool { | ||||
| 	cvss3, _ := strconv.ParseFloat(vulnerability.Vuln.Cvss3Score, 64) | ||||
| 	if cvss3 >= vulnerabilitySeverityThreshold { | ||||
| 		return true | ||||
| 	} | ||||
| 	// CVSS v3 not set, fallback to CVSS v2 | ||||
| 	if cvss3 == 0 && vulnerability.Vuln.Cvss >= vulnerabilitySeverityThreshold { | ||||
| 		return true | ||||
| 	} | ||||
| 	return false | ||||
| } | ||||
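The severity check falls back to the CVSS v2 score only when no CVSS v3 score is reported (`cvss3 == 0`). As an illustration of how a consumer of the package might call the exported check, assuming the `Result`, `Component`, `Vulnerability`, and `Vuln` types used in the tests further down (the exact matching rules of `isExcluded` are not shown in this diff, so the exclusion comment is based on the test with "Cve5,Cve14"):

```go
package main

import (
	"fmt"

	"github.com/SAP/jenkins-library/pkg/protecode"
)

func main() {
	// one severe CVSS v3 finding and one historic (Exact == false) finding
	result := protecode.Result{Components: []protecode.Component{{Vulns: []protecode.Vulnerability{
		{Exact: true, Triage: []protecode.Triage{}, Vuln: protecode.Vuln{Cve: "CVE-2020-0001", Cvss: 4.0, Cvss3Score: "8.1"}},
		{Exact: false, Triage: []protecode.Triage{}, Vuln: protecode.Vuln{Cve: "CVE-2018-0002", Cvss: 9.0, Cvss3Score: "9.0"}},
	}}}}

	// excludeCVEs is the comma-separated string taken from the step configuration
	fmt.Println(protecode.HasSevereVulnerabilities(result, ""))              // true: severe, exact, not triaged
	fmt.Println(protecode.HasSevereVulnerabilities(result, "CVE-2020-0001")) // false: the only severe finding is excluded
}
```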
							
								
								
									
pkg/protecode/analysis_test.go (new file, 127 lines added)
							| @@ -0,0 +1,127 @@ | ||||
| package protecode | ||||
|  | ||||
| import ( | ||||
| 	"testing" | ||||
|  | ||||
| 	"github.com/stretchr/testify/assert" | ||||
| ) | ||||
|  | ||||
| func TestIsSevere(t *testing.T) { | ||||
| 	t.Run("with severe cvss v3 vulnerability", func(t *testing.T) { | ||||
| 		// init | ||||
| 		vulnerability := Vulnerability{ | ||||
| 			Exact:  true, | ||||
| 			Triage: []Triage{}, | ||||
| 			Vuln: Vuln{ | ||||
| 				Cve:        "Cve2", | ||||
| 				Cvss:       8.0, | ||||
| 				Cvss3Score: "7.3", | ||||
| 			}, | ||||
| 		} | ||||
| 		// test && assert | ||||
| 		assert.True(t, isSevere(vulnerability)) | ||||
| 	}) | ||||
| 	t.Run("with severe cvss v2 vulnerability", func(t *testing.T) { | ||||
| 		// init | ||||
| 		vulnerability := Vulnerability{ | ||||
| 			Exact:  true, | ||||
| 			Triage: []Triage{}, | ||||
| 			Vuln: Vuln{ | ||||
| 				Cve:        "Cve2", | ||||
| 				Cvss:       8.0, | ||||
| 				Cvss3Score: "0.0", | ||||
| 			}, | ||||
| 		} | ||||
| 		// test && assert | ||||
| 		assert.True(t, isSevere(vulnerability)) | ||||
| 	}) | ||||
| 	t.Run("with non-severe cvss v3 vulnerability", func(t *testing.T) { | ||||
| 		// init | ||||
| 		vulnerability := Vulnerability{ | ||||
| 			Exact:  true, | ||||
| 			Triage: []Triage{}, | ||||
| 			Vuln: Vuln{ | ||||
| 				Cve:        "Cve2", | ||||
| 				Cvss:       4.0, | ||||
| 				Cvss3Score: "4.0", | ||||
| 			}, | ||||
| 		} | ||||
| 		// test && assert | ||||
| 		assert.False(t, isSevere(vulnerability)) | ||||
| 	}) | ||||
| 	t.Run("with non-severe cvss v2 vulnerability", func(t *testing.T) { | ||||
| 		// init | ||||
| 		vulnerability := Vulnerability{ | ||||
| 			Exact:  true, | ||||
| 			Triage: []Triage{}, | ||||
| 			Vuln: Vuln{ | ||||
| 				Cve:        "Cve2", | ||||
| 				Cvss:       4.0, | ||||
| 				Cvss3Score: "0.0", | ||||
| 			}, | ||||
| 		} | ||||
| 		// test && assert | ||||
| 		assert.False(t, isSevere(vulnerability)) | ||||
| 	}) | ||||
| 	t.Run("with non-severe vulnerability with missing cvss v3 rating", func(t *testing.T) { | ||||
| 		// init | ||||
| 		vulnerability := Vulnerability{ | ||||
| 			Exact:  true, | ||||
| 			Triage: []Triage{}, | ||||
| 			Vuln: Vuln{ | ||||
| 				Cve:        "Cve2", | ||||
| 				Cvss:       4.0, | ||||
| 				Cvss3Score: "", | ||||
| 			}, | ||||
| 		} | ||||
| 		// test && assert | ||||
| 		assert.False(t, isSevere(vulnerability)) | ||||
| 	}) | ||||
| } | ||||
|  | ||||
| func TestHasSevereVulnerabilities(t *testing.T) { | ||||
| 	severeV3 := Vulnerability{Exact: true, Triage: []Triage{}, Vuln: Vuln{Cve: "Cve1", Cvss: 4.0, Cvss3Score: "8.0"}} | ||||
| 	severeV2 := Vulnerability{Exact: true, Triage: []Triage{}, Vuln: Vuln{Cve: "Cve2", Cvss: 8.0, Cvss3Score: "0.0"}} | ||||
| 	nonSevere1 := Vulnerability{Exact: true, Triage: []Triage{}, Vuln: Vuln{Cve: "Cve3", Cvss: 4.0, Cvss3Score: "4.0"}} | ||||
| 	nonSevere2 := Vulnerability{Exact: true, Triage: []Triage{}, Vuln: Vuln{Cve: "Cve4", Cvss: 4.0, Cvss3Score: "4.0"}} | ||||
| 	excluded := Vulnerability{Exact: true, Triage: []Triage{}, Vuln: Vuln{Cve: "Cve5", Cvss: 8.0, Cvss3Score: "8.0"}} | ||||
| 	triaged := Vulnerability{Exact: true, Triage: []Triage{{ID: 1}}, Vuln: Vuln{Cve: "Cve6", Cvss: 8.0, Cvss3Score: "8.0"}} | ||||
| 	historic := Vulnerability{Exact: false, Triage: []Triage{}, Vuln: Vuln{Cve: "Cve7", Cvss: 8.0, Cvss3Score: "8.0"}} | ||||
|  | ||||
| 	t.Run("with severe v3 vulnerabilities", func(t *testing.T) { | ||||
| 		// init | ||||
| 		data := Result{Components: []Component{{Vulns: []Vulnerability{nonSevere1, severeV3}}}} | ||||
| 		// test && assert | ||||
| 		assert.True(t, HasSevereVulnerabilities(data, "")) | ||||
| 	}) | ||||
| 	t.Run("with severe v2 vulnerabilities", func(t *testing.T) { | ||||
| 		// init | ||||
| 		data := Result{Components: []Component{{Vulns: []Vulnerability{nonSevere1, severeV2}}}} | ||||
| 		// test && assert | ||||
| 		assert.True(t, HasSevereVulnerabilities(data, "")) | ||||
| 	}) | ||||
| 	t.Run("without severe vulnerabilities", func(t *testing.T) { | ||||
| 		// init | ||||
| 		data := Result{Components: []Component{{Vulns: []Vulnerability{nonSevere1, nonSevere2}}}} | ||||
| 		// test && assert | ||||
| 		assert.False(t, HasSevereVulnerabilities(data, "")) | ||||
| 	}) | ||||
| 	t.Run("with historic vulnerabilities", func(t *testing.T) { | ||||
| 		// init | ||||
| 		data := Result{Components: []Component{{Vulns: []Vulnerability{nonSevere1, triaged}}}} | ||||
| 		// test && assert | ||||
| 		assert.False(t, HasSevereVulnerabilities(data, "")) | ||||
| 	}) | ||||
| 	t.Run("with excluded vulnerabilities", func(t *testing.T) { | ||||
| 		// init | ||||
| 		data := Result{Components: []Component{{Vulns: []Vulnerability{nonSevere1, excluded}}}} | ||||
| 		// test && assert | ||||
| 		assert.False(t, HasSevereVulnerabilities(data, "Cve5,Cve14")) | ||||
| 	}) | ||||
| 	t.Run("with historic vulnerabilities", func(t *testing.T) { | ||||
| 		// init | ||||
| 		data := Result{Components: []Component{{Vulns: []Vulnerability{nonSevere1, historic}}}} | ||||
| 		// test && assert | ||||
| 		assert.False(t, HasSevereVulnerabilities(data, "")) | ||||
| 	}) | ||||
| } | ||||
							
								
								
									
pkg/protecode/api.go (new file, 7 lines added)
							| @@ -0,0 +1,7 @@ | ||||
| package protecode | ||||
|  | ||||
| const ( | ||||
| 	statusBusy   = "B" | ||||
| 	statusReady  = "R" | ||||
| 	statusFailed = "F" | ||||
| ) | ||||
| @@ -257,22 +257,18 @@ func isSevereCVSS2(vulnerability Vulnerability) bool { | ||||
|  | ||||
| // DeleteScan deletes if configured the scan on the protecode server | ||||
| func (pc *Protecode) DeleteScan(cleanupMode string, productID int) { | ||||
|  | ||||
| 	switch cleanupMode { | ||||
| 	case "none": | ||||
| 	case "binary": | ||||
| 		return | ||||
| 	case "complete": | ||||
| 		pc.logger.Info("Deleting scan from server.") | ||||
| 		protecodeURL := pc.createURL("/api/product/", fmt.Sprintf("%v/", productID), "") | ||||
| 		headers := map[string][]string{} | ||||
|  | ||||
| 		pc.sendAPIRequest("DELETE", protecodeURL, headers) | ||||
| 		break | ||||
| 	default: | ||||
| 		pc.logger.Fatalf("Unknown cleanup mode %v", cleanupMode) | ||||
| 	} | ||||
|  | ||||
| } | ||||
|  | ||||
| // LoadReport loads the report of the protecode scan | ||||
| @@ -280,9 +276,9 @@ func (pc *Protecode) LoadReport(reportFileName string, productID int) *io.ReadCl | ||||
|  | ||||
| 	protecodeURL := pc.createURL("/api/product/", fmt.Sprintf("%v/pdf-report", productID), "") | ||||
| 	headers := map[string][]string{ | ||||
| 		"Cache-Control": []string{"no-cache, no-store, must-revalidate"}, | ||||
| 		"Pragma":        []string{"no-cache"}, | ||||
| 		"Outputfile":    []string{reportFileName}, | ||||
| 		"Cache-Control": {"no-cache, no-store, must-revalidate"}, | ||||
| 		"Pragma":        {"no-cache"}, | ||||
| 		"Outputfile":    {reportFileName}, | ||||
| 	} | ||||
|  | ||||
| 	readCloser, err := pc.sendAPIRequest(http.MethodGet, protecodeURL, headers) | ||||
| @@ -296,7 +292,7 @@ func (pc *Protecode) LoadReport(reportFileName string, productID int) *io.ReadCl | ||||
| // UploadScanFile upload the scan file to the protecode server | ||||
| func (pc *Protecode) UploadScanFile(cleanupMode, group, filePath, fileName string) *ResultData { | ||||
| 	deleteBinary := (cleanupMode == "binary" || cleanupMode == "complete") | ||||
| 	headers := map[string][]string{"Group": []string{group}, "Delete-Binary": []string{fmt.Sprintf("%v", deleteBinary)}} | ||||
| 	headers := map[string][]string{"Group": {group}, "Delete-Binary": {fmt.Sprintf("%v", deleteBinary)}} | ||||
|  | ||||
| 	uploadURL := fmt.Sprintf("%v/api/upload/%v", pc.serverURL, fileName) | ||||
|  | ||||
| @@ -316,7 +312,7 @@ func (pc *Protecode) UploadScanFile(cleanupMode, group, filePath, fileName strin | ||||
| // DeclareFetchURL configures the fetch url for the protecode scan | ||||
| func (pc *Protecode) DeclareFetchURL(cleanupMode, group, fetchURL string) *ResultData { | ||||
| 	deleteBinary := (cleanupMode == "binary" || cleanupMode == "complete") | ||||
| 	headers := map[string][]string{"Group": []string{group}, "Delete-Binary": []string{fmt.Sprintf("%v", deleteBinary)}, "Url": []string{fetchURL}, "Content-Type": []string{"application/json"}} | ||||
| 	headers := map[string][]string{"Group": {group}, "Delete-Binary": {fmt.Sprintf("%v", deleteBinary)}, "Url": {fetchURL}, "Content-Type": []string{"application/json"}} | ||||
|  | ||||
| 	protecodeURL := fmt.Sprintf("%v/api/fetch/", pc.serverURL) | ||||
| 	r, err := pc.sendAPIRequest(http.MethodPost, protecodeURL, headers) | ||||
| @@ -355,7 +351,7 @@ func (pc *Protecode) PollForResult(productID int, timeOutInMinutes string) Resul | ||||
| 			i = 0 | ||||
| 			return response | ||||
| 		} | ||||
| 		if len(response.Result.Components) > 0 && response.Result.Status != "B" { | ||||
| 		if len(response.Result.Components) > 0 && response.Result.Status != statusBusy { | ||||
| 			ticker.Stop() | ||||
| 			i = 0 | ||||
| 			break | ||||
| @@ -367,9 +363,9 @@ func (pc *Protecode) PollForResult(productID int, timeOutInMinutes string) Resul | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if len(response.Result.Components) == 0 || response.Result.Status == "B" { | ||||
| 	if len(response.Result.Components) == 0 || response.Result.Status == statusBusy { | ||||
| 		response, err = pc.pullResult(productID) | ||||
| 		if err != nil || len(response.Result.Components) == 0 || response.Result.Status == "B" { | ||||
| 		if err != nil || len(response.Result.Components) == 0 || response.Result.Status == statusBusy { | ||||
| 			pc.logger.Fatal("No result after polling") | ||||
| 		} | ||||
| 	} | ||||
| @@ -378,12 +374,10 @@ func (pc *Protecode) PollForResult(productID int, timeOutInMinutes string) Resul | ||||
| } | ||||
|  | ||||
| func (pc *Protecode) pullResult(productID int) (ResultData, error) { | ||||
|  | ||||
| 	protecodeURL := pc.createURL("/api/product/", fmt.Sprintf("%v/", productID), "") | ||||
| 	headers := map[string][]string{ | ||||
| 		"acceptType": []string{"application/json"}, | ||||
| 		"acceptType": {"application/json"}, | ||||
| 	} | ||||
|  | ||||
| 	r, err := pc.sendAPIRequest(http.MethodGet, protecodeURL, headers) | ||||
| 	if err != nil { | ||||
| 		return *new(ResultData), err | ||||
| @@ -403,7 +397,7 @@ func (pc *Protecode) LoadExistingProduct(group string, reuseExisting bool) int { | ||||
|  | ||||
| 		protecodeURL := pc.createURL("/api/apps/", fmt.Sprintf("%v/", group), "") | ||||
| 		headers := map[string][]string{ | ||||
| 			"acceptType": []string{"application/json"}, | ||||
| 			"acceptType": {"application/json"}, | ||||
| 		} | ||||
|  | ||||
| 		response := pc.loadExisting(protecodeURL, headers) | ||||
|   | ||||
| @@ -31,7 +31,7 @@ func TestMapResponse(t *testing.T) { | ||||
| 		{`{"product_id": 1}`, new(Result), &Result{ProductID: 1}}, | ||||
| 		{`"{\"product_id\": 4711}"`, new(Result), &Result{ProductID: 4711}}, | ||||
| 		{"{\"results\": {\"product_id\": 1}}", new(ResultData), &ResultData{Result: Result{ProductID: 1}}}, | ||||
| 		{`{"results": {"status": "B", "id": 209396, "product_id": 209396, "report_url": "https://protecode.c.eu-de-2.cloud.sap/products/209396/"}}`, new(ResultData), &ResultData{Result: Result{ProductID: 209396, Status: "B", ReportURL: "https://protecode.c.eu-de-2.cloud.sap/products/209396/"}}}, | ||||
| 		{`{"results": {"status": "B", "id": 209396, "product_id": 209396, "report_url": "https://protecode.c.eu-de-2.cloud.sap/products/209396/"}}`, new(ResultData), &ResultData{Result: Result{ProductID: 209396, Status: statusBusy, ReportURL: "https://protecode.c.eu-de-2.cloud.sap/products/209396/"}}}, | ||||
| 		{`{"products": [{"product_id": 1}]}`, new(ProductData), &ProductData{Products: []Product{{ProductID: 1}}}}, | ||||
| 	} | ||||
| 	pc := Protecode{} | ||||
| @@ -47,7 +47,7 @@ func TestParseResultSuccess(t *testing.T) { | ||||
| 	var result Result = Result{ | ||||
| 		ProductID: 4712, | ||||
| 		ReportURL: "ReportUrl", | ||||
| 		Status:    "B", | ||||
| 		Status:    statusBusy, | ||||
| 		Components: []Component{ | ||||
| 			{Vulns: []Vulnerability{ | ||||
| 				{Exact: true, Triage: []Triage{}, Vuln: Vuln{Cve: "Cve1", Cvss: 7.2, Cvss3Score: "0.0"}}, | ||||
|   | ||||
							
								
								
									
pkg/protecode/report.go (new file, 52 lines added)
							| @@ -0,0 +1,52 @@ | ||||
| package protecode | ||||
|  | ||||
| import ( | ||||
| 	"encoding/json" | ||||
| 	"fmt" | ||||
| 	"os" | ||||
| 	"path/filepath" | ||||
|  | ||||
| 	"github.com/SAP/jenkins-library/pkg/log" | ||||
| ) | ||||
|  | ||||
| //ReportData is representing the data of the step report JSON | ||||
| type ReportData struct { | ||||
| 	Target                      string `json:"target,omitempty"` | ||||
| 	Mandatory                   bool   `json:"mandatory,omitempty"` | ||||
| 	ProductID                   string `json:"productID,omitempty"` | ||||
| 	ServerURL                   string `json:"serverUrl,omitempty"` | ||||
| 	FailOnSevereVulnerabilities bool   `json:"failOnSevereVulnerabilities,omitempty"` | ||||
| 	ExcludeCVEs                 string `json:"excludeCVEs,omitempty"` | ||||
| 	Count                       string `json:"count,omitempty"` | ||||
| 	Cvss2GreaterOrEqualSeven    string `json:"cvss2GreaterOrEqualSeven,omitempty"` | ||||
| 	Cvss3GreaterOrEqualSeven    string `json:"cvss3GreaterOrEqualSeven,omitempty"` | ||||
| 	ExcludedVulnerabilities     string `json:"excludedVulnerabilities,omitempty"` | ||||
| 	TriagedVulnerabilities      string `json:"triagedVulnerabilities,omitempty"` | ||||
| 	HistoricalVulnerabilities   string `json:"historicalVulnerabilities,omitempty"` | ||||
| 	Vulnerabilities             []Vuln `json:"Vulnerabilities,omitempty"` | ||||
| } | ||||
|  | ||||
| // WriteReport ... | ||||
| func WriteReport(data ReportData, reportPath string, reportFileName string, result map[string]int, writeToFile func(f string, d []byte, p os.FileMode) error) error { | ||||
| 	data.Mandatory = true | ||||
| 	data.Count = fmt.Sprintf("%v", result["count"]) | ||||
| 	data.Cvss2GreaterOrEqualSeven = fmt.Sprintf("%v", result["cvss2GreaterOrEqualSeven"]) | ||||
| 	data.Cvss3GreaterOrEqualSeven = fmt.Sprintf("%v", result["cvss3GreaterOrEqualSeven"]) | ||||
| 	data.ExcludedVulnerabilities = fmt.Sprintf("%v", result["excluded_vulnerabilities"]) | ||||
| 	data.TriagedVulnerabilities = fmt.Sprintf("%v", result["triaged_vulnerabilities"]) | ||||
| 	data.HistoricalVulnerabilities = fmt.Sprintf("%v", result["historical_vulnerabilities"]) | ||||
|  | ||||
| 	log.Entry().Infof("Protecode scan info, %v of which %v had a CVSS v2 score >= 7.0 and %v had a CVSS v3 score >= 7.0.\n %v vulnerabilities were excluded via configuration (%v) and %v vulnerabilities were triaged via the webUI.\nIn addition %v historical vulnerabilities were spotted. \n\n Vulnerabilities: %v", | ||||
| 		data.Count, data.Cvss2GreaterOrEqualSeven, data.Cvss3GreaterOrEqualSeven, | ||||
| 		data.ExcludedVulnerabilities, data.ExcludeCVEs, data.TriagedVulnerabilities, | ||||
| 		data.HistoricalVulnerabilities, data.Vulnerabilities) | ||||
| 	return writeJSON(reportPath, reportFileName, data, writeToFile) | ||||
| } | ||||
|  | ||||
| func writeJSON(path, name string, data interface{}, writeToFile func(f string, d []byte, p os.FileMode) error) error { | ||||
| 	jsonData, err := json.Marshal(data) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	return writeToFile(filepath.Join(path, name), jsonData, 0644) | ||||
| } | ||||
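Because `WriteReport` takes the file writer as a parameter, callers can pass `ioutil.WriteFile` for real output (as the step does) or a mock in tests. An illustrative call mirroring the step's usage above, with placeholder values for the URL and counts:

```go
package main

import (
	"io/ioutil"
	"log"

	"github.com/SAP/jenkins-library/pkg/protecode"
)

func main() {
	// counts as produced by ParseResultForInflux (placeholder numbers)
	parsed := map[string]int{
		"count":                      10,
		"cvss2GreaterOrEqualSeven":   1,
		"cvss3GreaterOrEqualSeven":   2,
		"excluded_vulnerabilities":   0,
		"triaged_vulnerabilities":    3,
		"historical_vulnerabilities": 4,
	}

	data := protecode.ReportData{
		ServerURL:                   "https://protecode.example.com", // placeholder URL
		FailOnSevereVulnerabilities: true,
		Target:                      "report.pdf",
		ProductID:                   "4711",
	}

	// writes ./protecodeExecuteScan.json through the injected writer
	if err := protecode.WriteReport(data, ".", "protecodeExecuteScan.json", parsed, ioutil.WriteFile); err != nil {
		log.Fatalf("failed to write report: %v", err)
	}
}
```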
							
								
								
									
pkg/protecode/report_test.go (new file, 31 lines added)
							| @@ -0,0 +1,31 @@ | ||||
| package protecode | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"os" | ||||
| 	"testing" | ||||
|  | ||||
| 	"github.com/stretchr/testify/assert" | ||||
| ) | ||||
|  | ||||
| var fileContent string | ||||
|  | ||||
| func writeToFileMock(f string, d []byte, p os.FileMode) error { | ||||
| 	fileContent = string(d) | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func TestWriteReport(t *testing.T) { | ||||
| 	expected := "{\"target\":\"REPORTFILENAME\",\"mandatory\":true,\"productID\":\"4711\",\"serverUrl\":\"DUMMYURL\",\"count\":\"0\",\"cvss2GreaterOrEqualSeven\":\"4\",\"cvss3GreaterOrEqualSeven\":\"3\",\"excludedVulnerabilities\":\"2\",\"triagedVulnerabilities\":\"0\",\"historicalVulnerabilities\":\"1\",\"Vulnerabilities\":[{\"cve\":\"Vulnerability\",\"cvss\":2.5,\"cvss3_score\":\"5.5\"}]}" | ||||
|  | ||||
| 	var parsedResult map[string]int = make(map[string]int) | ||||
| 	parsedResult["historical_vulnerabilities"] = 1 | ||||
| 	parsedResult["excluded_vulnerabilities"] = 2 | ||||
| 	parsedResult["cvss3GreaterOrEqualSeven"] = 3 | ||||
| 	parsedResult["cvss2GreaterOrEqualSeven"] = 4 | ||||
| 	parsedResult["vulnerabilities"] = 5 | ||||
|  | ||||
| 	err := WriteReport(ReportData{ServerURL: "DUMMYURL", FailOnSevereVulnerabilities: false, ExcludeCVEs: "", Target: "REPORTFILENAME", ProductID: fmt.Sprintf("%v", 4711), Vulnerabilities: []Vuln{{"Vulnerability", 2.5, "5.5"}}}, ".", "", parsedResult, writeToFileMock) | ||||
| 	assert.Equal(t, fileContent, expected, "content should be not empty") | ||||
| 	assert.NoError(t, err) | ||||
| } | ||||