	feat(whitesourceExecuteScan): GitHub issue creation + SARIF (#3535)
* Add GH issue creation + SARIF
* Code cleanup
* Fix fmt, add debug
* Code enhancements
* Fix
* Added debug info
* Rework UA log scan
* Fix code
* read UA version
* Fix nil reference
* Extraction
* Credentials
* Issue creation
* Error handling
* Fix issue creation
* query escape
* Query escape 2
* Revert
* Test avoid update
* HTTP client
* Add support for custom TLS certs
* Fix code
* Fix code 2
* Fix code 3
* Disable cert check
* Fix auth
* Remove implicit trust
* Skip verification
* Fix
* Fix client
* Fix HTTP auth
* Fix trusted certs
* Trim version
* Code
* Add token
* Added token handling to client
* Fix token
* Cleanup
* Fix token
* Token rework
* Fix code
* Kick out oauth client
* Kick out oauth client
* Transport wrapping
* Token
* Simplification
* Refactor
* Variation
* Check
* Fix
* Debug
* Switch client
* Variation
* Debug
* Switch to cert check
* Add debug
* Parse self
* Cleanup
* Update resources/metadata/whitesourceExecuteScan.yaml
* Add debug
* Expose subjects
* Patch
* Debug
* Debug2
* Debug3
* Fix logging response body
* Cleanup
* Cleanup
* Fix request body logging
* Cleanup import
* Fix import cycle
* Cleanup
* Fix fmt
* Fix NopCloser reference
* Regenerate
* Reintroduce
* Fix test
* Fix tests
* Correction
* Fix error
* Code fix
* Fix tests
* Add tests
* Fix code climate issues
* Code climate
* Code climate again
* Code climate again
* Fix fmt
* Fix fmt 2

Co-authored-by: Oliver Nocon <33484802+OliverNocon@users.noreply.github.com>
		| @@ -958,7 +958,8 @@ func scanProject(config *fortifyExecuteScanOptions, command fortifyUtils, buildI | ||||
|  | ||||
| func determinePullRequestMerge(config fortifyExecuteScanOptions) (string, string) { | ||||
| 	author := "" | ||||
| 	ctx, client, err := piperGithub.NewClient(config.GithubToken, config.GithubAPIURL, "") | ||||
| 	//TODO provide parameter for trusted certs | ||||
| 	ctx, client, err := piperGithub.NewClient(config.GithubToken, config.GithubAPIURL, "", []string{}) | ||||
| 	if err == nil && ctx != nil && client != nil { | ||||
| 		prID, author, err := determinePullRequestMergeGithub(ctx, config, client.PullRequests) | ||||
| 		if err != nil { | ||||
|   | ||||
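The hunks in this group all make the same mechanical change: piperGithub.NewClient gains a fourth parameter carrying download links for custom TLS certificates, and every existing call site passes an empty slice for now (hence the repeated TODO comments). A minimal sketch of the extended call, assuming nothing beyond the signature visible in this diff:

	// Sketch based on the call sites in this diff. The certificate links are
	// meant to come from a step parameter such as customTlsCertificateLinks
	// once one exists; until then the callers pass an empty slice.
	trustedCerts := []string{}
	ctx, client, err := piperGithub.NewClient(config.GithubToken, config.GithubAPIURL, "", trustedCerts)
	if err != nil {
		log.Entry().WithError(err).Fatal("Failed to get GitHub client")
	}
	// ctx and client are then handed to the GitHub API wrappers, e.g. client.PullRequests.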
| @@ -19,7 +19,8 @@ type gitHubBranchProtectionRepositoriesService interface { | ||||
| } | ||||
|  | ||||
| func githubCheckBranchProtection(config githubCheckBranchProtectionOptions, telemetryData *telemetry.CustomData) { | ||||
| 	ctx, client, err := piperGithub.NewClient(config.Token, config.APIURL, "") | ||||
| 	//TODO provide parameter for trusted certs | ||||
| 	ctx, client, err := piperGithub.NewClient(config.Token, config.APIURL, "", []string{}) | ||||
| 	if err != nil { | ||||
| 		log.Entry().WithError(err).Fatal("Failed to get GitHub client") | ||||
| 	} | ||||
|   | ||||
| @@ -16,7 +16,8 @@ type githubIssueCommentService interface { | ||||
| } | ||||
|  | ||||
| func githubCommentIssue(config githubCommentIssueOptions, telemetryData *telemetry.CustomData) { | ||||
| 	ctx, client, err := piperGithub.NewClient(config.Token, config.APIURL, "") | ||||
| 	//TODO provide parameter for trusted certs | ||||
| 	ctx, client, err := piperGithub.NewClient(config.Token, config.APIURL, "", []string{}) | ||||
| 	if err != nil { | ||||
| 		log.Entry().WithError(err).Fatal("Failed to get GitHub client") | ||||
| 	} | ||||
|   | ||||
| @@ -20,7 +20,8 @@ type githubIssueService interface { | ||||
| } | ||||
|  | ||||
| func githubCreatePullRequest(config githubCreatePullRequestOptions, telemetryData *telemetry.CustomData) { | ||||
| 	ctx, client, err := piperGithub.NewClient(config.Token, config.APIURL, "") | ||||
| 	//TODO provide parameter for trusted certs | ||||
| 	ctx, client, err := piperGithub.NewClient(config.Token, config.APIURL, "", []string{}) | ||||
| 	if err != nil { | ||||
| 		log.Entry().WithError(err).Fatal("Failed to get GitHub client") | ||||
| 	} | ||||
|   | ||||
| @@ -29,7 +29,8 @@ type githubIssueClient interface { | ||||
| } | ||||
|  | ||||
| func githubPublishRelease(config githubPublishReleaseOptions, telemetryData *telemetry.CustomData) { | ||||
| 	ctx, client, err := piperGithub.NewClient(config.Token, config.APIURL, config.UploadURL) | ||||
| 	//TODO provide parameter for trusted certs | ||||
| 	ctx, client, err := piperGithub.NewClient(config.Token, config.APIURL, config.UploadURL, []string{}) | ||||
| 	if err != nil { | ||||
| 		log.Entry().WithError(err).Fatal("Failed to get GitHub client.") | ||||
| 	} | ||||
|   | ||||
| @@ -19,7 +19,8 @@ type gitHubCommitStatusRepositoriesService interface { | ||||
| } | ||||
|  | ||||
| func githubSetCommitStatus(config githubSetCommitStatusOptions, telemetryData *telemetry.CustomData) { | ||||
| 	ctx, client, err := piperGithub.NewClient(config.Token, config.APIURL, "") | ||||
| 	//TODO provide parameter for trusted certs | ||||
| 	ctx, client, err := piperGithub.NewClient(config.Token, config.APIURL, "", []string{}) | ||||
| 	if err != nil { | ||||
| 		log.Entry().WithError(err).Fatal("Failed to get GitHub client") | ||||
| 	} | ||||
|   | ||||
| @@ -1,12 +1,10 @@ | ||||
| package cmd | ||||
|  | ||||
| import ( | ||||
| 	"crypto/sha1" | ||||
| 	"encoding/json" | ||||
| 	"fmt" | ||||
| 	"os" | ||||
| 	"path/filepath" | ||||
| 	"sort" | ||||
| 	"strconv" | ||||
| 	"strings" | ||||
| 	"time" | ||||
| @@ -534,7 +532,7 @@ func checkPolicyViolations(config *ScanOptions, scan *ws.Scan, sys whitesource, | ||||
| 			return policyReport, errors.Wrap(err, "failed to create reporting directory") | ||||
| 		} | ||||
| 	} | ||||
| 	if err := utils.FileWrite(filepath.Join(reporting.StepReportDirectory, fmt.Sprintf("whitesourceExecuteScan_ip_%v.json", reportSha(config, scan))), jsonReport, 0666); err != nil { | ||||
| 	if err := utils.FileWrite(filepath.Join(reporting.StepReportDirectory, fmt.Sprintf("whitesourceExecuteScan_ip_%v.json", ws.ReportSha(config.ProductName, scan))), jsonReport, 0666); err != nil { | ||||
| 		return policyReport, errors.Wrapf(err, "failed to write json report") | ||||
| 	} | ||||
| 	// we do not add the json report to the overall list of reports for now, | ||||
| @@ -580,12 +578,29 @@ func checkSecurityViolations(config *ScanOptions, scan *ws.Scan, sys whitesource | ||||
| 				errorsOccured = append(errorsOccured, fmt.Sprint(err)) | ||||
| 			} | ||||
| 		} | ||||
| 		log.Entry().Debugf("Aggregated %v alerts for scanned projects", len(allAlerts)) | ||||
|  | ||||
| 		scanReport := createCustomVulnerabilityReport(config, scan, allAlerts, cvssSeverityLimit, utils) | ||||
| 		reportPaths, err = writeCustomVulnerabilityReports(config, scan, scanReport, utils) | ||||
| 		if config.CreateResultIssue && vulnerabilitiesCount > 0 && len(config.GithubToken) > 0 && len(config.GithubAPIURL) > 0 && len(config.Owner) > 0 && len(config.Repository) > 0 { | ||||
| 			log.Entry().Debugf("Creating result issues for %v alert(s)", vulnerabilitiesCount) | ||||
| 			err = ws.CreateGithubResultIssues(scan, &allAlerts, config.GithubToken, config.GithubAPIURL, config.Owner, config.Repository, config.Assignees, config.CustomTLSCertificateLinks) | ||||
| 			if err != nil { | ||||
| 				errorsOccured = append(errorsOccured, fmt.Sprint(err)) | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		scanReport := ws.CreateCustomVulnerabilityReport(config.ProductName, scan, &allAlerts, cvssSeverityLimit) | ||||
| 		paths, err := ws.WriteCustomVulnerabilityReports(config.ProductName, scan, scanReport, utils) | ||||
| 		if err != nil { | ||||
| 			errorsOccured = append(errorsOccured, fmt.Sprint(err)) | ||||
| 		} | ||||
| 		reportPaths = append(reportPaths, paths...) | ||||
|  | ||||
| 		sarif := ws.CreateSarifResultFile(scan, &allAlerts) | ||||
| 		paths, err = ws.WriteSarifFile(sarif, utils) | ||||
| 		if err != nil { | ||||
| 			errorsOccured = append(errorsOccured, fmt.Sprint(err)) | ||||
| 		} | ||||
| 		reportPaths = append(reportPaths, paths...) | ||||
|  | ||||
| 		if len(errorsOccured) > 0 { | ||||
| 			if vulnerabilitiesCount > 0 { | ||||
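With the report helpers moved into the ws package, the reworked checkSecurityViolations chains three reporting steps: optional GitHub issue creation, the custom HTML/JSON vulnerability report, and the new SARIF result file. A condensed sketch of that flow, using the exported functions exactly as they appear in the hunk above (surrounding error handling trimmed):

	// Issue creation only runs when createResultIssue is set and the GitHub
	// coordinates (token, API URL, owner, repository) are all configured.
	if config.CreateResultIssue && vulnerabilitiesCount > 0 {
		if err := ws.CreateGithubResultIssues(scan, &allAlerts, config.GithubToken, config.GithubAPIURL,
			config.Owner, config.Repository, config.Assignees, config.CustomTLSCertificateLinks); err != nil {
			errorsOccured = append(errorsOccured, fmt.Sprint(err))
		}
	}

	// Custom HTML/JSON report, then the SARIF file; both sets of paths are archived.
	scanReport := ws.CreateCustomVulnerabilityReport(config.ProductName, scan, &allAlerts, cvssSeverityLimit)
	paths, err := ws.WriteCustomVulnerabilityReports(config.ProductName, scan, scanReport, utils)
	if err != nil {
		errorsOccured = append(errorsOccured, fmt.Sprint(err))
	}
	reportPaths = append(reportPaths, paths...)

	sarif := ws.CreateSarifResultFile(scan, &allAlerts)
	paths, err = ws.WriteSarifFile(sarif, utils)
	if err != nil {
		errorsOccured = append(errorsOccured, fmt.Sprint(err))
	}
	reportPaths = append(reportPaths, paths...)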
| @@ -605,7 +620,7 @@ func checkProjectSecurityViolations(cvssSeverityLimit float64, project ws.Projec | ||||
| 		return 0, alerts, fmt.Errorf("failed to retrieve project alerts from WhiteSource: %w", err) | ||||
| 	} | ||||
|  | ||||
| 	severeVulnerabilities, nonSevereVulnerabilities := countSecurityVulnerabilities(&alerts, cvssSeverityLimit) | ||||
| 	severeVulnerabilities, nonSevereVulnerabilities := ws.CountSecurityVulnerabilities(&alerts, cvssSeverityLimit) | ||||
| 	influx.whitesource_data.fields.minor_vulnerabilities = nonSevereVulnerabilities | ||||
| 	influx.whitesource_data.fields.major_vulnerabilities = severeVulnerabilities | ||||
| 	influx.whitesource_data.fields.vulnerabilities = nonSevereVulnerabilities + severeVulnerabilities | ||||
| @@ -627,154 +642,6 @@ func checkProjectSecurityViolations(cvssSeverityLimit float64, project ws.Projec | ||||
| 	return 0, alerts, nil | ||||
| } | ||||
|  | ||||
| func countSecurityVulnerabilities(alerts *[]ws.Alert, cvssSeverityLimit float64) (int, int) { | ||||
| 	severeVulnerabilities := 0 | ||||
| 	for _, alert := range *alerts { | ||||
| 		if isSevereVulnerability(alert, cvssSeverityLimit) { | ||||
| 			severeVulnerabilities++ | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	nonSevereVulnerabilities := len(*alerts) - severeVulnerabilities | ||||
| 	return severeVulnerabilities, nonSevereVulnerabilities | ||||
| } | ||||
|  | ||||
| func isSevereVulnerability(alert ws.Alert, cvssSeverityLimit float64) bool { | ||||
|  | ||||
| 	if vulnerabilityScore(alert) >= cvssSeverityLimit && cvssSeverityLimit >= 0 { | ||||
| 		return true | ||||
| 	} | ||||
| 	return false | ||||
| } | ||||
|  | ||||
| func createCustomVulnerabilityReport(config *ScanOptions, scan *ws.Scan, alerts []ws.Alert, cvssSeverityLimit float64, utils whitesourceUtils) reporting.ScanReport { | ||||
|  | ||||
| 	severe, _ := countSecurityVulnerabilities(&alerts, cvssSeverityLimit) | ||||
|  | ||||
| 	// sort according to vulnerability severity | ||||
| 	sort.Slice(alerts, func(i, j int) bool { | ||||
| 		return vulnerabilityScore(alerts[i]) > vulnerabilityScore(alerts[j]) | ||||
| 	}) | ||||
|  | ||||
| 	projectNames := scan.ScannedProjectNames() | ||||
|  | ||||
| 	scanReport := reporting.ScanReport{ | ||||
| 		Title: "WhiteSource Security Vulnerability Report", | ||||
| 		Subheaders: []reporting.Subheader{ | ||||
| 			{Description: "WhiteSource product name", Details: config.ProductName}, | ||||
| 			{Description: "Filtered project names", Details: strings.Join(projectNames, ", ")}, | ||||
| 		}, | ||||
| 		Overview: []reporting.OverviewRow{ | ||||
| 			{Description: "Total number of vulnerabilities", Details: fmt.Sprint(len(alerts))}, | ||||
| 			{Description: "Total number of high/critical vulnerabilities with CVSS score >= 7.0", Details: fmt.Sprint(severe)}, | ||||
| 		}, | ||||
| 		SuccessfulScan: severe == 0, | ||||
| 		ReportTime:     utils.Now(), | ||||
| 	} | ||||
|  | ||||
| 	detailTable := reporting.ScanDetailTable{ | ||||
| 		NoRowsMessage: "No publicly known vulnerabilities detected", | ||||
| 		Headers: []string{ | ||||
| 			"Date", | ||||
| 			"CVE", | ||||
| 			"CVSS Score", | ||||
| 			"CVSS Version", | ||||
| 			"Project", | ||||
| 			"Library file name", | ||||
| 			"Library group ID", | ||||
| 			"Library artifact ID", | ||||
| 			"Library version", | ||||
| 			"Description", | ||||
| 			"Top fix", | ||||
| 		}, | ||||
| 		WithCounter:   true, | ||||
| 		CounterHeader: "Entry #", | ||||
| 	} | ||||
|  | ||||
| 	for _, alert := range alerts { | ||||
| 		var score float64 | ||||
| 		var scoreStyle reporting.ColumnStyle = reporting.Yellow | ||||
| 		if isSevereVulnerability(alert, cvssSeverityLimit) { | ||||
| 			scoreStyle = reporting.Red | ||||
| 		} | ||||
| 		var cveVersion string | ||||
| 		if alert.Vulnerability.CVSS3Score > 0 { | ||||
| 			score = alert.Vulnerability.CVSS3Score | ||||
| 			cveVersion = "v3" | ||||
| 		} else { | ||||
| 			score = alert.Vulnerability.Score | ||||
| 			cveVersion = "v2" | ||||
| 		} | ||||
|  | ||||
| 		var topFix string | ||||
| 		emptyFix := ws.Fix{} | ||||
| 		if alert.Vulnerability.TopFix != emptyFix { | ||||
| 			topFix = fmt.Sprintf(`%v<br>%v<br><a href="%v">%v</a>}"`, alert.Vulnerability.TopFix.Message, alert.Vulnerability.TopFix.FixResolution, alert.Vulnerability.TopFix.URL, alert.Vulnerability.TopFix.URL) | ||||
| 		} | ||||
|  | ||||
| 		row := reporting.ScanRow{} | ||||
| 		row.AddColumn(alert.Vulnerability.PublishDate, 0) | ||||
| 		row.AddColumn(fmt.Sprintf(`<a href="%v">%v</a>`, alert.Vulnerability.URL, alert.Vulnerability.Name), 0) | ||||
| 		row.AddColumn(score, scoreStyle) | ||||
| 		row.AddColumn(cveVersion, 0) | ||||
| 		row.AddColumn(alert.Project, 0) | ||||
| 		row.AddColumn(alert.Library.Filename, 0) | ||||
| 		row.AddColumn(alert.Library.GroupID, 0) | ||||
| 		row.AddColumn(alert.Library.ArtifactID, 0) | ||||
| 		row.AddColumn(alert.Library.Version, 0) | ||||
| 		row.AddColumn(alert.Vulnerability.Description, 0) | ||||
| 		row.AddColumn(topFix, 0) | ||||
|  | ||||
| 		detailTable.Rows = append(detailTable.Rows, row) | ||||
| 	} | ||||
| 	scanReport.DetailTable = detailTable | ||||
|  | ||||
| 	return scanReport | ||||
| } | ||||
|  | ||||
| func writeCustomVulnerabilityReports(config *ScanOptions, scan *ws.Scan, scanReport reporting.ScanReport, utils whitesourceUtils) ([]piperutils.Path, error) { | ||||
| 	reportPaths := []piperutils.Path{} | ||||
|  | ||||
| 	// ignore templating errors since template is in our hands and issues will be detected with the automated tests | ||||
| 	htmlReport, _ := scanReport.ToHTML() | ||||
| 	htmlReportPath := filepath.Join(ws.ReportsDirectory, "piper_whitesource_vulnerability_report.html") | ||||
| 	if err := utils.FileWrite(htmlReportPath, htmlReport, 0666); err != nil { | ||||
| 		log.SetErrorCategory(log.ErrorConfiguration) | ||||
| 		return reportPaths, errors.Wrapf(err, "failed to write html report") | ||||
| 	} | ||||
| 	reportPaths = append(reportPaths, piperutils.Path{Name: "WhiteSource Vulnerability Report", Target: htmlReportPath}) | ||||
|  | ||||
| 	// JSON reports are used by step pipelineCreateSummary in order to e.g. prepare an issue creation in GitHub | ||||
| 	// ignore JSON errors since structure is in our hands | ||||
| 	jsonReport, _ := scanReport.ToJSON() | ||||
| 	if exists, _ := utils.DirExists(reporting.StepReportDirectory); !exists { | ||||
| 		err := utils.MkdirAll(reporting.StepReportDirectory, 0777) | ||||
| 		if err != nil { | ||||
| 			return reportPaths, errors.Wrap(err, "failed to create reporting directory") | ||||
| 		} | ||||
| 	} | ||||
| 	if err := utils.FileWrite(filepath.Join(reporting.StepReportDirectory, fmt.Sprintf("whitesourceExecuteScan_oss_%v.json", reportSha(config, scan))), jsonReport, 0666); err != nil { | ||||
| 		return reportPaths, errors.Wrapf(err, "failed to write json report") | ||||
| 	} | ||||
| 	// we do not add the json report to the overall list of reports for now, | ||||
| 	// since it is just an intermediary report used as input for later | ||||
| 	// and there does not seem to be real benefit in archiving it. | ||||
|  | ||||
| 	return reportPaths, nil | ||||
| } | ||||
|  | ||||
| func vulnerabilityScore(alert ws.Alert) float64 { | ||||
| 	if alert.Vulnerability.CVSS3Score > 0 { | ||||
| 		return alert.Vulnerability.CVSS3Score | ||||
| 	} | ||||
| 	return alert.Vulnerability.Score | ||||
| } | ||||
|  | ||||
| func reportSha(config *ScanOptions, scan *ws.Scan) string { | ||||
| 	reportShaData := []byte(config.ProductName + "," + strings.Join(scan.ScannedProjectNames(), ",")) | ||||
| 	return fmt.Sprintf("%x", sha1.Sum(reportShaData)) | ||||
| } | ||||
|  | ||||
| func aggregateVersionWideLibraries(config *ScanOptions, utils whitesourceUtils, sys whitesource) error { | ||||
| 	log.Entry().Infof("Aggregating list of libraries used for all projects with version: %s", config.Version) | ||||
|  | ||||
|   | ||||
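reportSha itself moves to the ws package as well; the call sites above now use ws.ReportSha(config.ProductName, scan). The derivation is unchanged: a hex-encoded SHA-1 over the product name plus the comma-joined scanned project names, which gives the step's JSON reports stable file names. A self-contained sketch mirroring the removed helper:

	package main

	import (
		"crypto/sha1"
		"fmt"
		"strings"
	)

	// reportSha mirrors the helper removed above: SHA-1 over the product name
	// and the scanned project names, joined with commas.
	func reportSha(productName string, projectNames []string) string {
		data := []byte(productName + "," + strings.Join(projectNames, ","))
		return fmt.Sprintf("%x", sha1.Sum(data))
	}

	func main() {
		// Illustrative input; in the step the names come from scan.ScannedProjectNames().
		fmt.Println(reportSha("mock-product", []string{"project1 - 1", "project2 - 1"}))
	}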
| @@ -66,6 +66,13 @@ type whitesourceExecuteScanOptions struct { | ||||
| 	M2Path                               string   `json:"m2Path,omitempty"` | ||||
| 	InstallArtifacts                     bool     `json:"installArtifacts,omitempty"` | ||||
| 	DefaultNpmRegistry                   string   `json:"defaultNpmRegistry,omitempty"` | ||||
| 	GithubToken                          string   `json:"githubToken,omitempty"` | ||||
| 	CreateResultIssue                    bool     `json:"createResultIssue,omitempty"` | ||||
| 	GithubAPIURL                         string   `json:"githubApiUrl,omitempty"` | ||||
| 	Owner                                string   `json:"owner,omitempty"` | ||||
| 	Repository                           string   `json:"repository,omitempty"` | ||||
| 	Assignees                            []string `json:"assignees,omitempty"` | ||||
| 	CustomTLSCertificateLinks            []string `json:"customTlsCertificateLinks,omitempty"` | ||||
| } | ||||
|  | ||||
| type whitesourceExecuteScanCommonPipelineEnvironment struct { | ||||
| @@ -232,6 +239,7 @@ The step uses the so-called WhiteSource Unified Agent. For details please refer | ||||
| 			log.RegisterSecret(stepConfig.DockerConfigJSON) | ||||
| 			log.RegisterSecret(stepConfig.OrgToken) | ||||
| 			log.RegisterSecret(stepConfig.UserToken) | ||||
| 			log.RegisterSecret(stepConfig.GithubToken) | ||||
|  | ||||
| 			if len(GeneralConfig.HookConfig.SentryConfig.Dsn) > 0 { | ||||
| 				sentryHook := log.NewSentryHook(GeneralConfig.HookConfig.SentryConfig.Dsn, GeneralConfig.CorrelationID) | ||||
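The generated step code also registers the new githubToken as a secret before any logging happens, alongside the org and user tokens above; values registered this way are masked in subsequent log output. A minimal illustration of the pattern, assuming log.RegisterSecret behaves here as it does for the existing tokens:

	// Register before first use so the raw token never reaches the log.
	log.RegisterSecret(stepConfig.GithubToken)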
| @@ -337,6 +345,13 @@ func addWhitesourceExecuteScanFlags(cmd *cobra.Command, stepConfig *whitesourceE | ||||
| 	cmd.Flags().StringVar(&stepConfig.M2Path, "m2Path", os.Getenv("PIPER_m2Path"), "Path to the location of the local repository that should be used.") | ||||
| 	cmd.Flags().BoolVar(&stepConfig.InstallArtifacts, "installArtifacts", false, "If enabled, it will install all artifacts to the local maven repository to make them available before running whitesource. This is required if any maven module has dependencies to other modules in the repository and they were not installed before.") | ||||
| 	cmd.Flags().StringVar(&stepConfig.DefaultNpmRegistry, "defaultNpmRegistry", os.Getenv("PIPER_defaultNpmRegistry"), "URL of the npm registry to use. Defaults to https://registry.npmjs.org/") | ||||
| 	cmd.Flags().StringVar(&stepConfig.GithubToken, "githubToken", os.Getenv("PIPER_githubToken"), "GitHub personal access token as per https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line") | ||||
| 	cmd.Flags().BoolVar(&stepConfig.CreateResultIssue, "createResultIssue", false, "Whether the step creates a GitHub issue containing the scan results in the originating repo. Since optimized pipelines are headless the creation is implicitly activated for scheduled runs.") | ||||
| 	cmd.Flags().StringVar(&stepConfig.GithubAPIURL, "githubApiUrl", `https://api.github.com`, "Set the GitHub API URL.") | ||||
| 	cmd.Flags().StringVar(&stepConfig.Owner, "owner", os.Getenv("PIPER_owner"), "Set the GitHub organization.") | ||||
| 	cmd.Flags().StringVar(&stepConfig.Repository, "repository", os.Getenv("PIPER_repository"), "Set the GitHub repository.") | ||||
| 	cmd.Flags().StringSliceVar(&stepConfig.Assignees, "assignees", []string{``}, "Defines the assignees for the Github Issue created/updated with the results of the scan as a list of login names.") | ||||
| 	cmd.Flags().StringSliceVar(&stepConfig.CustomTLSCertificateLinks, "customTlsCertificateLinks", []string{}, "List of download links to custom TLS certificates. This is required to ensure trusted connections to instances with repositories (like nexus) when publish flag is set to true.") | ||||
|  | ||||
| 	cmd.MarkFlagRequired("buildTool") | ||||
| 	cmd.MarkFlagRequired("orgToken") | ||||
| @@ -357,6 +372,7 @@ func whitesourceExecuteScanMetadata() config.StepData { | ||||
| 					{Name: "userTokenCredentialsId", Description: "Jenkins 'Secret text' credentials ID containing Whitesource user token.", Type: "jenkins", Aliases: []config.Alias{{Name: "whitesourceUserTokenCredentialsId", Deprecated: false}, {Name: "whitesource/userTokenCredentialsId", Deprecated: true}}}, | ||||
| 					{Name: "orgAdminUserTokenCredentialsId", Description: "Jenkins 'Secret text' credentials ID containing Whitesource org admin token.", Type: "jenkins", Aliases: []config.Alias{{Name: "whitesourceOrgAdminUserTokenCredentialsId", Deprecated: false}, {Name: "whitesource/orgAdminUserTokenCredentialsId", Deprecated: true}}}, | ||||
| 					{Name: "dockerConfigJsonCredentialsId", Description: "Jenkins 'Secret file' credentials ID containing Docker config.json (with registry credential(s)). You can find more details about the Docker credentials in the [Docker documentation](https://docs.docker.com/engine/reference/commandline/login/).", Type: "jenkins", Aliases: []config.Alias{{Name: "dockerCredentialsId", Deprecated: true}}}, | ||||
| 					{Name: "githubTokenCredentialsId", Description: "Jenkins 'Secret text' credentials ID containing token to authenticate to GitHub.", Type: "jenkins"}, | ||||
| 				}, | ||||
| 				Resources: []config.StepResources{ | ||||
| 					{Name: "buildDescriptor", Type: "stash"}, | ||||
| @@ -822,6 +838,95 @@ func whitesourceExecuteScanMetadata() config.StepData { | ||||
| 						Aliases:     []config.Alias{{Name: "npm/defaultNpmRegistry"}}, | ||||
| 						Default:     os.Getenv("PIPER_defaultNpmRegistry"), | ||||
| 					}, | ||||
| 					{ | ||||
| 						Name: "githubToken", | ||||
| 						ResourceRef: []config.ResourceReference{ | ||||
| 							{ | ||||
| 								Name: "githubTokenCredentialsId", | ||||
| 								Type: "secret", | ||||
| 							}, | ||||
|  | ||||
| 							{ | ||||
| 								Name:    "githubVaultSecretName", | ||||
| 								Type:    "vaultSecret", | ||||
| 								Default: "github", | ||||
| 							}, | ||||
| 						}, | ||||
| 						Scope:     []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, | ||||
| 						Type:      "string", | ||||
| 						Mandatory: false, | ||||
| 						Aliases:   []config.Alias{{Name: "access_token"}}, | ||||
| 						Default:   os.Getenv("PIPER_githubToken"), | ||||
| 					}, | ||||
| 					{ | ||||
| 						Name: "createResultIssue", | ||||
| 						ResourceRef: []config.ResourceReference{ | ||||
| 							{ | ||||
| 								Name:  "commonPipelineEnvironment", | ||||
| 								Param: "custom/optimizedAndScheduled", | ||||
| 							}, | ||||
| 						}, | ||||
| 						Scope:     []string{"PARAMETERS", "STAGES", "STEPS"}, | ||||
| 						Type:      "bool", | ||||
| 						Mandatory: false, | ||||
| 						Aliases:   []config.Alias{}, | ||||
| 						Default:   false, | ||||
| 					}, | ||||
| 					{ | ||||
| 						Name:        "githubApiUrl", | ||||
| 						ResourceRef: []config.ResourceReference{}, | ||||
| 						Scope:       []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, | ||||
| 						Type:        "string", | ||||
| 						Mandatory:   false, | ||||
| 						Aliases:     []config.Alias{}, | ||||
| 						Default:     `https://api.github.com`, | ||||
| 					}, | ||||
| 					{ | ||||
| 						Name: "owner", | ||||
| 						ResourceRef: []config.ResourceReference{ | ||||
| 							{ | ||||
| 								Name:  "commonPipelineEnvironment", | ||||
| 								Param: "github/owner", | ||||
| 							}, | ||||
| 						}, | ||||
| 						Scope:     []string{"PARAMETERS", "STAGES", "STEPS"}, | ||||
| 						Type:      "string", | ||||
| 						Mandatory: false, | ||||
| 						Aliases:   []config.Alias{{Name: "githubOrg"}}, | ||||
| 						Default:   os.Getenv("PIPER_owner"), | ||||
| 					}, | ||||
| 					{ | ||||
| 						Name: "repository", | ||||
| 						ResourceRef: []config.ResourceReference{ | ||||
| 							{ | ||||
| 								Name:  "commonPipelineEnvironment", | ||||
| 								Param: "github/repository", | ||||
| 							}, | ||||
| 						}, | ||||
| 						Scope:     []string{"PARAMETERS", "STAGES", "STEPS"}, | ||||
| 						Type:      "string", | ||||
| 						Mandatory: false, | ||||
| 						Aliases:   []config.Alias{{Name: "githubRepo"}}, | ||||
| 						Default:   os.Getenv("PIPER_repository"), | ||||
| 					}, | ||||
| 					{ | ||||
| 						Name:        "assignees", | ||||
| 						ResourceRef: []config.ResourceReference{}, | ||||
| 						Scope:       []string{"PARAMETERS", "STAGES", "STEPS"}, | ||||
| 						Type:        "[]string", | ||||
| 						Mandatory:   false, | ||||
| 						Aliases:     []config.Alias{}, | ||||
| 						Default:     []string{``}, | ||||
| 					}, | ||||
| 					{ | ||||
| 						Name:        "customTlsCertificateLinks", | ||||
| 						ResourceRef: []config.ResourceReference{}, | ||||
| 						Scope:       []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"}, | ||||
| 						Type:        "[]string", | ||||
| 						Mandatory:   false, | ||||
| 						Aliases:     []config.Alias{}, | ||||
| 						Default:     []string{}, | ||||
| 					}, | ||||
| 				}, | ||||
| 			}, | ||||
| 			Containers: []config.Container{ | ||||
|   | ||||
| @@ -656,161 +656,6 @@ func TestCheckProjectSecurityViolations(t *testing.T) { | ||||
|  | ||||
| } | ||||
|  | ||||
| func TestCountSecurityVulnerabilities(t *testing.T) { | ||||
| 	t.Parallel() | ||||
|  | ||||
| 	alerts := []ws.Alert{ | ||||
| 		{Vulnerability: ws.Vulnerability{CVSS3Score: 7.1}}, | ||||
| 		{Vulnerability: ws.Vulnerability{CVSS3Score: 7}}, | ||||
| 		{Vulnerability: ws.Vulnerability{CVSS3Score: 6}}, | ||||
| 	} | ||||
|  | ||||
| 	severe, nonSevere := countSecurityVulnerabilities(&alerts, 7.0) | ||||
| 	assert.Equal(t, 2, severe) | ||||
| 	assert.Equal(t, 1, nonSevere) | ||||
| } | ||||
|  | ||||
| func TestIsSevereVulnerability(t *testing.T) { | ||||
| 	tt := []struct { | ||||
| 		alert    ws.Alert | ||||
| 		limit    float64 | ||||
| 		expected bool | ||||
| 	}{ | ||||
| 		{alert: ws.Alert{Vulnerability: ws.Vulnerability{CVSS3Score: 0}}, limit: 0, expected: true}, | ||||
| 		{alert: ws.Alert{Vulnerability: ws.Vulnerability{CVSS3Score: 6.9, Score: 6}}, limit: 7.0, expected: false}, | ||||
| 		{alert: ws.Alert{Vulnerability: ws.Vulnerability{CVSS3Score: 7.0, Score: 6}}, limit: 7.0, expected: true}, | ||||
| 		{alert: ws.Alert{Vulnerability: ws.Vulnerability{CVSS3Score: 7.1, Score: 6}}, limit: 7.0, expected: true}, | ||||
| 		{alert: ws.Alert{Vulnerability: ws.Vulnerability{CVSS3Score: 6, Score: 6.9}}, limit: 7.0, expected: false}, | ||||
| 		{alert: ws.Alert{Vulnerability: ws.Vulnerability{CVSS3Score: 6, Score: 7.0}}, limit: 7.0, expected: false}, | ||||
| 		{alert: ws.Alert{Vulnerability: ws.Vulnerability{CVSS3Score: 6, Score: 7.1}}, limit: 7.0, expected: false}, | ||||
| 		{alert: ws.Alert{Vulnerability: ws.Vulnerability{Score: 6.9}}, limit: 7.0, expected: false}, | ||||
| 		{alert: ws.Alert{Vulnerability: ws.Vulnerability{Score: 7.0}}, limit: 7.0, expected: true}, | ||||
| 		{alert: ws.Alert{Vulnerability: ws.Vulnerability{Score: 7.1}}, limit: 7.0, expected: true}, | ||||
| 	} | ||||
|  | ||||
| 	for i, test := range tt { | ||||
| 		assert.Equalf(t, test.expected, isSevereVulnerability(test.alert, test.limit), "run %v failed", i) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestCreateCustomVulnerabilityReport(t *testing.T) { | ||||
| 	t.Parallel() | ||||
|  | ||||
| 	t.Run("success case", func(t *testing.T) { | ||||
| 		config := &ScanOptions{} | ||||
| 		scan := newWhitesourceScan(config) | ||||
| 		scan.AppendScannedProject("testProject") | ||||
| 		alerts := []ws.Alert{ | ||||
| 			{Library: ws.Library{Filename: "vul1"}, Vulnerability: ws.Vulnerability{CVSS3Score: 7.0, Score: 6}}, | ||||
| 			{Library: ws.Library{Filename: "vul2"}, Vulnerability: ws.Vulnerability{CVSS3Score: 8.0, TopFix: ws.Fix{Message: "this is the top fix"}}}, | ||||
| 			{Library: ws.Library{Filename: "vul3"}, Vulnerability: ws.Vulnerability{Score: 6}}, | ||||
| 		} | ||||
| 		utilsMock := newWhitesourceUtilsMock() | ||||
|  | ||||
| 		scanReport := createCustomVulnerabilityReport(config, scan, alerts, 7.0, utilsMock) | ||||
|  | ||||
| 		assert.Equal(t, "WhiteSource Security Vulnerability Report", scanReport.Title) | ||||
| 		assert.Equal(t, 3, len(scanReport.DetailTable.Rows)) | ||||
|  | ||||
| 		// assert that library info is filled and sorting has been executed | ||||
| 		assert.Equal(t, "vul2", scanReport.DetailTable.Rows[0].Columns[5].Content) | ||||
| 		assert.Equal(t, "vul1", scanReport.DetailTable.Rows[1].Columns[5].Content) | ||||
| 		assert.Equal(t, "vul3", scanReport.DetailTable.Rows[2].Columns[5].Content) | ||||
|  | ||||
| 		// assert that CVSS version identification has been done | ||||
| 		assert.Equal(t, "v3", scanReport.DetailTable.Rows[0].Columns[3].Content) | ||||
| 		assert.Equal(t, "v3", scanReport.DetailTable.Rows[1].Columns[3].Content) | ||||
| 		assert.Equal(t, "v2", scanReport.DetailTable.Rows[2].Columns[3].Content) | ||||
|  | ||||
| 		// assert proper rating and styling of high prio issues | ||||
| 		assert.Equal(t, "8", scanReport.DetailTable.Rows[0].Columns[2].Content) | ||||
| 		assert.Equal(t, "7", scanReport.DetailTable.Rows[1].Columns[2].Content) | ||||
| 		assert.Equal(t, "6", scanReport.DetailTable.Rows[2].Columns[2].Content) | ||||
| 		assert.Equal(t, "red-cell", scanReport.DetailTable.Rows[0].Columns[2].Style.String()) | ||||
| 		assert.Equal(t, "red-cell", scanReport.DetailTable.Rows[1].Columns[2].Style.String()) | ||||
| 		assert.Equal(t, "yellow-cell", scanReport.DetailTable.Rows[2].Columns[2].Style.String()) | ||||
|  | ||||
| 		assert.Contains(t, scanReport.DetailTable.Rows[0].Columns[10].Content, "this is the top fix") | ||||
|  | ||||
| 	}) | ||||
| } | ||||
|  | ||||
| func TestWriteCustomVulnerabilityReports(t *testing.T) { | ||||
|  | ||||
| 	t.Run("success", func(t *testing.T) { | ||||
| 		config := &ScanOptions{ | ||||
| 			ProductName: "mock-product", | ||||
| 		} | ||||
| 		scan := &ws.Scan{ProductVersion: "1"} | ||||
| 		scan.AppendScannedProject("project1") | ||||
| 		scan.AppendScannedProject("project2") | ||||
|  | ||||
| 		scanReport := reporting.ScanReport{} | ||||
| 		utilsMock := newWhitesourceUtilsMock() | ||||
|  | ||||
| 		reportPaths, err := writeCustomVulnerabilityReports(config, scan, scanReport, utilsMock) | ||||
|  | ||||
| 		assert.NoError(t, err) | ||||
| 		assert.Equal(t, 1, len(reportPaths)) | ||||
|  | ||||
| 		exists, err := utilsMock.FileExists(reportPaths[0].Target) | ||||
| 		assert.NoError(t, err) | ||||
| 		assert.True(t, exists) | ||||
|  | ||||
| 		exists, err = utilsMock.FileExists(filepath.Join(reporting.StepReportDirectory, "whitesourceExecuteScan_oss_27322f16a39c10c852ba6639538140a03e08e93f.json")) | ||||
| 		assert.NoError(t, err) | ||||
| 		assert.True(t, exists) | ||||
| 	}) | ||||
|  | ||||
| 	t.Run("failed to write HTML report", func(t *testing.T) { | ||||
| 		config := &ScanOptions{ | ||||
| 			ProductName: "mock-product", | ||||
| 		} | ||||
| 		scan := &ws.Scan{ProductVersion: "1"} | ||||
| 		scanReport := reporting.ScanReport{} | ||||
| 		utilsMock := newWhitesourceUtilsMock() | ||||
| 		utilsMock.FileWriteErrors = map[string]error{ | ||||
| 			filepath.Join(ws.ReportsDirectory, "piper_whitesource_vulnerability_report.html"): fmt.Errorf("write error"), | ||||
| 		} | ||||
|  | ||||
| 		_, err := writeCustomVulnerabilityReports(config, scan, scanReport, utilsMock) | ||||
| 		assert.Contains(t, fmt.Sprint(err), "failed to write html report") | ||||
| 	}) | ||||
|  | ||||
| 	t.Run("failed to write json report", func(t *testing.T) { | ||||
| 		config := &ScanOptions{ | ||||
| 			ProductName: "mock-product", | ||||
| 		} | ||||
| 		scan := &ws.Scan{ProductVersion: "1"} | ||||
| 		scan.AppendScannedProject("project1") | ||||
| 		scanReport := reporting.ScanReport{} | ||||
| 		utilsMock := newWhitesourceUtilsMock() | ||||
| 		utilsMock.FileWriteErrors = map[string]error{ | ||||
| 			filepath.Join(reporting.StepReportDirectory, "whitesourceExecuteScan_oss_e860d3a7cc8ca3261f065773404ba43e9a0b9d5b.json"): fmt.Errorf("write error"), | ||||
| 		} | ||||
|  | ||||
| 		_, err := writeCustomVulnerabilityReports(config, scan, scanReport, utilsMock) | ||||
| 		assert.Contains(t, fmt.Sprint(err), "failed to write json report") | ||||
| 	}) | ||||
|  | ||||
| } | ||||
|  | ||||
| func TestVulnerabilityScore(t *testing.T) { | ||||
| 	t.Parallel() | ||||
|  | ||||
| 	tt := []struct { | ||||
| 		alert    ws.Alert | ||||
| 		expected float64 | ||||
| 	}{ | ||||
| 		{alert: ws.Alert{Vulnerability: ws.Vulnerability{CVSS3Score: 7.0, Score: 6}}, expected: 7.0}, | ||||
| 		{alert: ws.Alert{Vulnerability: ws.Vulnerability{CVSS3Score: 7.0}}, expected: 7.0}, | ||||
| 		{alert: ws.Alert{Vulnerability: ws.Vulnerability{Score: 6}}, expected: 6}, | ||||
| 	} | ||||
| 	for i, test := range tt { | ||||
| 		assert.Equalf(t, test.expected, vulnerabilityScore(test.alert), "run %v failed", i) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestAggregateVersionWideLibraries(t *testing.T) { | ||||
| 	t.Parallel() | ||||
| 	t.Run("happy path", func(t *testing.T) { | ||||
|   | ||||
| @@ -1,4 +1,6 @@ | ||||
| //go:build integration | ||||
| // +build integration | ||||
|  | ||||
| // can be execute with go test -tags=integration ./integration/... | ||||
|  | ||||
| package main | ||||
|   | ||||
| @@ -1,4 +1,6 @@ | ||||
| //go:build integration | ||||
| // +build integration | ||||
|  | ||||
| // can be execute with go test -tags=integration ./integration/... | ||||
|  | ||||
| package main | ||||
|   | ||||
| @@ -1,4 +1,6 @@ | ||||
| //go:build integration | ||||
| // +build integration | ||||
|  | ||||
| // can be execute with go test -tags=integration ./integration/... | ||||
|  | ||||
| package main | ||||
|   | ||||
| @@ -1,4 +1,6 @@ | ||||
| //go:build integration | ||||
| // +build integration | ||||
|  | ||||
| // can be execute with go test -tags=integration ./integration/... | ||||
|  | ||||
| package main | ||||
|   | ||||
| @@ -1,3 +1,4 @@ | ||||
| //go:build integration | ||||
| // +build integration | ||||
|  | ||||
| // can be execute with go test -tags=integration ./integration/... | ||||
|   | ||||
| @@ -1,4 +1,6 @@ | ||||
| //go:build integration | ||||
| // +build integration | ||||
|  | ||||
| // can be execute with go test -tags=integration ./integration/... | ||||
|  | ||||
| package main | ||||
|   | ||||
| @@ -1,4 +1,6 @@ | ||||
| //go:build integration | ||||
| // +build integration | ||||
|  | ||||
| // can be execute with go test -tags=integration ./integration/... | ||||
|  | ||||
| package main | ||||
|   | ||||
| @@ -1,4 +1,6 @@ | ||||
| //go:build integration | ||||
| // +build integration | ||||
|  | ||||
| // can be execute with go test -tags=integration ./integration/... | ||||
|  | ||||
| package main | ||||
|   | ||||
| @@ -1,4 +1,6 @@ | ||||
| //go:build integration | ||||
| // +build integration | ||||
|  | ||||
| // can be execute with go test -tags=integration ./integration/... | ||||
|  | ||||
| package main | ||||
|   | ||||
| @@ -1,4 +1,6 @@ | ||||
| //go:build integration | ||||
| // +build integration | ||||
|  | ||||
| // can be execute with go test -tags=integration ./integration/... | ||||
|  | ||||
| package main | ||||
|   | ||||
| @@ -1,3 +1,4 @@ | ||||
| //go:build integration | ||||
| // +build integration | ||||
|  | ||||
| // can be execute with go test -tags=integration ./integration/... | ||||
|   | ||||
| @@ -1,4 +1,6 @@ | ||||
| //go:build integration | ||||
| // +build integration | ||||
|  | ||||
| // can be execute with go test -tags=integration ./integration/... | ||||
|  | ||||
| package main | ||||
|   | ||||
| @@ -1,4 +1,6 @@ | ||||
| //go:build integration | ||||
| // +build integration | ||||
|  | ||||
| // can be execute with go test -tags=integration ./integration/... | ||||
|  | ||||
| package main | ||||
|   | ||||
| @@ -1,3 +1,4 @@ | ||||
| //go:build !release | ||||
| // +build !release | ||||
|  | ||||
| package cpi | ||||
|   | ||||
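The long run of test-file hunks above is purely mechanical: Go 1.17 introduced the //go:build constraint syntax, and gofmt now inserts it above the legacy // +build line while keeping both in sync (older toolchains read only the second form). Each touched file ends up with a header like this:

	//go:build integration
	// +build integration

	// These files can be executed with: go test -tags=integration ./integration/...

	package main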
							
								
								
									
pkg/format/sarif.go (new file, 160 lines added)
							| @@ -0,0 +1,160 @@ | ||||
| package format | ||||
|  | ||||
| // SARIF format related JSON structs | ||||
| type SARIF struct { | ||||
| 	Schema  string `json:"$schema" default:"https://docs.oasis-open.org/sarif/sarif/v2.1.0/cos01/schemas/sarif-schema-2.1.0.json"` | ||||
| 	Version string `json:"version" default:"2.1.0"` | ||||
| 	Runs    []Runs `json:"runs"` | ||||
| } | ||||
|  | ||||
| // Runs of a Tool and related Results | ||||
| type Runs struct { | ||||
| 	Results []Results `json:"results"` | ||||
| 	Tool    Tool      `json:"tool"` | ||||
| 	/*Invocations         []Invocations      `json:"invocations"` | ||||
| 	OriginalUriBaseIds  OriginalUriBaseIds `json:"originalUriBaseIds"` | ||||
| 	Artifacts           []Artifact         `json:"artifacts"` | ||||
| 	AutomationDetails   AutomationDetails  `json:"automationDetails"` | ||||
| 	ColumnKind          string             `json:"columnKind" default:"utf16CodeUnits"` | ||||
| 	ThreadFlowLocations []Locations        `json:"threadFlowLocations"` | ||||
| 	Taxonomies          []Taxonomies       `json:"taxonomies"`*/ | ||||
| } | ||||
|  | ||||
| // Results these structs are relevant to the Results object | ||||
| type Results struct { | ||||
| 	RuleID         string           `json:"ruleId"` | ||||
| 	RuleIndex      int              `json:"ruleIndex"` | ||||
| 	Level          string           `json:"level,omitempty"` | ||||
| 	Message        Message          `json:"message"` | ||||
| 	AnalysisTarget ArtifactLocation `json:"analysisTarget,omitempty"` | ||||
| 	Locations      []Location       `json:"locations"` | ||||
| 	/*CodeFlows        []CodeFlow        `json:"codeFlows"` | ||||
| 	RelatedLocations []RelatedLocation `json:"relatedLocations"`*/ | ||||
| 	Properties SarifProperties `json:"properties"` | ||||
| } | ||||
|  | ||||
| // Message to detail the finding | ||||
| type Message struct { | ||||
| 	Text string `json:"text,omitempty"` | ||||
| } | ||||
|  | ||||
| // Location of the finding | ||||
| type Location struct { | ||||
| 	PhysicalLocation ArtifactLocation  `json:"physicalLocation,omitempty"` | ||||
| 	Region           Region            `json:"region,omitempty"` | ||||
| 	LogicalLocations []LogicalLocation `json:"logicalLocations,omitempty"` | ||||
| } | ||||
|  | ||||
| // ArtifactLocation describing the path of the artifact | ||||
| type ArtifactLocation struct { | ||||
| 	URI   string `json:"uri"` | ||||
| 	Index int    `json:"index,omitempty"` | ||||
| } | ||||
|  | ||||
| // Region where the finding was detected | ||||
| type Region struct { | ||||
| 	StartLine   int `json:"startLine,omitempty"` | ||||
| 	StartColumn int `json:"startColumn,omitempty"` | ||||
| 	EndLine     int `json:"EndLine,omitempty"` | ||||
| 	EndColumn   int `json:"EndColumn,omitempty"` | ||||
| 	ByteOffset  int `json:"ByteOffset,omitempty"` | ||||
| 	ByteLength  int `json:"ByteLength,omitempty"` | ||||
| } | ||||
|  | ||||
| // LogicalLocation of the finding | ||||
| type LogicalLocation struct { | ||||
| 	FullyQualifiedName string `json:"fullyQualifiedName"` | ||||
| } | ||||
|  | ||||
| // SarifProperties adding additional information/context to the finding | ||||
| type SarifProperties struct { | ||||
| 	InstanceID        string `json:"InstanceID"` | ||||
| 	InstanceSeverity  string `json:"InstanceSeverity"` | ||||
| 	Confidence        string `json:"Confidence"` | ||||
| 	Audited           bool   `json:"Audited"` | ||||
| 	ToolSeverity      string `json:"ToolSeverity"` | ||||
| 	ToolSeverityIndex int    `json:"ToolSeverityIndex"` | ||||
| 	ToolState         string `json:"ToolState"` | ||||
| 	ToolStateIndex    int    `json:"ToolStateIndex"` | ||||
| 	ToolAuditMessage  string `json:"ToolAuditMessage"` | ||||
| 	UnifiedAuditState string `json:"UnifiedAuditState"` | ||||
| } | ||||
|  | ||||
| // Tool these structs are relevant to the Tool object | ||||
| type Tool struct { | ||||
| 	Driver Driver `json:"driver"` | ||||
| } | ||||
|  | ||||
| // Driver meta information for the scan and tool context | ||||
| type Driver struct { | ||||
| 	Name           string      `json:"name"` | ||||
| 	Version        string      `json:"version"` | ||||
| 	InformationUri string      `json:"informationUri,omitempty"` | ||||
| 	Rules          []SarifRule `json:"rules"` | ||||
| 	//SupportedTaxonomies []SupportedTaxonomies `json:"supportedTaxonomies"` | ||||
| } | ||||
|  | ||||
| // SarifRule related rule use to identify the finding | ||||
| type SarifRule struct { | ||||
| 	ID                   string               `json:"id"` | ||||
| 	GUID                 string               `json:"guid"` | ||||
| 	Name                 string               `json:"name,omitempty"` | ||||
| 	ShortDescription     Message              `json:"shortDescription"` | ||||
| 	FullDescription      Message              `json:"fullDescription"` | ||||
| 	DefaultConfiguration DefaultConfiguration `json:"defaultConfiguration"` | ||||
| 	HelpURI              string               `json:"helpUri,omitempty"` | ||||
| 	Help                 Help                 `json:"help,omitempty"` | ||||
| 	Relationships        []Relationships      `json:"relationships,omitempty"` | ||||
| 	Properties           *SarifRuleProperties `json:"properties,omitempty"` | ||||
| } | ||||
|  | ||||
| // Help provides additional guidance to resolve the finding | ||||
| type Help struct { | ||||
| 	Text     string `json:"text,omitempty"` | ||||
| 	Markdown string `json:"markdown,omitempty"` | ||||
| } | ||||
|  | ||||
| // SupportedTaxonomies | ||||
| type SupportedTaxonomies struct { | ||||
| 	Name  string `json:"name"` | ||||
| 	Index int    `json:"index"` | ||||
| 	GUID  string `json:"guid"` | ||||
| } | ||||
|  | ||||
| // DefaultConfiguration | ||||
| type DefaultConfiguration struct { | ||||
| 	Properties DefaultProperties `json:"properties"` | ||||
| 	Level      string            `json:"level,omitempty"` //This exists in the template, but not sure how it is populated. TODO. | ||||
| } | ||||
|  | ||||
| //DefaultProperties | ||||
| type DefaultProperties struct { | ||||
| 	DefaultSeverity string `json:"DefaultSeverity"` | ||||
| } | ||||
|  | ||||
| // Relationships | ||||
| type Relationships struct { | ||||
| 	Target Target   `json:"target"` | ||||
| 	Kinds  []string `json:"kinds"` | ||||
| } | ||||
|  | ||||
| // Target | ||||
| type Target struct { | ||||
| 	ID            string        `json:"id"` | ||||
| 	ToolComponent ToolComponent `json:"toolComponent"` | ||||
| } | ||||
|  | ||||
| //ToolComponent | ||||
| type ToolComponent struct { | ||||
| 	Name string `json:"name"` | ||||
| 	GUID string `json:"guid"` | ||||
| } | ||||
|  | ||||
| // SarifRuleProperties | ||||
| type SarifRuleProperties struct { | ||||
| 	Accuracy    string   `json:"Accuracy,omitempty"` | ||||
| 	Impact      string   `json:"Impact,omitempty"` | ||||
| 	Probability string   `json:"Probability,omitempty"` | ||||
| 	Tags        []string `json:"tags,omitempty"` | ||||
| 	Precision   string   `json:"precision,omitempty"` | ||||
| } | ||||
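The new pkg/format/sarif.go only declares a partial SARIF 2.1.0 object model; note that encoding/json ignores the `default:` struct tags, so a producer has to set Schema and Version explicitly. A self-contained sketch of emitting a minimal document with these structs (the finding values are illustrative, not taken from the diff):

	package main

	import (
		"encoding/json"
		"fmt"

		"github.com/SAP/jenkins-library/pkg/format"
	)

	func main() {
		doc := format.SARIF{
			// The `default:` tags on the struct are documentation only;
			// the values must be assigned here.
			Schema:  "https://docs.oasis-open.org/sarif/sarif/v2.1.0/cos01/schemas/sarif-schema-2.1.0.json",
			Version: "2.1.0",
			Runs: []format.Runs{{
				Tool: format.Tool{Driver: format.Driver{
					Name:    "WhiteSource",
					Version: "1.0",
					Rules:   []format.SarifRule{},
				}},
				Results: []format.Results{{
					RuleID:  "CVE-0000-00000", // hypothetical rule/finding ID
					Level:   "error",
					Message: format.Message{Text: "vulnerable library detected"},
				}},
			}},
		}
		out, _ := json.MarshalIndent(doc, "", "  ")
		fmt.Println(string(out))
	}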
| @@ -12,12 +12,12 @@ import ( | ||||
|  | ||||
| 	"github.com/piper-validation/fortify-client-go/models" | ||||
|  | ||||
| 	"github.com/SAP/jenkins-library/pkg/format" | ||||
| 	"github.com/SAP/jenkins-library/pkg/log" | ||||
| 	FileUtils "github.com/SAP/jenkins-library/pkg/piperutils" | ||||
| ) | ||||
|  | ||||
| // This struct encapsulates everyting in the FVDL document | ||||
|  | ||||
| // FVDL This struct encapsulates everyting in the FVDL document | ||||
| type FVDL struct { | ||||
| 	XMLName         xml.Name `xml:"FVDL"` | ||||
| 	Xmlns           string   `xml:"xmlns,attr"` | ||||
| @@ -36,25 +36,27 @@ type FVDL struct { | ||||
| 	EngineData      EngineData      `xml:"EngineData"` | ||||
| } | ||||
|  | ||||
| // CreatedTS | ||||
| type CreatedTS struct { | ||||
| 	XMLName xml.Name `xml:"CreatedTS"` | ||||
| 	Date    string   `xml:"date,attr"` | ||||
| 	Time    string   `xml:"time,attr"` | ||||
| } | ||||
|  | ||||
| // UUID | ||||
| type UUID struct { | ||||
| 	XMLName xml.Name `xml:"UUID"` | ||||
| 	Uuid    string   `xml:",innerxml"` | ||||
| } | ||||
|  | ||||
| // These structures are relevant to the Build object | ||||
|  | ||||
| // LOC These structures are relevant to the Build object | ||||
| type LOC struct { | ||||
| 	XMLName  xml.Name `xml:"LOC"` | ||||
| 	LocType  string   `xml:"type,attr"` | ||||
| 	LocValue string   `xml:",innerxml"` | ||||
| } | ||||
|  | ||||
| // Build | ||||
| type Build struct { | ||||
| 	XMLName        xml.Name `xml:"Build"` | ||||
| 	Project        string   `xml:"Project"` | ||||
| @@ -68,6 +70,7 @@ type Build struct { | ||||
| 	Scantime       ScanTime `xml:"ScanTime"` | ||||
| } | ||||
|  | ||||
| // File | ||||
| type File struct { | ||||
| 	XMLName       xml.Name `xml:"File"` | ||||
| 	FileSize      int      `xml:"size,attr"` | ||||
| @@ -79,18 +82,19 @@ type File struct { | ||||
| 	Locs          []LOC    `xml:",any,omitempty"` | ||||
| } | ||||
|  | ||||
| // ScanTime | ||||
| type ScanTime struct { | ||||
| 	XMLName xml.Name `xml:"ScanTime"` | ||||
| 	Value   int      `xml:"value,attr"` | ||||
| } | ||||
|  | ||||
| // These structures are relevant to the Vulnerabilities object | ||||
|  | ||||
| // Vulnerabilities These structures are relevant to the Vulnerabilities object | ||||
| type Vulnerabilities struct { | ||||
| 	XMLName       xml.Name        `xml:"Vulnerabilities"` | ||||
| 	Vulnerability []Vulnerability `xml:"Vulnerability"` | ||||
| } | ||||
|  | ||||
| // Vulnerability | ||||
| type Vulnerability struct { | ||||
| 	XMLName      xml.Name     `xml:"Vulnerability"` | ||||
| 	ClassInfo    ClassInfo    `xml:"ClassInfo"` | ||||
| @@ -98,6 +102,7 @@ type Vulnerability struct { | ||||
| 	AnalysisInfo AnalysisInfo `xml:"AnalysisInfo>Unified"` | ||||
| } | ||||
|  | ||||
| // ClassInfo | ||||
| type ClassInfo struct { | ||||
| 	XMLName         xml.Name `xml:"ClassInfo"` | ||||
| 	ClassID         string   `xml:"ClassID"` | ||||
| @@ -108,6 +113,7 @@ type ClassInfo struct { | ||||
| 	DefaultSeverity string   `xml:"DefaultSeverity"` | ||||
| } | ||||
|  | ||||
| // InstanceInfo | ||||
| type InstanceInfo struct { | ||||
| 	XMLName          xml.Name `xml:"InstanceInfo"` | ||||
| 	InstanceID       string   `xml:"InstanceID"` | ||||
| @@ -115,12 +121,14 @@ type InstanceInfo struct { | ||||
| 	Confidence       string   `xml:"Confidence"` | ||||
| } | ||||
|  | ||||
| // AnalysisInfo | ||||
| type AnalysisInfo struct { //Note that this is directly the "Unified" object | ||||
| 	Context                Context | ||||
| 	ReplacementDefinitions ReplacementDefinitions `xml:"ReplacementDefinitions"` | ||||
| 	Trace                  []Trace                `xml:"Trace"` | ||||
| } | ||||
|  | ||||
| // Context | ||||
| type Context struct { | ||||
| 	XMLName   xml.Name `xml:"Context"` | ||||
| 	ContextId string   `xml:"id,attr,omitempty"` | ||||
| @@ -128,6 +136,7 @@ type Context struct { | ||||
| 	FDSL      FunctionDeclarationSourceLocation | ||||
| } | ||||
|  | ||||
| // Function | ||||
| type Function struct { | ||||
| 	XMLName                xml.Name `xml:"Function"` | ||||
| 	FunctionName           string   `xml:"name,attr"` | ||||
| @@ -135,6 +144,7 @@ type Function struct { | ||||
| 	FunctionEnclosingClass string   `xml:"enclosingClass,attr"` | ||||
| } | ||||
|  | ||||
| // FunctionDeclarationSourceLocation | ||||
| type FunctionDeclarationSourceLocation struct { | ||||
| 	XMLName      xml.Name `xml:"FunctionDeclarationSourceLocation"` | ||||
| 	FDSLPath     string   `xml:"path,attr"` | ||||
| @@ -144,18 +154,21 @@ type FunctionDeclarationSourceLocation struct { | ||||
| 	FDSLColEnd   string   `xml:"colEnd,attr"` | ||||
| } | ||||
|  | ||||
| // ReplacementDefinitions | ||||
| type ReplacementDefinitions struct { | ||||
| 	XMLName     xml.Name      `xml:"ReplacementDefinitions"` | ||||
| 	Def         []Def         `xml:"Def"` | ||||
| 	LocationDef []LocationDef `xml:"LocationDef"` | ||||
| } | ||||
|  | ||||
| // Def | ||||
| type Def struct { | ||||
| 	XMLName  xml.Name `xml:"Def"` | ||||
| 	DefKey   string   `xml:"key,attr"` | ||||
| 	DefValue string   `xml:"value,attr"` | ||||
| } | ||||
|  | ||||
| // LocationDef | ||||
| type LocationDef struct { | ||||
| 	XMLName  xml.Name `xml:"LocationDef"` | ||||
| 	Path     string   `xml:"path,attr"` | ||||
| @@ -166,27 +179,32 @@ type LocationDef struct { | ||||
| 	Key      string   `xml:"key,attr"` | ||||
| } | ||||
|  | ||||
| // Trace | ||||
| type Trace struct { | ||||
| 	XMLName xml.Name `xml:"Trace"` | ||||
| 	Primary Primary  `xml:"Primary"` | ||||
| } | ||||
|  | ||||
| // Primary | ||||
| type Primary struct { | ||||
| 	XMLName xml.Name `xml:"Primary"` | ||||
| 	Entry   []Entry  `xml:"Entry"` | ||||
| } | ||||
|  | ||||
| // Entry | ||||
| type Entry struct { | ||||
| 	XMLName xml.Name `xml:"Entry"` | ||||
| 	NodeRef NodeRef  `xml:"NodeRef,omitempty"` | ||||
| 	Node    Node     `xml:"Node,omitempty"` | ||||
| } | ||||
|  | ||||
| // NodeRef | ||||
| type NodeRef struct { | ||||
| 	XMLName xml.Name `xml:"NodeRef"` | ||||
| 	RefId   int      `xml:"id,attr"` | ||||
| } | ||||
|  | ||||
| // Node | ||||
| type Node struct { | ||||
| 	XMLName        xml.Name       `xml:"Node"` | ||||
| 	IsDefault      string         `xml:"isDefault,attr,omitempty"` | ||||
| @@ -197,6 +215,7 @@ type Node struct { | ||||
| 	Knowledge      Knowledge      `xml:"Knowledge,omitempty"` | ||||
| } | ||||
|  | ||||
| // SourceLocation | ||||
| type SourceLocation struct { | ||||
| 	XMLName   xml.Name `xml:"SourceLocation"` | ||||
| 	Path      string   `xml:"path,attr"` | ||||
| @@ -208,34 +227,40 @@ type SourceLocation struct { | ||||
| 	Snippet   string   `xml:"snippet,attr"` | ||||
| } | ||||
|  | ||||
| // Action | ||||
| type Action struct { | ||||
| 	XMLName    xml.Name `xml:"Action"` | ||||
| 	Type       string   `xml:"type,attr"` | ||||
| 	ActionData string   `xml:",innerxml"` | ||||
| } | ||||
|  | ||||
| // Reason | ||||
| type Reason struct { | ||||
| 	XMLName xml.Name `xml:"Reason"` | ||||
| 	Rule    Rule     `xml:"Rule,omitempty"` | ||||
| 	Trace   Trace    `xml:"Trace,omitempty"` | ||||
| } | ||||
|  | ||||
| // Rule | ||||
| type Rule struct { | ||||
| 	XMLName xml.Name `xml:"Rule"` | ||||
| 	RuleID  string   `xml:"ruleID,attr"` | ||||
| } | ||||
|  | ||||
| // Group | ||||
| type Group struct { | ||||
| 	XMLName xml.Name `xml:"Group"` | ||||
| 	Name    string   `xml:"name,attr"` | ||||
| 	Data    string   `xml:",innerxml"` | ||||
| } | ||||
|  | ||||
| // Knowledge | ||||
| type Knowledge struct { | ||||
| 	XMLName xml.Name `xml:"Knowledge"` | ||||
| 	Facts   []Fact   `xml:"Fact"` | ||||
| } | ||||
|  | ||||
| // Fact | ||||
| type Fact struct { | ||||
| 	XMLName  xml.Name `xml:"Fact"` | ||||
| 	Primary  string   `xml:"primary,attr"` | ||||
| @@ -243,22 +268,19 @@ type Fact struct { | ||||
| 	FactData string   `xml:",innerxml"` | ||||
| } | ||||
|  | ||||
| // These structures are relevant to the ContextPool object | ||||
|  | ||||
| // ContextPool These structures are relevant to the ContextPool object | ||||
| type ContextPool struct { | ||||
| 	XMLName xml.Name  `xml:"ContextPool"` | ||||
| 	Context []Context `xml:"Context"` | ||||
| } | ||||
|  | ||||
| // These structures are relevant to the UnifiedNodePool object | ||||
|  | ||||
| // UnifiedNodePool These structures are relevant to the UnifiedNodePool object | ||||
| type UnifiedNodePool struct { | ||||
| 	XMLName xml.Name `xml:"UnifiedNodePool"` | ||||
| 	Node    []Node   `xml:"Node"` | ||||
| } | ||||
|  | ||||
| // These structures are relevant to the Description object | ||||
|  | ||||
| // Description These structures are relevant to the Description object | ||||
| type Description struct { | ||||
| 	XMLName           xml.Name          `xml:"Description"` | ||||
| 	ContentType       string            `xml:"contentType,attr"` | ||||
| @@ -271,32 +293,38 @@ type Description struct { | ||||
| 	CustomDescription CustomDescription `xml:"CustomDescription,omitempty"` | ||||
| } | ||||
|  | ||||
| // Abstract | ||||
| type Abstract struct { | ||||
| 	XMLName xml.Name `xml:"Abstract"` | ||||
| 	Text    string   `xml:",innerxml"` | ||||
| } | ||||
|  | ||||
| // Explanation | ||||
| type Explanation struct { | ||||
| 	XMLName xml.Name `xml:"Explanation"` | ||||
| 	Text    string   `xml:",innerxml"` | ||||
| } | ||||
|  | ||||
| // Recommendations | ||||
| type Recommendations struct { | ||||
| 	XMLName xml.Name `xml:"Recommendations"` | ||||
| 	Text    string   `xml:",innerxml"` | ||||
| } | ||||
|  | ||||
| // Reference | ||||
| type Reference struct { | ||||
| 	XMLName xml.Name `xml:"Reference"` | ||||
| 	Title   string   `xml:"Title"` | ||||
| 	Author  string   `xml:"Author"` | ||||
| } | ||||
|  | ||||
| // Tip | ||||
| type Tip struct { | ||||
| 	XMLName xml.Name `xml:"Tip"` | ||||
| 	Tip     string   `xml:",innerxml"` | ||||
| } | ||||
|  | ||||
| // CustomDescription | ||||
| type CustomDescription struct { | ||||
| 	XMLName         xml.Name        `xml:"CustomDescription"` | ||||
| 	ContentType     string          `xml:"contentType,attr"` | ||||
| @@ -306,8 +334,7 @@ type CustomDescription struct { | ||||
| 	References      []Reference     `xml:"References>Reference"` | ||||
| } | ||||
|  | ||||
| // These structures are relevant to the Snippets object | ||||
|  | ||||
| // Snippet These structures are relevant to the Snippets object | ||||
| type Snippet struct { | ||||
| 	XMLName   xml.Name `xml:"Snippet"` | ||||
| 	SnippetId string   `xml:"id,attr"` | ||||
| @@ -317,8 +344,7 @@ type Snippet struct { | ||||
| 	Text      string   `xml:"Text"` | ||||
| } | ||||
|  | ||||
| // These structures are relevant to the ProgramData object | ||||
|  | ||||
| // ProgramData These structures are relevant to the ProgramData object | ||||
| type ProgramData struct { | ||||
| 	XMLName         xml.Name         `xml:"ProgramData"` | ||||
| 	Sources         []SourceInstance `xml:"Sources>SourceInstance"` | ||||
| @@ -326,6 +352,7 @@ type ProgramData struct { | ||||
| 	CalledWithNoDef []Function       `xml:"CalledWithNoDef>Function"` | ||||
| } | ||||
|  | ||||
| // SourceInstance | ||||
| type SourceInstance struct { | ||||
| 	XMLName        xml.Name       `xml:"SourceInstance"` | ||||
| 	RuleID         string         `xml:"ruleID,attr"` | ||||
| @@ -335,28 +362,33 @@ type SourceInstance struct { | ||||
| 	TaintFlags     TaintFlags     `xml:"TaintFlags"` | ||||
| } | ||||
|  | ||||
| // FunctionCall | ||||
| type FunctionCall struct { | ||||
| 	XMLName        xml.Name       `xml:"FunctionCall"` | ||||
| 	SourceLocation SourceLocation `xml:"SourceLocation"` | ||||
| 	Function       Function       `xml:"Function"` | ||||
| } | ||||
|  | ||||
| // FunctionEntry | ||||
| type FunctionEntry struct { | ||||
| 	XMLName        xml.Name       `xml:"FunctionEntry"` | ||||
| 	SourceLocation SourceLocation `xml:"SourceLocation"` | ||||
| 	Function       Function       `xml:"Function"` | ||||
| } | ||||
|  | ||||
| // TaintFlags | ||||
| type TaintFlags struct { | ||||
| 	XMLName   xml.Name    `xml:"TaintFlags"` | ||||
| 	TaintFlag []TaintFlag `xml:"TaintFlag"` | ||||
| } | ||||
|  | ||||
| // TaintFlag | ||||
| type TaintFlag struct { | ||||
| 	XMLName       xml.Name `xml:"TaintFlag"` | ||||
| 	TaintFlagName string   `xml:"name,attr"` | ||||
| } | ||||
|  | ||||
| // SinkInstance | ||||
| type SinkInstance struct { | ||||
| 	XMLName        xml.Name       `xml:"SinkInstance"` | ||||
| 	RuleID         string         `xml:"ruleID,attr"` | ||||
| @@ -364,8 +396,7 @@ type SinkInstance struct { | ||||
| 	SourceLocation SourceLocation `xml:"SourceLocation,omitempty"` | ||||
| } | ||||
|  | ||||
| // These structures are relevant to the EngineData object | ||||
|  | ||||
| // EngineData These structures are relevant to the EngineData object | ||||
| type EngineData struct { | ||||
| 	XMLName       xml.Name     `xml:"EngineData"` | ||||
| 	EngineVersion string       `xml:"EngineVersion"` | ||||
| @@ -379,6 +410,7 @@ type EngineData struct { | ||||
| 	LicenseInfo   LicenseInfo  `xml:"LicenseInfo"` | ||||
| } | ||||
|  | ||||
| // RulePack | ||||
| type RulePack struct { | ||||
| 	XMLName    xml.Name `xml:"RulePack"` | ||||
| 	RulePackID string   `xml:"RulePackID"` | ||||
| @@ -388,24 +420,28 @@ type RulePack struct { | ||||
| 	MAC        string   `xml:"MAC"` | ||||
| } | ||||
|  | ||||
| // Properties | ||||
| type Properties struct { | ||||
| 	XMLName        xml.Name   `xml:"Properties"` | ||||
| 	PropertiesType string     `xml:"type,attr"` | ||||
| 	Property       []Property `xml:"Property"` | ||||
| } | ||||
|  | ||||
| // Property maps the Property XML element | ||||
| type Property struct { | ||||
| 	XMLName xml.Name `xml:"Property"` | ||||
| 	Name    string   `xml:"name"` | ||||
| 	Value   string   `xml:"value"` | ||||
| } | ||||
|  | ||||
| // Error maps the Error XML element | ||||
| type Error struct { | ||||
| 	XMLName      xml.Name `xml:"Error"` | ||||
| 	ErrorCode    string   `xml:"code,attr"` | ||||
| 	ErrorMessage string   `xml:",innerxml"` | ||||
| } | ||||
|  | ||||
| // MachineInfo maps the MachineInfo XML element | ||||
| type MachineInfo struct { | ||||
| 	XMLName  xml.Name `xml:"MachineInfo"` | ||||
| 	Hostname string   `xml:"Hostname"` | ||||
| @@ -413,29 +449,34 @@ type MachineInfo struct { | ||||
| 	Platform string   `xml:"Platform"` | ||||
| } | ||||
|  | ||||
| // FilterResult maps the FilterResult XML element | ||||
| type FilterResult struct { | ||||
| 	XMLName xml.Name `xml:"FilterResult"` | ||||
| 	//Todo? No data in sample audit file | ||||
| } | ||||
|  | ||||
| // RuleInfo maps the Rule XML element | ||||
| type RuleInfo struct { | ||||
| 	XMLName       xml.Name `xml:"Rule"` | ||||
| 	RuleID        string   `xml:"id,attr"` | ||||
| 	MetaInfoGroup []Group  `xml:"MetaInfo>Group,omitempty"` | ||||
| } | ||||
|  | ||||
| // LicenseInfo maps the LicenseInfo XML element | ||||
| type LicenseInfo struct { | ||||
| 	XMLName    xml.Name     `xml:"LicenseInfo"` | ||||
| 	Metadata   []Metadata   `xml:"Metadata"` | ||||
| 	Capability []Capability `xml:"Capability"` | ||||
| } | ||||
|  | ||||
| // Metadata maps the Metadata XML element | ||||
| type Metadata struct { | ||||
| 	XMLName xml.Name `xml:"Metadata"` | ||||
| 	Name    string   `xml:"name"` | ||||
| 	Value   string   `xml:"value"` | ||||
| } | ||||
|  | ||||
| // Capability maps the Capability XML element | ||||
| type Capability struct { | ||||
| 	XMLName    xml.Name  `xml:"Capability"` | ||||
| 	Name       string    `xml:"Name"` | ||||
| @@ -443,126 +484,17 @@ type Capability struct { | ||||
| 	Attribute  Attribute `xml:"Attribute"` | ||||
| } | ||||
|  | ||||
| // Attribute maps the Attribute XML element | ||||
| type Attribute struct { | ||||
| 	XMLName xml.Name `xml:"Attribute"` | ||||
| 	Name    string   `xml:"name"` | ||||
| 	Value   string   `xml:"value"` | ||||
| } | ||||
|  | ||||
| // JSON receptacle structs | ||||
|  | ||||
| type SARIF struct { | ||||
| 	Schema  string `json:"$schema" default:"https://docs.oasis-open.org/sarif/sarif/v2.1.0/cos01/schemas/sarif-schema-2.1.0.json"` | ||||
| 	Version string `json:"version" default:"2.1.0"` | ||||
| 	Runs    []Runs `json:"runs"` | ||||
| } | ||||
|  | ||||
| type Runs struct { | ||||
| 	Results []Results `json:"results"` | ||||
| 	Tool    Tool      `json:"tool"` | ||||
| 	/*Invocations         []Invocations      `json:"invocations"` | ||||
| 	OriginalUriBaseIds  OriginalUriBaseIds `json:"originalUriBaseIds"` | ||||
| 	Artifacts           []Artifact         `json:"artifacts"` | ||||
| 	AutomationDetails   AutomationDetails  `json:"automationDetails"` | ||||
| 	ColumnKind          string             `json:"columnKind" default:"utf16CodeUnits"` | ||||
| 	ThreadFlowLocations []Locations        `json:"threadFlowLocations"` | ||||
| 	Taxonomies          []Taxonomies       `json:"taxonomies"`*/ | ||||
| } | ||||
|  | ||||
| // These structs are relevant to the Results object | ||||
|  | ||||
| type Results struct { | ||||
| 	RuleID    string  `json:"ruleId"` | ||||
| 	RuleIndex int     `json:"ruleIndex"` | ||||
| 	Level     string  `json:"level,omitempty"` | ||||
| 	Message   Message `json:"message"` | ||||
| 	/*Locations        []Location        `json:"locations"` | ||||
| 	CodeFlows        []CodeFlow        `json:"codeFlows"` | ||||
| 	RelatedLocations []RelatedLocation `json:"relatedLocations"`*/ | ||||
| 	Properties SarifProperties `json:"properties"` | ||||
| } | ||||
|  | ||||
| type Message struct { | ||||
| 	Text string `json:"text,omitempty"` | ||||
| } | ||||
|  | ||||
| type SarifProperties struct { | ||||
| 	InstanceID        string `json:"InstanceID"` | ||||
| 	InstanceSeverity  string `json:"InstanceSeverity"` | ||||
| 	Confidence        string `json:"Confidence"` | ||||
| 	Audited           bool   `json:"Audited"` | ||||
| 	ToolSeverity      string `json:"ToolSeverity"` | ||||
| 	ToolSeverityIndex int    `json:"ToolSeverityIndex"` | ||||
| 	ToolState         string `json:"ToolState"` | ||||
| 	ToolStateIndex    int    `json:"ToolStateIndex"` | ||||
| 	ToolAuditMessage  string `json:"ToolAuditMessage"` | ||||
| 	UnifiedAuditState string `json:"UnifiedAuditState"` | ||||
| } | ||||
|  | ||||
| // These structs are relevant to the Tool object | ||||
|  | ||||
| type Tool struct { | ||||
| 	Driver Driver `json:"driver"` | ||||
| } | ||||
|  | ||||
| type Driver struct { | ||||
| 	Name           string      `json:"name"` | ||||
| 	Version        string      `json:"version"` | ||||
| 	InformationUri string      `json:"informationUri,omitempty"` | ||||
| 	Rules          []SarifRule `json:"rules"` | ||||
| 	//SupportedTaxonomies []SupportedTaxonomies `json:"supportedTaxonomies"` | ||||
| } | ||||
|  | ||||
| type SarifRule struct { | ||||
| 	Id                   string               `json:"id"` | ||||
| 	Guid                 string               `json:"guid"` | ||||
| 	Name                 string               `json:"name,omitempty"` | ||||
| 	ShortDescription     Message              `json:"shortDescription"` | ||||
| 	FullDescription      Message              `json:"fullDescription"` | ||||
| 	DefaultConfiguration DefaultConfiguration `json:"defaultConfiguration"` | ||||
| 	Relationships        []Relationships      `json:"relationships,omitempty"` | ||||
| 	Properties           *SarifRuleProperties `json:"properties,omitempty"` | ||||
| } | ||||
|  | ||||
| type SupportedTaxonomies struct { | ||||
| 	Name  string `json:"name"` | ||||
| 	Index int    `json:"index"` | ||||
| 	Guid  string `json:"guid"` | ||||
| } | ||||
|  | ||||
| type DefaultConfiguration struct { | ||||
| 	Properties DefaultProperties `json:"properties"` | ||||
| 	Level      string            `json:"level,omitempty"` //This exists in the template, but not sure how it is populated. TODO. | ||||
| } | ||||
|  | ||||
| type DefaultProperties struct { | ||||
| 	DefaultSeverity string `json:"DefaultSeverity"` | ||||
| } | ||||
|  | ||||
| type Relationships struct { | ||||
| 	Target Target   `json:"target"` | ||||
| 	Kinds  []string `json:"kinds"` | ||||
| } | ||||
|  | ||||
| type Target struct { | ||||
| 	Id            string        `json:"id"` | ||||
| 	ToolComponent ToolComponent `json:"toolComponent"` | ||||
| } | ||||
|  | ||||
| type ToolComponent struct { | ||||
| 	Name string `json:"name"` | ||||
| 	Guid string `json:"guid"` | ||||
| } | ||||
|  | ||||
| type SarifRuleProperties struct { | ||||
| 	Accuracy    string `json:"Accuracy,omitempty"` | ||||
| 	Impact      string `json:"Impact,omitempty"` | ||||
| 	Probability string `json:"Probability,omitempty"` | ||||
| } | ||||
|  | ||||
| func ConvertFprToSarif(sys System, project *models.Project, projectVersion *models.ProjectVersion, resultFilePath string) (SARIF, error) { | ||||
| // ConvertFprToSarif converts the FPR file contents into SARIF format | ||||
| func ConvertFprToSarif(sys System, project *models.Project, projectVersion *models.ProjectVersion, resultFilePath string) (format.SARIF, error) { | ||||
| 	log.Entry().Debug("Extracting FPR.") | ||||
| 	var sarif SARIF | ||||
| 	var sarif format.SARIF | ||||
|  | ||||
| 	tmpFolder, err := ioutil.TempDir(".", "temp-") | ||||
| 	defer os.RemoveAll(tmpFolder) | ||||
| @@ -584,7 +516,8 @@ func ConvertFprToSarif(sys System, project *models.Project, projectVersion *mode | ||||
| 	return Parse(sys, project, projectVersion, data) | ||||
| } | ||||
|  | ||||
| func Parse(sys System, project *models.Project, projectVersion *models.ProjectVersion, data []byte) (SARIF, error) { | ||||
| // Parse parses the FPR file | ||||
| func Parse(sys System, project *models.Project, projectVersion *models.ProjectVersion, data []byte) (format.SARIF, error) { | ||||
| 	// To read XML data, Unmarshal or Decode can be used; here we use Decode to work on the stream | ||||
| 	reader := bytes.NewReader(data) | ||||
| 	decoder := xml.NewDecoder(reader) | ||||
| @@ -593,15 +526,15 @@ func Parse(sys System, project *models.Project, projectVersion *models.ProjectVe | ||||
| 	decoder.Decode(&fvdl) | ||||
|  | ||||
| 	//Now, we handle the sarif | ||||
| 	var sarif SARIF | ||||
| 	var sarif format.SARIF | ||||
| 	sarif.Schema = "https://docs.oasis-open.org/sarif/sarif/v2.1.0/cos01/schemas/sarif-schema-2.1.0.json" | ||||
| 	sarif.Version = "2.1.0" | ||||
| 	var fortifyRun Runs | ||||
| 	var fortifyRun format.Runs | ||||
| 	sarif.Runs = append(sarif.Runs, fortifyRun) | ||||
|  | ||||
| 	// Handle results/vulnerabilities | ||||
| 	for i := 0; i < len(fvdl.Vulnerabilities.Vulnerability); i++ { | ||||
| 		result := *new(Results) | ||||
| 		result := *new(format.Results) | ||||
| 		result.RuleID = fvdl.Vulnerabilities.Vulnerability[i].ClassInfo.ClassID | ||||
| 		result.Level = "none" //TODO | ||||
| 		//get message | ||||
| @@ -613,13 +546,13 @@ func Parse(sys System, project *models.Project, projectVersion *models.ProjectVe | ||||
| 				for l := 0; l < len(fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.ReplacementDefinitions.Def); l++ { | ||||
| 					rawMessage = strings.ReplaceAll(rawMessage, "Replace key=\""+fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.ReplacementDefinitions.Def[l].DefKey+"\"", fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.ReplacementDefinitions.Def[l].DefValue) | ||||
| 				} | ||||
| 				result.Message = Message{rawMessage} | ||||
| 				result.Message = format.Message{rawMessage} | ||||
| 				break | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		//handle properties | ||||
| 		prop := *new(SarifProperties) | ||||
| 		prop := *new(format.SarifProperties) | ||||
| 		prop.InstanceSeverity = fvdl.Vulnerabilities.Vulnerability[i].InstanceInfo.InstanceSeverity | ||||
| 		prop.Confidence = fvdl.Vulnerabilities.Vulnerability[i].InstanceInfo.Confidence | ||||
| 		prop.InstanceID = fvdl.Vulnerabilities.Vulnerability[i].InstanceInfo.InstanceID | ||||
| @@ -632,7 +565,7 @@ func Parse(sys System, project *models.Project, projectVersion *models.ProjectVe | ||||
| 			prop.ToolState = "Not an Issue" | ||||
| 			prop.ToolStateIndex = 1 | ||||
| 		} else if sys != nil { | ||||
| 			if err := prop.IntegrateAuditData(fvdl.Vulnerabilities.Vulnerability[i].InstanceInfo.InstanceID, sys, project, projectVersion); err != nil { | ||||
| 			if err := integrateAuditData(&prop, fvdl.Vulnerabilities.Vulnerability[i].InstanceInfo.InstanceID, sys, project, projectVersion); err != nil { | ||||
| 				log.Entry().Debug(err) | ||||
| 				prop.Audited = false | ||||
| 				prop.ToolState = "Unknown" | ||||
| @@ -649,17 +582,17 @@ func Parse(sys System, project *models.Project, projectVersion *models.ProjectVe | ||||
| 	} | ||||
|  | ||||
| 	//handle the tool object | ||||
| 	tool := *new(Tool) | ||||
| 	tool.Driver = *new(Driver) | ||||
| 	tool := *new(format.Tool) | ||||
| 	tool.Driver = *new(format.Driver) | ||||
| 	tool.Driver.Name = "MicroFocus Fortify SCA" | ||||
| 	tool.Driver.Version = fvdl.EngineData.EngineVersion | ||||
| 	tool.Driver.InformationUri = "https://www.microfocus.com/documentation/fortify-static-code-analyzer-and-tools/2020/SCA_Guide_20.2.0.pdf" | ||||
|  | ||||
| 	//handles rules | ||||
| 	for i := 0; i < len(fvdl.EngineData.RuleInfo); i++ { //i iterates on rules | ||||
| 		sarifRule := *new(SarifRule) | ||||
| 		sarifRule.Id = fvdl.EngineData.RuleInfo[i].RuleID | ||||
| 		sarifRule.Guid = fvdl.EngineData.RuleInfo[i].RuleID | ||||
| 		sarifRule := *new(format.SarifRule) | ||||
| 		sarifRule.ID = fvdl.EngineData.RuleInfo[i].RuleID | ||||
| 		sarifRule.GUID = fvdl.EngineData.RuleInfo[i].RuleID | ||||
| 		for j := 0; j < len(fvdl.Vulnerabilities.Vulnerability); j++ { //j iterates on vulns to find the name | ||||
| 			if fvdl.Vulnerabilities.Vulnerability[j].ClassInfo.ClassID == fvdl.EngineData.RuleInfo[i].RuleID { | ||||
| 				var nameArray []string | ||||
| @@ -679,7 +612,7 @@ func Parse(sys System, project *models.Project, projectVersion *models.ProjectVe | ||||
| 		} | ||||
| 		//Descriptions | ||||
| 		for j := 0; j < len(fvdl.Description); j++ { | ||||
| 			if fvdl.Description[j].ClassID == sarifRule.Id { | ||||
| 			if fvdl.Description[j].ClassID == sarifRule.ID { | ||||
| 				rawAbstract := fvdl.Description[j].Abstract.Text | ||||
| 				rawExplanation := fvdl.Description[j].Explanation.Text | ||||
| 				// Replacement definitions in abstract/explanation | ||||
| @@ -721,9 +654,9 @@ func Parse(sys System, project *models.Project, projectVersion *models.ProjectVe | ||||
| 				propArray = append(propArray, []string{fvdl.EngineData.RuleInfo[i].MetaInfoGroup[j].Name, fvdl.EngineData.RuleInfo[i].MetaInfoGroup[j].Data}) | ||||
| 			} | ||||
| 		} | ||||
| 		var ruleProp *SarifRuleProperties | ||||
| 		var ruleProp *format.SarifRuleProperties | ||||
| 		if len(propArray) != 0 { | ||||
| 			ruleProp = new(SarifRuleProperties) | ||||
| 			ruleProp = new(format.SarifRuleProperties) | ||||
| 			for j := 0; j < len(propArray); j++ { | ||||
| 				if propArray[j][0] == "Accuracy" { | ||||
| 					ruleProp.Accuracy = propArray[j][1] | ||||
| @@ -745,7 +678,7 @@ func Parse(sys System, project *models.Project, projectVersion *models.ProjectVe | ||||
| 	return sarif, nil | ||||
| } | ||||
|  | ||||
| func (RuleProp *SarifProperties) IntegrateAuditData(issueInstanceID string, sys System, project *models.Project, projectVersion *models.ProjectVersion) error { | ||||
| func integrateAuditData(ruleProp *format.SarifProperties, issueInstanceID string, sys System, project *models.Project, projectVersion *models.ProjectVersion) error { | ||||
| 	data, err := sys.GetIssueDetails(projectVersion.ID, issueInstanceID) | ||||
| 	log.Entry().Debug("Looking up audit state of " + issueInstanceID) | ||||
| 	if err != nil { | ||||
| @@ -755,36 +688,36 @@ func (RuleProp *SarifProperties) IntegrateAuditData(issueInstanceID string, sys | ||||
| 		log.Entry().Error("not exactly 1 issue found, found " + fmt.Sprint(len(data))) | ||||
| 		return errors.New("not exactly 1 issue found, found " + fmt.Sprint(len(data))) | ||||
| 	} | ||||
| 	RuleProp.Audited = data[0].Audited | ||||
| 	RuleProp.ToolSeverity = *data[0].Friority | ||||
| 	switch RuleProp.ToolSeverity { | ||||
| 	ruleProp.Audited = data[0].Audited | ||||
| 	ruleProp.ToolSeverity = *data[0].Friority | ||||
| 	switch ruleProp.ToolSeverity { | ||||
| 	case "Critical": | ||||
| 		RuleProp.ToolSeverityIndex = 5 | ||||
| 		ruleProp.ToolSeverityIndex = 5 | ||||
| 	case "Urgent": | ||||
| 		RuleProp.ToolSeverityIndex = 4 | ||||
| 		ruleProp.ToolSeverityIndex = 4 | ||||
| 	case "High": | ||||
| 		RuleProp.ToolSeverityIndex = 3 | ||||
| 		ruleProp.ToolSeverityIndex = 3 | ||||
| 	case "Medium": | ||||
| 		RuleProp.ToolSeverityIndex = 2 | ||||
| 		ruleProp.ToolSeverityIndex = 2 | ||||
| 	case "Low": | ||||
| 		RuleProp.ToolSeverityIndex = 1 | ||||
| 		ruleProp.ToolSeverityIndex = 1 | ||||
| 	} | ||||
| 	if RuleProp.Audited { | ||||
| 		RuleProp.ToolState = *data[0].PrimaryTag | ||||
| 		switch RuleProp.ToolState { //This is as easy as it can get, seeing that the index is not in the response. | ||||
| 	if ruleProp.Audited { | ||||
| 		ruleProp.ToolState = *data[0].PrimaryTag | ||||
| 		switch ruleProp.ToolState { //This is as easy as it can get, seeing that the index is not in the response. | ||||
| 		case "Exploitable": | ||||
| 			RuleProp.ToolStateIndex = 5 | ||||
| 			ruleProp.ToolStateIndex = 5 | ||||
| 		case "Suspicious": | ||||
| 			RuleProp.ToolStateIndex = 4 | ||||
| 			ruleProp.ToolStateIndex = 4 | ||||
| 		case "Bad Practice": | ||||
| 			RuleProp.ToolStateIndex = 3 | ||||
| 			ruleProp.ToolStateIndex = 3 | ||||
| 		case "Reliability Issue": | ||||
| 			RuleProp.ToolStateIndex = 2 | ||||
| 			ruleProp.ToolStateIndex = 2 | ||||
| 		case "Not an Issue": | ||||
| 			RuleProp.ToolStateIndex = 1 | ||||
| 			ruleProp.ToolStateIndex = 1 | ||||
| 		} | ||||
| 	} else { | ||||
| 		RuleProp.ToolState = "Unreviewed" | ||||
| 		ruleProp.ToolState = "Unreviewed" | ||||
| 	} | ||||
| 	if *data[0].HasComments { //fetch latest message if comments exist | ||||
| 		//Fetch the ID | ||||
| @@ -793,7 +726,7 @@ func (RuleProp *SarifProperties) IntegrateAuditData(issueInstanceID string, sys | ||||
| 		if err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 		RuleProp.ToolAuditMessage = *commentData[0].Comment | ||||
| 		ruleProp.ToolAuditMessage = *commentData[0].Comment | ||||
| 	} | ||||
| 	return nil | ||||
| } | ||||
|   | ||||
| @@ -9,6 +9,7 @@ import ( | ||||
| 	"strings" | ||||
| 	"time" | ||||
|  | ||||
| 	"github.com/SAP/jenkins-library/pkg/format" | ||||
| 	"github.com/SAP/jenkins-library/pkg/log" | ||||
| 	"github.com/SAP/jenkins-library/pkg/piperutils" | ||||
| 	"github.com/SAP/jenkins-library/pkg/reporting" | ||||
| @@ -129,7 +130,7 @@ func WriteJSONReport(jsonReport FortifyReportData) ([]piperutils.Path, error) { | ||||
| 	return reportPaths, nil | ||||
| } | ||||
|  | ||||
| func WriteSarif(sarif SARIF) ([]piperutils.Path, error) { | ||||
| func WriteSarif(sarif format.SARIF) ([]piperutils.Path, error) { | ||||
| 	utils := piperutils.Files{} | ||||
| 	reportPaths := []piperutils.Path{} | ||||
|  | ||||
|   | ||||
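Both conversion and report writing now operate on the shared format.SARIF type instead of package-local structs. Below is a minimal sketch of how a step might wire the two entry points together, assuming the helpers live in the pkg/fortify package; the surrounding function, the import path for the Fortify client models, and the result file path are assumptions for illustration, not part of this change:

package example

import (
	"github.com/SAP/jenkins-library/pkg/fortify"
	"github.com/SAP/jenkins-library/pkg/log"
	"github.com/piper-validation/fortify-client-go/models" // assumed import path for the Fortify client models
	"github.com/pkg/errors"
)

// reportSarif converts a downloaded FPR result into SARIF and archives it.
func reportSarif(sys fortify.System, project *models.Project, projectVersion *models.ProjectVersion) error {
	// "target/result.fpr" is a hypothetical result file path
	sarif, err := fortify.ConvertFprToSarif(sys, project, projectVersion, "target/result.fpr")
	if err != nil {
		return errors.Wrap(err, "failed to convert FPR to SARIF")
	}
	reportPaths, err := fortify.WriteSarif(sarif)
	if err != nil {
		return errors.Wrap(err, "failed to write SARIF report")
	}
	log.Entry().Infof("SARIF report written to %v", reportPaths[0].Target)
	return nil
}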
| @@ -6,11 +6,12 @@ import ( | ||||
|  | ||||
| // FetchCommitOptions to configure the lookup | ||||
| type FetchCommitOptions struct { | ||||
| 	APIURL     string `json:"apiUrl,omitempty"` | ||||
| 	Owner      string `json:"owner,omitempty"` | ||||
| 	Repository string `json:"repository,omitempty"` | ||||
| 	Token      string `json:"token,omitempty"` | ||||
| 	SHA        string `json:"sha,omitempty"` | ||||
| 	APIURL       string   `json:"apiUrl,omitempty"` | ||||
| 	Owner        string   `json:"owner,omitempty"` | ||||
| 	Repository   string   `json:"repository,omitempty"` | ||||
| 	Token        string   `json:"token,omitempty"` | ||||
| 	SHA          string   `json:"sha,omitempty"` | ||||
| 	TrustedCerts []string `json:"trustedCerts,omitempty"` | ||||
| } | ||||
|  | ||||
| // FetchCommitResult to handle the lookup result | ||||
| @@ -25,7 +26,7 @@ type FetchCommitResult struct { | ||||
| // FetchCommitStatistics looks up the statistics for a certain commit SHA. | ||||
| func FetchCommitStatistics(options *FetchCommitOptions) (FetchCommitResult, error) { | ||||
| 	// create GitHub client | ||||
| 	ctx, client, err := NewClient(options.Token, options.APIURL, "") | ||||
| 	ctx, client, err := NewClient(options.Token, options.APIURL, "", options.TrustedCerts) | ||||
| 	if err != nil { | ||||
| 		return FetchCommitResult{}, errors.Wrap(err, "failed to get GitHub client") | ||||
| 	} | ||||
|   | ||||
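With the extended options struct, a caller hands any custom CA certificates straight into the commit lookup. A minimal usage sketch; all option values below are placeholders:

package example

import (
	piperGithub "github.com/SAP/jenkins-library/pkg/github"
	"github.com/SAP/jenkins-library/pkg/log"
)

func lookupCommit(token string) {
	options := piperGithub.FetchCommitOptions{
		APIURL:       "https://api.github.com/",
		Owner:        "SAP",
		Repository:   "jenkins-library",
		Token:        token,
		SHA:          "0123456789abcdef0123456789abcdef01234567",      // placeholder commit SHA
		TrustedCerts: []string{"https://my.company.example/rootCA.crt"}, // hypothetical certificate download URL
	}
	result, err := piperGithub.FetchCommitStatistics(&options)
	if err != nil {
		log.Entry().WithError(err).Error("commit lookup failed")
		return
	}
	log.Entry().Infof("commit statistics: %+v", result)
}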
| @@ -6,6 +6,7 @@ import ( | ||||
| 	"net/url" | ||||
| 	"strings" | ||||
|  | ||||
| 	piperhttp "github.com/SAP/jenkins-library/pkg/http" | ||||
| 	"github.com/SAP/jenkins-library/pkg/log" | ||||
| 	"github.com/google/go-github/v32/github" | ||||
| 	"github.com/pkg/errors" | ||||
| @@ -34,14 +35,20 @@ type CreateIssueOptions struct { | ||||
| 	Title          string   `json:"title,omitempty"` | ||||
| 	UpdateExisting bool     `json:"updateExisting,omitempty"` | ||||
| 	Token          string   `json:"token,omitempty"` | ||||
| 	TrustedCerts   []string `json:"trustedCerts,omitempty"` | ||||
| } | ||||
|  | ||||
| // NewClient creates a new GitHub client using an OAuth token for authentication | ||||
| func NewClient(token, apiURL, uploadURL string) (context.Context, *github.Client, error) { | ||||
| 	ctx := context.Background() | ||||
| 	ts := oauth2.StaticTokenSource( | ||||
| 		&oauth2.Token{AccessToken: token}, | ||||
| 	) | ||||
| func NewClient(token, apiURL, uploadURL string, trustedCerts []string) (context.Context, *github.Client, error) { | ||||
| 	httpClient := piperhttp.Client{} | ||||
| 	httpClient.SetOptions(piperhttp.ClientOptions{ | ||||
| 		TrustedCerts:             trustedCerts, | ||||
| 		DoLogRequestBodyOnDebug:  true, | ||||
| 		DoLogResponseBodyOnDebug: true, | ||||
| 	}) | ||||
| 	stdClient := httpClient.StandardClient() | ||||
| 	ctx := context.WithValue(context.Background(), oauth2.HTTPClient, stdClient) | ||||
| 	ts := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: token, TokenType: "Bearer"}) | ||||
| 	tc := oauth2.NewClient(ctx, ts) | ||||
|  | ||||
| 	if !strings.HasSuffix(apiURL, "/") { | ||||
| @@ -68,7 +75,7 @@ func NewClient(token, apiURL, uploadURL string) (context.Context, *github.Client | ||||
| } | ||||
|  | ||||
| func CreateIssue(ghCreateIssueOptions *CreateIssueOptions) error { | ||||
| 	ctx, client, err := NewClient(ghCreateIssueOptions.Token, ghCreateIssueOptions.APIURL, "") | ||||
| 	ctx, client, err := NewClient(ghCreateIssueOptions.Token, ghCreateIssueOptions.APIURL, "", ghCreateIssueOptions.TrustedCerts) | ||||
| 	if err != nil { | ||||
| 		return errors.Wrap(err, "failed to get GitHub client") | ||||
| 	} | ||||
| @@ -100,7 +107,7 @@ func createIssueLocal(ctx context.Context, ghCreateIssueOptions *CreateIssueOpti | ||||
| 		searchResult, resp, err := ghSearchIssuesService.Issues(ctx, queryString, nil) | ||||
| 		if err != nil { | ||||
| 			if resp != nil { | ||||
| 				log.Entry().Errorf("GitHub response code %v", resp.Status) | ||||
| 				log.Entry().Errorf("GitHub search issue returned response code %v", resp.Status) | ||||
| 			} | ||||
| 			return errors.Wrap(err, "error occurred when looking for existing issue") | ||||
| 		} else { | ||||
| @@ -116,9 +123,9 @@ func createIssueLocal(ctx context.Context, ghCreateIssueOptions *CreateIssueOpti | ||||
| 			_, resp, err := ghCreateCommentService.CreateComment(ctx, ghCreateIssueOptions.Owner, ghCreateIssueOptions.Repository, *existingIssue.Number, comment) | ||||
| 			if err != nil { | ||||
| 				if resp != nil { | ||||
| 					log.Entry().Errorf("GitHub response code %v", resp.Status) | ||||
| 					log.Entry().Errorf("GitHub create comment returned response code %v", resp.Status) | ||||
| 				} | ||||
| 				return errors.Wrap(err, "error occurred when looking for existing issue") | ||||
| 				return errors.Wrap(err, "error occurred when adding comment to existing issue") | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| @@ -127,7 +134,7 @@ func createIssueLocal(ctx context.Context, ghCreateIssueOptions *CreateIssueOpti | ||||
| 		newIssue, resp, err := ghCreateIssueService.Create(ctx, ghCreateIssueOptions.Owner, ghCreateIssueOptions.Repository, &issue) | ||||
| 		if err != nil { | ||||
| 			if resp != nil { | ||||
| 				log.Entry().Errorf("GitHub response code %v", resp.Status) | ||||
| 				log.Entry().Errorf("GitHub create issue returned response code %v", resp.Status) | ||||
| 			} | ||||
| 			return errors.Wrap(err, "error occurred when creating issue") | ||||
| 		} | ||||
|   | ||||
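Callers of NewClient now state explicitly which additional CA certificates to trust; passing an empty slice keeps the previous behavior, as the adapted steps show. A short sketch against a self-hosted GitHub instance; the URLs and the issue lookup are illustrative only:

package example

import (
	piperGithub "github.com/SAP/jenkins-library/pkg/github"
	"github.com/SAP/jenkins-library/pkg/log"
)

func readIssue(token string) {
	// hypothetical enterprise API URL and certificate location
	certs := []string{"https://my.company.example/rootCA.crt"}
	ctx, client, err := piperGithub.NewClient(token, "https://github.corp.example/api/v3/", "", certs)
	if err != nil {
		log.Entry().WithError(err).Fatal("Failed to get GitHub client")
	}
	issue, _, err := client.Issues.Get(ctx, "SAP", "jenkins-library", 42) // placeholder issue number
	if err != nil {
		log.Entry().WithError(err).Error("failed to read issue")
		return
	}
	log.Entry().Infof("issue state: %v", issue.GetState())
}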
							
								
								
									
pkg/http/http.go (178 changed lines)
							| @@ -21,7 +21,6 @@ import ( | ||||
|  | ||||
| 	"github.com/SAP/jenkins-library/pkg/log" | ||||
| 	"github.com/SAP/jenkins-library/pkg/piperutils" | ||||
| 	"github.com/SAP/jenkins-library/pkg/reporting" | ||||
| 	"github.com/hashicorp/go-retryablehttp" | ||||
| 	"github.com/motemen/go-nuts/roundtime" | ||||
| 	"github.com/pkg/errors" | ||||
| @@ -68,11 +67,14 @@ type ClientOptions struct { | ||||
| 	TrustedCerts              []string | ||||
| } | ||||
|  | ||||
| // TransportWrapper is a wrapper for central logging capabilities | ||||
| // TransportWrapper is a wrapper for central roundtrip capabilities | ||||
| type TransportWrapper struct { | ||||
| 	Transport                http.RoundTripper | ||||
| 	doLogRequestBodyOnDebug  bool | ||||
| 	doLogResponseBodyOnDebug bool | ||||
| 	username                 string | ||||
| 	password                 string | ||||
| 	token                    string | ||||
| } | ||||
|  | ||||
| // UploadRequestData encapsulates the parameters for calling uploader.Upload() | ||||
| @@ -175,10 +177,13 @@ func (c *Client) Upload(data UploadRequestData) (*http.Response, error) { | ||||
| 			return &http.Response{}, errors.Wrapf(err, "unable to copy file content of %v into request body", data.File) | ||||
| 		} | ||||
| 		err = bodyWriter.Close() | ||||
| 		if err != nil { | ||||
| 			log.Entry().Warn("failed to close writer on request body") | ||||
| 		} | ||||
|  | ||||
| 		request, err := c.createRequest(data.Method, data.URL, bodyBuffer, &data.Header, data.Cookies) | ||||
| 		if err != nil { | ||||
| 			c.logger.Debugf("New %v request to %v", data.Method, data.URL) | ||||
| 			c.logger.Debugf("new %v request to %v", data.Method, data.URL) | ||||
| 			return &http.Response{}, errors.Wrapf(err, "error creating %v request to %v", data.Method, data.URL) | ||||
| 		} | ||||
|  | ||||
| @@ -265,13 +270,16 @@ func (c *Client) initialize() *http.Client { | ||||
| 		}, | ||||
| 		doLogRequestBodyOnDebug:  c.doLogRequestBodyOnDebug, | ||||
| 		doLogResponseBodyOnDebug: c.doLogResponseBodyOnDebug, | ||||
| 		token:                    c.token, | ||||
| 		username:                 c.username, | ||||
| 		password:                 c.password, | ||||
| 	} | ||||
|  | ||||
| 	if (len(c.trustedCerts)) > 0 && !c.useDefaultTransport && !c.transportSkipVerification { | ||||
| 	if len(c.trustedCerts) > 0 && !c.useDefaultTransport && !c.transportSkipVerification { | ||||
| 		log.Entry().Info("adding certs for tls to trust") | ||||
| 		err := c.configureTLSToTrustCertificates(transport) | ||||
| 		if err != nil { | ||||
| 			log.Entry().Infof("adding certs for tls config failed : v%, continuing with the existing tsl config", err) | ||||
| 			log.Entry().Infof("adding certs for tls config failed: %v, continuing with the existing tls config", err) | ||||
| 		} | ||||
| 	} else { | ||||
| 		log.Entry().Debug("no trusted certs found / using default transport / insecure skip set to true / : continuing with existing tls config") | ||||
| @@ -288,6 +296,14 @@ func (c *Client) initialize() *http.Client { | ||||
| 		retryClient.RetryMax = c.maxRetries | ||||
| 		if !c.useDefaultTransport { | ||||
| 			retryClient.HTTPClient.Transport = transport | ||||
| 		} else { | ||||
| 			retryClient.HTTPClient.Transport = &TransportWrapper{ | ||||
| 				Transport:                retryClient.HTTPClient.Transport, | ||||
| 				doLogRequestBodyOnDebug:  c.doLogRequestBodyOnDebug, | ||||
| 				doLogResponseBodyOnDebug: c.doLogResponseBodyOnDebug, | ||||
| 				token:                    c.token, | ||||
| 				username:                 c.username, | ||||
| 				password:                 c.password} | ||||
| 		} | ||||
| 		retryClient.CheckRetry = func(ctx context.Context, resp *http.Response, err error) (bool, error) { | ||||
| 			if err != nil && (strings.Contains(err.Error(), "timeout") || strings.Contains(err.Error(), "timed out") || strings.Contains(err.Error(), "connection refused") || strings.Contains(err.Error(), "connection reset")) { | ||||
| @@ -320,6 +336,7 @@ type contextKey struct { | ||||
| } | ||||
|  | ||||
| var contextKeyRequestStart = &contextKey{"RequestStart"} | ||||
| var authHeaderKey = "Authorization" | ||||
|  | ||||
| // RoundTrip is the core part of this module and implements http.RoundTripper. | ||||
| // Executes HTTP request with request/response logging. | ||||
| @@ -327,20 +344,40 @@ func (t *TransportWrapper) RoundTrip(req *http.Request) (*http.Response, error) | ||||
| 	ctx := context.WithValue(req.Context(), contextKeyRequestStart, time.Now()) | ||||
| 	req = req.WithContext(ctx) | ||||
|  | ||||
| 	handleAuthentication(req, t.username, t.password, t.token) | ||||
|  | ||||
| 	t.logRequest(req) | ||||
|  | ||||
| 	resp, err := t.Transport.RoundTrip(req) | ||||
|  | ||||
| 	t.logResponse(resp) | ||||
|  | ||||
| 	return resp, err | ||||
| } | ||||
|  | ||||
| func handleAuthentication(req *http.Request, username, password, token string) { | ||||
| 	// Handle authentication if not done already | ||||
| 	if (len(username) > 0 || len(password) > 0) && len(req.Header.Get(authHeaderKey)) == 0 { | ||||
| 		req.SetBasicAuth(username, password) | ||||
| 		log.Entry().Debug("Using Basic Authentication ****/****") | ||||
| 	} | ||||
| 	if len(token) > 0 && len(req.Header.Get(authHeaderKey)) == 0 { | ||||
| 		req.Header.Add(authHeaderKey, token) | ||||
| 		log.Entry().Debug("Using Token Authentication ****") | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (t *TransportWrapper) logRequest(req *http.Request) { | ||||
| 	log.Entry().Debug("--------------------------------") | ||||
| 	log.Entry().Debugf("--> %v request to %v", req.Method, req.URL) | ||||
| 	log.Entry().Debugf("headers: %v", transformHeaders(req.Header)) | ||||
| 	log.Entry().Debugf("cookies: %v", transformCookies(req.Cookies())) | ||||
| 	if t.doLogRequestBodyOnDebug { | ||||
| 		log.Entry().Debugf("body: %v", transformBody(req.Body)) | ||||
| 	if t.doLogRequestBodyOnDebug && req.Body != nil { | ||||
| 		var buf bytes.Buffer | ||||
| 		tee := io.TeeReader(req.Body, &buf) | ||||
| 		log.Entry().Debugf("body: %v", transformBody(tee)) | ||||
| 		req.Body = ioutil.NopCloser(bytes.NewReader(buf.Bytes())) | ||||
| 	} | ||||
| 	log.Entry().Debug("--------------------------------") | ||||
| } | ||||
| @@ -353,8 +390,11 @@ func (t *TransportWrapper) logResponse(resp *http.Response) { | ||||
| 		} else { | ||||
| 			log.Entry().Debugf("<-- response %v %v", resp.StatusCode, resp.Request.URL) | ||||
| 		} | ||||
| 		if t.doLogResponseBodyOnDebug { | ||||
| 			log.Entry().Debugf("body: %v", transformBody(resp.Body)) | ||||
| 		if t.doLogResponseBodyOnDebug && resp.Body != nil { | ||||
| 			var buf bytes.Buffer | ||||
| 			tee := io.TeeReader(resp.Body, &buf) | ||||
| 			log.Entry().Debugf("body: %v", transformBody(tee)) | ||||
| 			resp.Body = ioutil.NopCloser(bytes.NewReader(buf.Bytes())) | ||||
| 		} | ||||
| 	} else { | ||||
| 		log.Entry().Debug("response <nil>") | ||||
| @@ -396,7 +436,7 @@ func transformCookies(cookies []*http.Cookie) string { | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| func transformBody(body io.ReadCloser) string { | ||||
| func transformBody(body io.Reader) string { | ||||
| 	if body == nil { | ||||
| 		return "" | ||||
| 	} | ||||
| @@ -419,19 +459,10 @@ func (c *Client) createRequest(method, url string, body io.Reader, header *http. | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if cookies != nil { | ||||
| 		for _, cookie := range cookies { | ||||
| 			request.AddCookie(cookie) | ||||
| 		} | ||||
| 	} | ||||
| 	handleAuthentication(request, c.username, c.password, c.token) | ||||
|  | ||||
| 	if len(c.username) > 0 || len(c.password) > 0 { | ||||
| 		request.SetBasicAuth(c.username, c.password) | ||||
| 		c.logger.Debug("Using Basic Authentication ****/****") | ||||
| 	} | ||||
|  | ||||
| 	if len(c.token) > 0 { | ||||
| 		request.Header.Add("Authorization", c.token) | ||||
| 	for _, cookie := range cookies { | ||||
| 		request.AddCookie(cookie) | ||||
| 	} | ||||
|  | ||||
| 	return request, nil | ||||
| @@ -474,14 +505,37 @@ func (c *Client) configureTLSToTrustCertificates(transport *TransportWrapper) er | ||||
| 		return errors.Wrap(err, "failed to create trust store directory") | ||||
| 	} | ||||
| 	/* insecure := flag.Bool("insecure-ssl", false, "Accept/Ignore all server SSL certificates") */ | ||||
| 	// Get the SystemCertPool, continue with an empty pool on error | ||||
| 	rootCAs, err := x509.SystemCertPool() | ||||
| 	if err != nil { | ||||
| 		log.Entry().Debugf("Caught error on store lookup %v", err) | ||||
| 	} | ||||
|  | ||||
| 	if rootCAs == nil { | ||||
| 		rootCAs = x509.NewCertPool() | ||||
| 	} | ||||
|  | ||||
| 	*transport = TransportWrapper{ | ||||
| 		Transport: &http.Transport{ | ||||
| 			DialContext: (&net.Dialer{ | ||||
| 				Timeout: c.transportTimeout, | ||||
| 			}).DialContext, | ||||
| 			ResponseHeaderTimeout: c.transportTimeout, | ||||
| 			ExpectContinueTimeout: c.transportTimeout, | ||||
| 			TLSHandshakeTimeout:   c.transportTimeout, | ||||
| 			TLSClientConfig: &tls.Config{ | ||||
| 				InsecureSkipVerify: false, | ||||
| 				RootCAs:            rootCAs, | ||||
| 			}, | ||||
| 		}, | ||||
| 		doLogRequestBodyOnDebug:  c.doLogRequestBodyOnDebug, | ||||
| 		doLogResponseBodyOnDebug: c.doLogResponseBodyOnDebug, | ||||
| 		token:                    c.token, | ||||
| 		username:                 c.username, | ||||
| 		password:                 c.password, | ||||
| 	} | ||||
|  | ||||
| 	for _, certificate := range c.trustedCerts { | ||||
| 		rootCAs, _ := x509.SystemCertPool() | ||||
|  | ||||
| 		if rootCAs == nil { | ||||
| 			rootCAs = x509.NewCertPool() | ||||
| 		} | ||||
|  | ||||
| 		filename := path.Base(certificate) | ||||
| 		filename = strings.ReplaceAll(filename, " ", "") | ||||
| 		target := filepath.Join(trustStoreDir, filename) | ||||
| @@ -517,72 +571,35 @@ func (c *Client) configureTLSToTrustCertificates(transport *TransportWrapper) er | ||||
| 				} | ||||
| 				defer fileHandler.Close() | ||||
|  | ||||
| 				_, err = io.Copy(fileHandler, response.Body) | ||||
| 				numWritten, err := io.Copy(fileHandler, response.Body) | ||||
| 				if err != nil { | ||||
| 					return errors.Wrapf(err, "unable to copy content from url to file %v", filename) | ||||
| 				} | ||||
| 				log.Entry().Debugf("wrote %v bytes from response body to file", numWritten) | ||||
|  | ||||
| 				// Get the SystemCertPool, continue with an empty pool on error | ||||
| 				certs, err := ioutil.ReadFile(target) | ||||
| 				if err != nil { | ||||
| 					return errors.Wrapf(err, "Failed to read cert file %v", certificate) | ||||
| 					return errors.Wrapf(err, "failed to read cert file %v", certificate) | ||||
| 				} | ||||
|  | ||||
| 				// Append our cert to the system pool | ||||
| 				if ok := rootCAs.AppendCertsFromPEM(certs); !ok { | ||||
| 					log.Entry().Infof("cert not appended to root ca %v", certificate) | ||||
| 					return fmt.Errorf("cert not appended to root ca %v", certificate) | ||||
| 				ok := rootCAs.AppendCertsFromPEM(certs) | ||||
| 				if !ok { | ||||
| 					return errors.Errorf("failed to append %v to root CA store", certificate) | ||||
| 				} | ||||
|  | ||||
| 				*transport = TransportWrapper{ | ||||
| 					Transport: &http.Transport{ | ||||
| 						DialContext: (&net.Dialer{ | ||||
| 							Timeout: c.transportTimeout, | ||||
| 						}).DialContext, | ||||
| 						ResponseHeaderTimeout: c.transportTimeout, | ||||
| 						ExpectContinueTimeout: c.transportTimeout, | ||||
| 						TLSHandshakeTimeout:   c.transportTimeout, | ||||
| 						TLSClientConfig: &tls.Config{ | ||||
| 							InsecureSkipVerify: false, | ||||
| 							RootCAs:            rootCAs, | ||||
| 						}, | ||||
| 					}, | ||||
| 					doLogRequestBodyOnDebug:  c.doLogRequestBodyOnDebug, | ||||
| 					doLogResponseBodyOnDebug: c.doLogResponseBodyOnDebug, | ||||
| 				} | ||||
|  | ||||
| 				log.Entry().Infof("%v appended to root CA successfully", certificate) | ||||
|  | ||||
| 			} else { | ||||
| 				return errors.Wrapf(err, "Download of TLS certificate %v failed with status code %v", certificate, response.StatusCode) | ||||
| 			} | ||||
| 		} else { | ||||
| 			log.Entry().Infof("existing certs found, appending to rootCA") | ||||
| 			log.Entry().Infof("existing certificate file %v found, appending it to rootCA", target) | ||||
| 			certs, err := ioutil.ReadFile(target) | ||||
| 			if err != nil { | ||||
| 				return errors.Wrapf(err, "Failed to read cert file %v", certificate) | ||||
| 				return errors.Wrapf(err, "failed to read cert file %v", certificate) | ||||
| 			} | ||||
|  | ||||
| 			// Append our cert to the system pool | ||||
| 			if ok := rootCAs.AppendCertsFromPEM(certs); !ok { | ||||
| 				log.Entry().Infof("cert not appended to root ca %v", certificate) | ||||
| 			} | ||||
|  | ||||
| 			*transport = TransportWrapper{ | ||||
| 				Transport: &http.Transport{ | ||||
| 					DialContext: (&net.Dialer{ | ||||
| 						Timeout: c.transportTimeout, | ||||
| 					}).DialContext, | ||||
| 					ResponseHeaderTimeout: c.transportTimeout, | ||||
| 					ExpectContinueTimeout: c.transportTimeout, | ||||
| 					TLSHandshakeTimeout:   c.transportTimeout, | ||||
| 					TLSClientConfig: &tls.Config{ | ||||
| 						InsecureSkipVerify: false, | ||||
| 						RootCAs:            rootCAs, | ||||
| 					}, | ||||
| 				}, | ||||
| 				doLogRequestBodyOnDebug:  c.doLogRequestBodyOnDebug, | ||||
| 				doLogResponseBodyOnDebug: c.doLogResponseBodyOnDebug, | ||||
| 			ok := rootCAs.AppendCertsFromPEM(certs) | ||||
| 			if !ok { | ||||
| 				return errors.Errorf("failed to append %v to root CA store", certificate) | ||||
| 			} | ||||
| 			log.Entry().Infof("%v appended to root CA successfully", certificate) | ||||
| 		} | ||||
| @@ -591,15 +608,18 @@ func (c *Client) configureTLSToTrustCertificates(transport *TransportWrapper) er | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // TrustStoreDirectory is the default truststore location | ||||
| const TrustStoreDirectory = ".pipeline/trustStore" | ||||
|  | ||||
| func getWorkingDirForTrustStore() (string, error) { | ||||
| 	fileUtils := &piperutils.Files{} | ||||
| 	if exists, _ := fileUtils.DirExists(reporting.StepReportDirectory); !exists { | ||||
| 		err := fileUtils.MkdirAll(".pipeline/trustStore", 0777) | ||||
| 	if exists, _ := fileUtils.DirExists(TrustStoreDirectory); !exists { | ||||
| 		err := fileUtils.MkdirAll(TrustStoreDirectory, 0777) | ||||
| 		if err != nil { | ||||
| 			return "", errors.Wrap(err, "failed to create trust store directory") | ||||
| 		} | ||||
| 	} | ||||
| 	return ".pipeline/trustStore", nil | ||||
| 	return TrustStoreDirectory, nil | ||||
| } | ||||
|  | ||||
| // ParseHTTPResponseBodyXML parses an XML http response into a given interface | ||||
|   | ||||
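The http package changes above move authentication into the TransportWrapper, so tokens and basic-auth credentials are applied on every round trip, including retries by the retryablehttp client, and request/response bodies stay readable after debug logging thanks to the TeeReader buffering. A short sketch of the option surface as the GitHub client change uses it; the token format and certificate URL are placeholders:

package example

import (
	"net/http"

	piperhttp "github.com/SAP/jenkins-library/pkg/http"
)

// buildClient configures a standard library client whose transport adds
// the token on each request and trusts the given custom CA certificates.
func buildClient(token string) *http.Client {
	client := piperhttp.Client{}
	client.SetOptions(piperhttp.ClientOptions{
		Token:                    token, // e.g. "token <personal-access-token>", placeholder
		TrustedCerts:             []string{"https://my.company.example/rootCA.crt"}, // hypothetical cert URL
		DoLogRequestBodyOnDebug:  true,
		DoLogResponseBodyOnDebug: true,
	})
	return client.StandardClient()
}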
| @@ -1,3 +1,4 @@ | ||||
| //go:build !release | ||||
| // +build !release | ||||
|  | ||||
| package mock | ||||
|   | ||||
| @@ -1,3 +1,4 @@ | ||||
| //go:build !release | ||||
| // +build !release | ||||
|  | ||||
| package mock | ||||
|   | ||||
| @@ -1,3 +1,4 @@ | ||||
| //go:build !release | ||||
| // +build !release | ||||
|  | ||||
| package npm | ||||
|   | ||||
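The three hunks above prepend the Go 1.17 //go:build directive to the legacy // +build comment, so the mock and npm test helpers keep being excluded from release builds on both old and new toolchains. For illustration, the constraint works like this (file content abbreviated, comments added here):

//go:build !release
// +build !release

package mock

// Everything in this file is compiled only when the "release" build tag
// is absent: a plain `go test ./...` includes it, while a production
// build via `go build -tags release` leaves the test helpers out.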
							
								
								
									
pkg/whitesource/reporting.go (287 lines, new file)
							| @@ -0,0 +1,287 @@ | ||||
| package whitesource | ||||
|  | ||||
| import ( | ||||
| 	"crypto/sha1" | ||||
| 	"encoding/json" | ||||
| 	"fmt" | ||||
| 	"path/filepath" | ||||
| 	"sort" | ||||
| 	"strings" | ||||
| 	"time" | ||||
|  | ||||
| 	"github.com/SAP/jenkins-library/pkg/format" | ||||
| 	piperGithub "github.com/SAP/jenkins-library/pkg/github" | ||||
| 	"github.com/SAP/jenkins-library/pkg/log" | ||||
| 	"github.com/SAP/jenkins-library/pkg/piperutils" | ||||
| 	"github.com/SAP/jenkins-library/pkg/reporting" | ||||
| 	"github.com/pkg/errors" | ||||
| ) | ||||
|  | ||||
| // CreateCustomVulnerabilityReport creates a vulnerability ScanReport to be used for uploading into various sinks | ||||
| func CreateCustomVulnerabilityReport(productName string, scan *Scan, alerts *[]Alert, cvssSeverityLimit float64) reporting.ScanReport { | ||||
| 	severe, _ := CountSecurityVulnerabilities(alerts, cvssSeverityLimit) | ||||
|  | ||||
| 	// sort according to vulnerability severity | ||||
| 	sort.Slice(*alerts, func(i, j int) bool { | ||||
| 		return vulnerabilityScore((*alerts)[i]) > vulnerabilityScore((*alerts)[j]) | ||||
| 	}) | ||||
|  | ||||
| 	projectNames := scan.ScannedProjectNames() | ||||
|  | ||||
| 	scanReport := reporting.ScanReport{ | ||||
| 		Title: "WhiteSource Security Vulnerability Report", | ||||
| 		Subheaders: []reporting.Subheader{ | ||||
| 			{Description: "WhiteSource product name", Details: productName}, | ||||
| 			{Description: "Filtered project names", Details: strings.Join(projectNames, ", ")}, | ||||
| 		}, | ||||
| 		Overview: []reporting.OverviewRow{ | ||||
| 			{Description: "Total number of vulnerabilities", Details: fmt.Sprint(len((*alerts)))}, | ||||
| 			{Description: "Total number of high/critical vulnerabilities with CVSS score >= 7.0", Details: fmt.Sprint(severe)}, | ||||
| 		}, | ||||
| 		SuccessfulScan: severe == 0, | ||||
| 		ReportTime:     time.Now(), | ||||
| 	} | ||||
|  | ||||
| 	detailTable := reporting.ScanDetailTable{ | ||||
| 		NoRowsMessage: "No publicly known vulnerabilities detected", | ||||
| 		Headers: []string{ | ||||
| 			"Date", | ||||
| 			"CVE", | ||||
| 			"CVSS Score", | ||||
| 			"CVSS Version", | ||||
| 			"Project", | ||||
| 			"Library file name", | ||||
| 			"Library group ID", | ||||
| 			"Library artifact ID", | ||||
| 			"Library version", | ||||
| 			"Description", | ||||
| 			"Top fix", | ||||
| 		}, | ||||
| 		WithCounter:   true, | ||||
| 		CounterHeader: "Entry #", | ||||
| 	} | ||||
|  | ||||
| 	for _, alert := range *alerts { | ||||
| 		var score float64 | ||||
| 		var scoreStyle reporting.ColumnStyle = reporting.Yellow | ||||
| 		if isSevereVulnerability(alert, cvssSeverityLimit) { | ||||
| 			scoreStyle = reporting.Red | ||||
| 		} | ||||
| 		var cveVersion string | ||||
| 		if alert.Vulnerability.CVSS3Score > 0 { | ||||
| 			score = alert.Vulnerability.CVSS3Score | ||||
| 			cveVersion = "v3" | ||||
| 		} else { | ||||
| 			score = alert.Vulnerability.Score | ||||
| 			cveVersion = "v2" | ||||
| 		} | ||||
|  | ||||
| 		var topFix string | ||||
| 		emptyFix := Fix{} | ||||
| 		if alert.Vulnerability.TopFix != emptyFix { | ||||
| 			topFix = fmt.Sprintf(`%v<br>%v<br><a href="%v">%v</a>`, alert.Vulnerability.TopFix.Message, alert.Vulnerability.TopFix.FixResolution, alert.Vulnerability.TopFix.URL, alert.Vulnerability.TopFix.URL) | ||||
| 		} | ||||
|  | ||||
| 		row := reporting.ScanRow{} | ||||
| 		row.AddColumn(alert.Vulnerability.PublishDate, 0) | ||||
| 		row.AddColumn(fmt.Sprintf(`<a href="%v">%v</a>`, alert.Vulnerability.URL, alert.Vulnerability.Name), 0) | ||||
| 		row.AddColumn(score, scoreStyle) | ||||
| 		row.AddColumn(cveVersion, 0) | ||||
| 		row.AddColumn(alert.Project, 0) | ||||
| 		row.AddColumn(alert.Library.Filename, 0) | ||||
| 		row.AddColumn(alert.Library.GroupID, 0) | ||||
| 		row.AddColumn(alert.Library.ArtifactID, 0) | ||||
| 		row.AddColumn(alert.Library.Version, 0) | ||||
| 		row.AddColumn(alert.Vulnerability.Description, 0) | ||||
| 		row.AddColumn(topFix, 0) | ||||
|  | ||||
| 		detailTable.Rows = append(detailTable.Rows, row) | ||||
| 	} | ||||
| 	scanReport.DetailTable = detailTable | ||||
|  | ||||
| 	return scanReport | ||||
| } | ||||
|  | ||||
| // CountSecurityVulnerabilities counts the security vulnerabilities above severityLimit | ||||
| func CountSecurityVulnerabilities(alerts *[]Alert, cvssSeverityLimit float64) (int, int) { | ||||
| 	severeVulnerabilities := 0 | ||||
| 	for _, alert := range *alerts { | ||||
| 		if isSevereVulnerability(alert, cvssSeverityLimit) { | ||||
| 			severeVulnerabilities++ | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	nonSevereVulnerabilities := len(*alerts) - severeVulnerabilities | ||||
| 	return severeVulnerabilities, nonSevereVulnerabilities | ||||
| } | ||||
|  | ||||
| func isSevereVulnerability(alert Alert, cvssSeverityLimit float64) bool { | ||||
| 	return cvssSeverityLimit >= 0 && vulnerabilityScore(alert) >= cvssSeverityLimit | ||||
| } | ||||
|  | ||||
| func vulnerabilityScore(alert Alert) float64 { | ||||
| 	if alert.Vulnerability.CVSS3Score > 0 { | ||||
| 		return alert.Vulnerability.CVSS3Score | ||||
| 	} | ||||
| 	return alert.Vulnerability.Score | ||||
| } | ||||
|  | ||||
| // ReportSha creates a SHA unique to the WS product and scan to be used as part of the report filename | ||||
| func ReportSha(productName string, scan *Scan) string { | ||||
| 	reportShaData := []byte(productName + "," + strings.Join(scan.ScannedProjectNames(), ",")) | ||||
| 	return fmt.Sprintf("%x", sha1.Sum(reportShaData)) | ||||
| } | ||||
|  | ||||
| // WriteCustomVulnerabilityReports creates an HTML and a JSON format file based on the alerts brought up by the scan | ||||
| func WriteCustomVulnerabilityReports(productName string, scan *Scan, scanReport reporting.ScanReport, utils piperutils.FileUtils) ([]piperutils.Path, error) { | ||||
| 	reportPaths := []piperutils.Path{} | ||||
|  | ||||
| 	// ignore templating errors since template is in our hands and issues will be detected with the automated tests | ||||
| 	htmlReport, _ := scanReport.ToHTML() | ||||
| 	if err := utils.MkdirAll(ReportsDirectory, 0777); err != nil { | ||||
| 		return reportPaths, errors.Wrapf(err, "failed to create report directory") | ||||
| 	} | ||||
| 	htmlReportPath := filepath.Join(ReportsDirectory, "piper_whitesource_vulnerability_report.html") | ||||
| 	if err := utils.FileWrite(htmlReportPath, htmlReport, 0666); err != nil { | ||||
| 		log.SetErrorCategory(log.ErrorConfiguration) | ||||
| 		return reportPaths, errors.Wrapf(err, "failed to write html report") | ||||
| 	} | ||||
| 	reportPaths = append(reportPaths, piperutils.Path{Name: "WhiteSource Vulnerability Report", Target: htmlReportPath}) | ||||
|  | ||||
| 	// JSON reports are used by step pipelineCreateSummary in order to e.g. prepare an issue creation in GitHub | ||||
| 	// ignore JSON errors since structure is in our hands | ||||
| 	jsonReport, _ := scanReport.ToJSON() | ||||
| 	if exists, _ := utils.DirExists(reporting.StepReportDirectory); !exists { | ||||
| 		err := utils.MkdirAll(reporting.StepReportDirectory, 0777) | ||||
| 		if err != nil { | ||||
| 			return reportPaths, errors.Wrap(err, "failed to create step reporting directory") | ||||
| 		} | ||||
| 	} | ||||
| 	if err := utils.FileWrite(filepath.Join(reporting.StepReportDirectory, fmt.Sprintf("whitesourceExecuteScan_oss_%v.json", ReportSha(productName, scan))), jsonReport, 0666); err != nil { | ||||
| 		return reportPaths, errors.Wrapf(err, "failed to write json report") | ||||
| 	} | ||||
| 	// we do not add the json report to the overall list of reports for now, | ||||
| 	// since it is just an intermediary report used as input for later | ||||
| 	// and there does not seem to be real benefit in archiving it. | ||||
|  | ||||
| 	return reportPaths, nil | ||||
| } | ||||
|  | ||||
| // CreateSarifResultFile creates a SARIF result from the Alerts that were brought up by the scan | ||||
| func CreateSarifResultFile(scan *Scan, alerts *[]Alert) *format.SARIF { | ||||
| 	//Now, we handle the sarif | ||||
| 	log.Entry().Debug("Creating SARIF file for data transfer") | ||||
| 	var sarif format.SARIF | ||||
| 	sarif.Schema = "https://docs.oasis-open.org/sarif/sarif/v2.1.0/cos01/schemas/sarif-schema-2.1.0.json" | ||||
| 	sarif.Version = "2.1.0" | ||||
| 	var wsRun format.Runs | ||||
| 	sarif.Runs = append(sarif.Runs, wsRun) | ||||
|  | ||||
| 	//handle the tool object | ||||
| 	tool := *new(format.Tool) | ||||
| 	tool.Driver = *new(format.Driver) | ||||
| 	tool.Driver.Name = scan.AgentName | ||||
| 	tool.Driver.Version = scan.AgentVersion | ||||
| 	tool.Driver.InformationUri = "https://whitesource.atlassian.net/wiki/spaces/WD/pages/804814917/Unified+Agent+Overview" | ||||
|  | ||||
| 	// Handle results/vulnerabilities | ||||
| 	for i := 0; i < len(*alerts); i++ { | ||||
| 		alert := (*alerts)[i] | ||||
| 		result := *new(format.Results) | ||||
| 		id := fmt.Sprintf("%v/%v/%v", alert.Type, alert.Vulnerability.Name, alert.Library.ArtifactID) | ||||
| 		log.Entry().Debugf("Transforming alert %v into SARIF format", id) | ||||
| 		result.RuleID = id | ||||
| 		result.Level = alert.Level | ||||
| 		result.RuleIndex = i //Seems very abstract | ||||
| 		result.Message = format.Message{Text: alert.Vulnerability.Description} | ||||
| 		result.AnalysisTarget = format.ArtifactLocation{URI: alert.Library.Filename, Index: 0} | ||||
| 		location := format.Location{PhysicalLocation: format.ArtifactLocation{URI: alert.Library.Filename}, Region: format.Region{}, LogicalLocations: []format.LogicalLocation{{FullyQualifiedName: ""}}} | ||||
| 		result.Locations = append(result.Locations, location) | ||||
|  | ||||
| 		sarifRule := *new(format.SarifRule) | ||||
| 		sarifRule.ID = id | ||||
| 		sarifRule.ShortDescription = format.Message{Text: fmt.Sprintf("%v Package %v", alert.Vulnerability.Name, alert.Library.ArtifactID)} | ||||
| 		sarifRule.FullDescription = format.Message{Text: alert.Vulnerability.Description} | ||||
| 		sarifRule.DefaultConfiguration.Level = alert.Level | ||||
| 		sarifRule.HelpURI = alert.Vulnerability.URL | ||||
| 		sarifRule.Help = format.Help{Text: fmt.Sprintf("Vulnerability %v\nSeverity: %v\nPackage: %v\nInstalled Version: %v\nFix Resolution: %v\nLink: [%v](%v)", alert.Vulnerability.Name, alert.Vulnerability.Severity, alert.Library.ArtifactID, alert.Library.Version, alert.Vulnerability.TopFix.FixResolution, alert.Vulnerability.Name, alert.Vulnerability.URL), Markdown: alert.ToMarkdown()} | ||||
|  | ||||
| 		// Avoid empty descriptions to respect standard | ||||
| 		if sarifRule.ShortDescription.Text == "" { | ||||
| 			sarifRule.ShortDescription.Text = "None." | ||||
| 		} | ||||
| 		if sarifRule.FullDescription.Text == "" { // OR USE OMITEMPTY | ||||
| 			sarifRule.FullDescription.Text = "None." | ||||
| 		} | ||||
|  | ||||
| 		ruleProp := *new(format.SarifRuleProperties) | ||||
| 		ruleProp.Tags = append(ruleProp.Tags, alert.Type) | ||||
| 		ruleProp.Tags = append(ruleProp.Tags, alert.Description) | ||||
| 		ruleProp.Tags = append(ruleProp.Tags, alert.Library.ArtifactID) | ||||
| 		ruleProp.Precision = "very-high" | ||||
| 		sarifRule.Properties = &ruleProp | ||||
|  | ||||
| 		//Finalize: append the result and the rule | ||||
| 		sarif.Runs[0].Results = append(sarif.Runs[0].Results, result) | ||||
| 		tool.Driver.Rules = append(tool.Driver.Rules, sarifRule) | ||||
| 	} | ||||
| 	//Finalize: tool | ||||
| 	sarif.Runs[0].Tool = tool | ||||
|  | ||||
| 	return &sarif | ||||
| } | ||||
|  | ||||
| // WriteSarifFile writes a JSON sarif format file for upload into Cumulus | ||||
| func WriteSarifFile(sarif *format.SARIF, utils piperutils.FileUtils) ([]piperutils.Path, error) { | ||||
| 	reportPaths := []piperutils.Path{} | ||||
|  | ||||
| 	// marshal the SARIF structure into a JSON report | ||||
| 	sarifReport, errorMarshall := json.Marshal(sarif) | ||||
| 	if errorMarshall != nil { | ||||
| 		return reportPaths, errors.Wrapf(errorMarshall, "failed to marshal SARIF json file") | ||||
| 	} | ||||
| 	if err := utils.MkdirAll(ReportsDirectory, 0777); err != nil { | ||||
| 		return reportPaths, errors.Wrapf(err, "failed to create report directory") | ||||
| 	} | ||||
| 	sarifReportPath := filepath.Join(ReportsDirectory, "piper_whitesource_vulnerability.sarif") | ||||
| 	if err := utils.FileWrite(sarifReportPath, sarifReport, 0666); err != nil { | ||||
| 		log.SetErrorCategory(log.ErrorConfiguration) | ||||
| 		return reportPaths, errors.Wrapf(err, "failed to write SARIF file") | ||||
| 	} | ||||
| 	reportPaths = append(reportPaths, piperutils.Path{Name: "WhiteSource Vulnerability SARIF file", Target: sarifReportPath}) | ||||
|  | ||||
| 	return reportPaths, nil | ||||
| } | ||||
|  | ||||
| // CreateGithubResultIssues creates one GitHub issue per alert to provide transparency on the findings | ||||
| func CreateGithubResultIssues(scan *Scan, alerts *[]Alert, token, APIURL, owner, repository string, assignees, trustedCerts []string) error { | ||||
| 	for i := 0; i < len(*alerts); i++ { | ||||
| 		alert := (*alerts)[i] | ||||
| 		title := fmt.Sprintf("%v/%v/%v", alert.Type, alert.Vulnerability.Name, alert.Library.ArtifactID) | ||||
| 		markdownReport := alert.ToMarkdown() | ||||
| 		options := piperGithub.CreateIssueOptions{ | ||||
| 			Token:          token, | ||||
| 			APIURL:         APIURL, | ||||
| 			Owner:          owner, | ||||
| 			Repository:     repository, | ||||
| 			Title:          title, | ||||
| 			Body:           []byte(markdownReport), | ||||
| 			Assignees:      assignees, | ||||
| 			UpdateExisting: true, | ||||
| 			TrustedCerts:   trustedCerts, | ||||
| 		} | ||||
|  | ||||
| 		log.Entry().Debugf("Creating/updating GitHub issue(s) with title %v in org %v and repo %v", title, owner, repository) | ||||
| 		err := piperGithub.CreateIssue(&options) | ||||
| 		if err != nil { | ||||
| 			return errors.Wrapf(err, "Failed to upload WhiteSource result for %v into GitHub issue", alert.Vulnerability.Name) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
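A sketch of how a step could chain the new reporting helpers, from SARIF creation through archiving to issue creation; the ws alias, the utils instance, and the config field names are assumptions for illustration:

package example

import (
	"github.com/SAP/jenkins-library/pkg/log"
	"github.com/SAP/jenkins-library/pkg/piperutils"
	ws "github.com/SAP/jenkins-library/pkg/whitesource"
	"github.com/pkg/errors"
)

// stepConfig is a hypothetical subset of the step configuration.
type stepConfig struct {
	CreateResultIssue                            bool
	GithubToken, GithubAPIURL, Owner, Repository string
	Assignees, CustomTLSCertificateLinks         []string
}

func publishResults(config stepConfig, scan *ws.Scan, alerts []ws.Alert, utils piperutils.FileUtils) error {
	sarif := ws.CreateSarifResultFile(scan, &alerts)
	paths, err := ws.WriteSarifFile(sarif, utils)
	if err != nil {
		return errors.Wrap(err, "failed to write SARIF file")
	}
	log.Entry().Infof("SARIF report: %v", paths[0].Target)

	if config.CreateResultIssue { // hypothetical switch in the step config
		err = ws.CreateGithubResultIssues(scan, &alerts, config.GithubToken, config.GithubAPIURL,
			config.Owner, config.Repository, config.Assignees, config.CustomTLSCertificateLinks)
		if err != nil {
			return errors.Wrap(err, "failed to create GitHub issues")
		}
	}
	return nil
}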
							
								
								
									
pkg/whitesource/reporting_test.go (220 lines, new file)
							| @@ -0,0 +1,220 @@ | ||||
| package whitesource | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"path/filepath" | ||||
| 	"testing" | ||||
|  | ||||
| 	"github.com/SAP/jenkins-library/pkg/format" | ||||
| 	"github.com/SAP/jenkins-library/pkg/mock" | ||||
| 	"github.com/SAP/jenkins-library/pkg/piperutils" | ||||
| 	"github.com/SAP/jenkins-library/pkg/reporting" | ||||
| 	"github.com/stretchr/testify/assert" | ||||
| ) | ||||
|  | ||||
| func TestCreateCustomVulnerabilityReport(t *testing.T) { | ||||
| 	t.Parallel() | ||||
|  | ||||
| 	t.Run("success case", func(t *testing.T) { | ||||
| 		config := &ScanOptions{} | ||||
| 		scan := &Scan{ | ||||
| 			AggregateProjectName: config.ProjectName, | ||||
| 			ProductVersion:       config.ProductVersion, | ||||
| 		} | ||||
| 		scan.AppendScannedProject("testProject") | ||||
| 		alerts := []Alert{ | ||||
| 			{Library: Library{Filename: "vul1"}, Vulnerability: Vulnerability{CVSS3Score: 7.0, Score: 6}}, | ||||
| 			{Library: Library{Filename: "vul2"}, Vulnerability: Vulnerability{CVSS3Score: 8.0, TopFix: Fix{Message: "this is the top fix"}}}, | ||||
| 			{Library: Library{Filename: "vul3"}, Vulnerability: Vulnerability{Score: 6}}, | ||||
| 		} | ||||
|  | ||||
| 		scanReport := CreateCustomVulnerabilityReport(config.ProductName, scan, &alerts, 7.0) | ||||
|  | ||||
| 		assert.Equal(t, "WhiteSource Security Vulnerability Report", scanReport.Title) | ||||
| 		assert.Equal(t, 3, len(scanReport.DetailTable.Rows)) | ||||
|  | ||||
| 		// assert that library info is filled and sorting has been executed | ||||
| 		assert.Equal(t, "vul2", scanReport.DetailTable.Rows[0].Columns[5].Content) | ||||
| 		assert.Equal(t, "vul1", scanReport.DetailTable.Rows[1].Columns[5].Content) | ||||
| 		assert.Equal(t, "vul3", scanReport.DetailTable.Rows[2].Columns[5].Content) | ||||
|  | ||||
| 		// assert that CVSS version identification has been done | ||||
| 		assert.Equal(t, "v3", scanReport.DetailTable.Rows[0].Columns[3].Content) | ||||
| 		assert.Equal(t, "v3", scanReport.DetailTable.Rows[1].Columns[3].Content) | ||||
| 		assert.Equal(t, "v2", scanReport.DetailTable.Rows[2].Columns[3].Content) | ||||
|  | ||||
| 		// assert proper rating and styling of high prio issues | ||||
| 		assert.Equal(t, "8", scanReport.DetailTable.Rows[0].Columns[2].Content) | ||||
| 		assert.Equal(t, "7", scanReport.DetailTable.Rows[1].Columns[2].Content) | ||||
| 		assert.Equal(t, "6", scanReport.DetailTable.Rows[2].Columns[2].Content) | ||||
| 		assert.Equal(t, "red-cell", scanReport.DetailTable.Rows[0].Columns[2].Style.String()) | ||||
| 		assert.Equal(t, "red-cell", scanReport.DetailTable.Rows[1].Columns[2].Style.String()) | ||||
| 		assert.Equal(t, "yellow-cell", scanReport.DetailTable.Rows[2].Columns[2].Style.String()) | ||||
|  | ||||
| 		assert.Contains(t, scanReport.DetailTable.Rows[0].Columns[10].Content, "this is the top fix") | ||||
|  | ||||
| 	}) | ||||
| } | ||||
|  | ||||
| func TestCreateSarifResultFile(t *testing.T) { | ||||
| 	scan := &Scan{ProductVersion: "1"} | ||||
| 	scan.AppendScannedProject("project1") | ||||
| 	scan.AgentName = "Some test agent" | ||||
| 	scan.AgentVersion = "1.2.6" | ||||
| 	alerts := []Alert{ | ||||
| 		{Library: Library{Filename: "vul1", ArtifactID: "org.some.lib"}, Vulnerability: Vulnerability{CVSS3Score: 7.0, Score: 6}}, | ||||
| 		{Library: Library{Filename: "vul2", ArtifactID: "org.some.lib"}, Vulnerability: Vulnerability{CVSS3Score: 8.0, TopFix: Fix{Message: "this is the top fix"}}}, | ||||
| 		{Library: Library{Filename: "vul3", ArtifactID: "org.some.lib2"}, Vulnerability: Vulnerability{Score: 6}}, | ||||
| 	} | ||||
|  | ||||
| 	sarif := CreateSarifResultFile(scan, &alerts) | ||||
|  | ||||
| 	assert.Equal(t, "https://docs.oasis-open.org/sarif/sarif/v2.1.0/cos01/schemas/sarif-schema-2.1.0.json", sarif.Schema) | ||||
| 	assert.Equal(t, "2.1.0", sarif.Version) | ||||
| 	assert.Equal(t, 1, len(sarif.Runs)) | ||||
| 	assert.Equal(t, "Some test agent", sarif.Runs[0].Tool.Driver.Name) | ||||
| 	assert.Equal(t, "1.2.6", sarif.Runs[0].Tool.Driver.Version) | ||||
| 	assert.Equal(t, 3, len(sarif.Runs[0].Tool.Driver.Rules)) | ||||
| 	assert.Equal(t, 3, len(sarif.Runs[0].Results)) | ||||
| 	// TODO add more extensive verification once we agree on the format details | ||||
| } | ||||
|  | ||||
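For readers new to SARIF, the assertions above already pin down the emitted skeleton: one run whose tool driver carries the agent name and version, plus one rule and one result per alert. A self-contained sketch of that shape, using local stand-in types rather than the pkg/format definitions (field names beyond the asserted ones, and the one-rule-per-alert mapping, are assumptions):

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // Local stand-ins for the SARIF 2.1.0 skeleton asserted above; the
    // real pkg/format types may name things differently.
    type sarifLog struct {
        Schema  string `json:"$schema"`
        Version string `json:"version"`
        Runs    []run  `json:"runs"`
    }

    type run struct {
        Tool    tool     `json:"tool"`
        Results []result `json:"results"`
    }

    type tool struct {
        Driver driver `json:"driver"`
    }

    type driver struct {
        Name    string `json:"name"`
        Version string `json:"version"`
        Rules   []rule `json:"rules"`
    }

    type rule struct {
        ID string `json:"id"`
    }

    type result struct {
        RuleID string `json:"ruleId"`
    }

    func main() {
        doc := sarifLog{
            Schema:  "https://docs.oasis-open.org/sarif/sarif/v2.1.0/cos01/schemas/sarif-schema-2.1.0.json",
            Version: "2.1.0",
            Runs: []run{{
                Tool: tool{Driver: driver{
                    Name:    "Some test agent",
                    Version: "1.2.6",
                    Rules:   []rule{{ID: "vul1"}, {ID: "vul2"}, {ID: "vul3"}},
                }},
                Results: []result{{RuleID: "vul1"}, {RuleID: "vul2"}, {RuleID: "vul3"}},
            }},
        }
        out, _ := json.MarshalIndent(doc, "", "  ")
        fmt.Println(string(out))
    }
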
| func TestWriteCustomVulnerabilityReports(t *testing.T) { | ||||
|  | ||||
| 	t.Run("success", func(t *testing.T) { | ||||
| 		productName := "mock-product" | ||||
| 		scan := &Scan{ProductVersion: "1"} | ||||
| 		scan.AppendScannedProject("project1") | ||||
| 		scan.AppendScannedProject("project2") | ||||
|  | ||||
| 		scanReport := reporting.ScanReport{} | ||||
| 		var utilsMock piperutils.FileUtils = &mock.FilesMock{} | ||||
|  | ||||
| 		reportPaths, err := WriteCustomVulnerabilityReports(productName, scan, scanReport, utilsMock) | ||||
|  | ||||
| 		assert.NoError(t, err) | ||||
| 		assert.Equal(t, 1, len(reportPaths)) | ||||
|  | ||||
| 		exists, err := utilsMock.FileExists(reportPaths[0].Target) | ||||
| 		assert.NoError(t, err) | ||||
| 		assert.True(t, exists) | ||||
|  | ||||
| 		exists, err = utilsMock.FileExists(filepath.Join(reporting.StepReportDirectory, "whitesourceExecuteScan_oss_27322f16a39c10c852ba6639538140a03e08e93f.json")) | ||||
| 		assert.NoError(t, err) | ||||
| 		assert.True(t, exists) | ||||
| 	}) | ||||
|  | ||||
| 	t.Run("failed to write HTML report", func(t *testing.T) { | ||||
| 		productName := "mock-product" | ||||
| 		scan := &Scan{ProductVersion: "1"} | ||||
| 		scanReport := reporting.ScanReport{} | ||||
| 		utilsMock := &mock.FilesMock{} | ||||
| 		utilsMock.FileWriteErrors = map[string]error{ | ||||
| 			filepath.Join(ReportsDirectory, "piper_whitesource_vulnerability_report.html"): fmt.Errorf("write error"), | ||||
| 		} | ||||
|  | ||||
| 		_, err := WriteCustomVulnerabilityReports(productName, scan, scanReport, utilsMock) | ||||
| 		assert.Contains(t, fmt.Sprint(err), "failed to write html report") | ||||
| 	}) | ||||
|  | ||||
| 	t.Run("failed to write json report", func(t *testing.T) { | ||||
| 		productName := "mock-product" | ||||
| 		scan := &Scan{ProductVersion: "1"} | ||||
| 		scan.AppendScannedProject("project1") | ||||
| 		scanReport := reporting.ScanReport{} | ||||
| 		utilsMock := &mock.FilesMock{} | ||||
| 		utilsMock.FileWriteErrors = map[string]error{ | ||||
| 			filepath.Join(reporting.StepReportDirectory, "whitesourceExecuteScan_oss_e860d3a7cc8ca3261f065773404ba43e9a0b9d5b.json"): fmt.Errorf("write error"), | ||||
| 		} | ||||
|  | ||||
| 		_, err := WriteCustomVulnerabilityReports(productName, scan, scanReport, utilsMock) | ||||
| 		assert.Contains(t, fmt.Sprint(err), "failed to write json report") | ||||
| 	}) | ||||
| } | ||||
|  | ||||
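Note that the JSON report name embeds a SHA-1 digest that changes with the scanned projects: the two tests above expect different file names for one versus two projects. A plausible derivation, assuming the digest is taken over the product name plus the scanned project names (the exact input string is an assumption, not confirmed by the diff):

    package main

    import (
        "crypto/sha1"
        "fmt"
        "strings"
    )

    // reportSha sketches how the digest in
    // "whitesourceExecuteScan_oss_<sha1>.json" could be derived; the
    // exact concatenation used by the package is an assumption.
    func reportSha(productName string, scannedProjects []string) string {
        data := []byte(productName + "," + strings.Join(scannedProjects, ","))
        return fmt.Sprintf("%x", sha1.Sum(data))
    }

    func main() {
        fmt.Println(reportSha("mock-product", []string{"project1", "project2"}))
    }
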
| func TestWriteSarifFile(t *testing.T) { | ||||
|  | ||||
| 	t.Run("success", func(t *testing.T) { | ||||
| 		sarif := format.SARIF{} | ||||
| 		var utilsMock piperutils.FileUtils = &mock.FilesMock{} | ||||
|  | ||||
| 		reportPaths, err := WriteSarifFile(&sarif, utilsMock) | ||||
|  | ||||
| 		assert.NoError(t, err) | ||||
| 		assert.Equal(t, 1, len(reportPaths)) | ||||
|  | ||||
| 		exists, err := utilsMock.FileExists(reportPaths[0].Target) | ||||
| 		assert.NoError(t, err) | ||||
| 		assert.True(t, exists) | ||||
|  | ||||
| 		exists, err = utilsMock.FileExists(filepath.Join(ReportsDirectory, "piper_whitesource_vulnerability.sarif")) | ||||
| 		assert.NoError(t, err) | ||||
| 		assert.True(t, exists) | ||||
| 	}) | ||||
|  | ||||
| 	t.Run("failed to write HTML report", func(t *testing.T) { | ||||
| 		sarif := format.SARIF{} | ||||
| 		utilsMock := &mock.FilesMock{} | ||||
| 		utilsMock.FileWriteErrors = map[string]error{ | ||||
| 			filepath.Join(ReportsDirectory, "piper_whitesource_vulnerability.sarif"): fmt.Errorf("write error"), | ||||
| 		} | ||||
|  | ||||
| 		_, err := WriteSarifFile(&sarif, utilsMock) | ||||
| 		assert.Contains(t, fmt.Sprint(err), "failed to write SARIF file") | ||||
| 	}) | ||||
| } | ||||
|  | ||||
| func TestCountSecurityVulnerabilities(t *testing.T) { | ||||
| 	t.Parallel() | ||||
|  | ||||
| 	alerts := []Alert{ | ||||
| 		{Vulnerability: Vulnerability{CVSS3Score: 7.1}}, | ||||
| 		{Vulnerability: Vulnerability{CVSS3Score: 7}}, | ||||
| 		{Vulnerability: Vulnerability{CVSS3Score: 6}}, | ||||
| 	} | ||||
|  | ||||
| 	severe, nonSevere := CountSecurityVulnerabilities(&alerts, 7.0) | ||||
| 	assert.Equal(t, 2, severe) | ||||
| 	assert.Equal(t, 1, nonSevere) | ||||
| } | ||||
|  | ||||
| func TestIsSevereVulnerability(t *testing.T) { | ||||
| 	tt := []struct { | ||||
| 		alert    Alert | ||||
| 		limit    float64 | ||||
| 		expected bool | ||||
| 	}{ | ||||
| 		{alert: Alert{Vulnerability: Vulnerability{CVSS3Score: 0}}, limit: 0, expected: true}, | ||||
| 		{alert: Alert{Vulnerability: Vulnerability{CVSS3Score: 6.9, Score: 6}}, limit: 7.0, expected: false}, | ||||
| 		{alert: Alert{Vulnerability: Vulnerability{CVSS3Score: 7.0, Score: 6}}, limit: 7.0, expected: true}, | ||||
| 		{alert: Alert{Vulnerability: Vulnerability{CVSS3Score: 7.1, Score: 6}}, limit: 7.0, expected: true}, | ||||
| 		{alert: Alert{Vulnerability: Vulnerability{CVSS3Score: 6, Score: 6.9}}, limit: 7.0, expected: false}, | ||||
| 		{alert: Alert{Vulnerability: Vulnerability{CVSS3Score: 6, Score: 7.0}}, limit: 7.0, expected: false}, | ||||
| 		{alert: Alert{Vulnerability: Vulnerability{CVSS3Score: 6, Score: 7.1}}, limit: 7.0, expected: false}, | ||||
| 		{alert: Alert{Vulnerability: Vulnerability{Score: 6.9}}, limit: 7.0, expected: false}, | ||||
| 		{alert: Alert{Vulnerability: Vulnerability{Score: 7.0}}, limit: 7.0, expected: true}, | ||||
| 		{alert: Alert{Vulnerability: Vulnerability{Score: 7.1}}, limit: 7.0, expected: true}, | ||||
| 	} | ||||
|  | ||||
| 	for i, test := range tt { | ||||
| 		assert.Equalf(t, test.expected, isSevereVulnerability(test.alert, test.limit), "run %v failed", i) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func TestVulnerabilityScore(t *testing.T) { | ||||
| 	t.Parallel() | ||||
|  | ||||
| 	tt := []struct { | ||||
| 		alert    Alert | ||||
| 		expected float64 | ||||
| 	}{ | ||||
| 		{alert: Alert{Vulnerability: Vulnerability{CVSS3Score: 7.0, Score: 6}}, expected: 7.0}, | ||||
| 		{alert: Alert{Vulnerability: Vulnerability{CVSS3Score: 7.0}}, expected: 7.0}, | ||||
| 		{alert: Alert{Vulnerability: Vulnerability{Score: 6}}, expected: 6}, | ||||
| 	} | ||||
| 	for i, test := range tt { | ||||
| 		assert.Equalf(t, test.expected, vulnerabilityScore(test.alert), "run %v failed", i) | ||||
| 	} | ||||
| } | ||||
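Taken together, the three table tests above fully determine the scoring logic: the CVSS v3 score wins whenever it is set, otherwise the CVSS v2 score is used, and an alert counts as severe when its effective score reaches the limit. A sketch consistent with every row, written against the package's Alert type (the package source may differ in detail):

    // vulnerabilityScore prefers the CVSS v3 score and falls back to
    // the CVSS v2 score when no v3 score is set.
    func vulnerabilityScore(alert Alert) float64 {
        if alert.Vulnerability.CVSS3Score > 0 {
            return alert.Vulnerability.CVSS3Score
        }
        return alert.Vulnerability.Score
    }

    // isSevereVulnerability treats an alert as severe once its
    // effective score reaches the configured limit.
    func isSevereVulnerability(alert Alert, limit float64) bool {
        return vulnerabilityScore(alert) >= limit
    }

    // CountSecurityVulnerabilities splits alerts into severe and
    // non-severe buckets, matching the 2/1 split asserted above.
    func CountSecurityVulnerabilities(alerts *[]Alert, limit float64) (int, int) {
        severe := 0
        for _, alert := range *alerts {
            if isSevereVulnerability(alert, limit) {
                severe++
            }
        }
        return severe, len(*alerts) - severe
    }
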
| @@ -19,6 +19,8 @@ type Scan struct { | ||||
| 	ProductVersion  string | ||||
| 	scannedProjects map[string]Project | ||||
| 	scanTimes       map[string]time.Time | ||||
| 	AgentName       string | ||||
| 	AgentVersion    string | ||||
| } | ||||
|  | ||||
| func (s *Scan) init() { | ||||
|   | ||||
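The new AgentName and AgentVersion fields are what TestCreateSarifResultFile above reads into sarif.Runs[0].Tool.Driver; the scanner-specific Execute*Scan functions below populate them.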
| @@ -2,6 +2,7 @@ package whitesource | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
|  | ||||
| 	"github.com/SAP/jenkins-library/pkg/log" | ||||
| ) | ||||
|  | ||||
|   | ||||
| @@ -2,15 +2,18 @@ package whitesource | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"github.com/SAP/jenkins-library/pkg/log" | ||||
| 	"github.com/SAP/jenkins-library/pkg/maven" | ||||
| 	"path/filepath" | ||||
| 	"strings" | ||||
|  | ||||
| 	"github.com/SAP/jenkins-library/pkg/log" | ||||
| 	"github.com/SAP/jenkins-library/pkg/maven" | ||||
| ) | ||||
|  | ||||
| // ExecuteMavenScan constructs maven parameters from the given configuration, and executes the maven goal | ||||
| // "org.whitesource:whitesource-maven-plugin:19.5.1:update". | ||||
| func (s *Scan) ExecuteMavenScan(config *ScanOptions, utils Utils) error { | ||||
| 	s.AgentName = "WhiteSource Maven Plugin" | ||||
| 	s.AgentVersion = "unknown" | ||||
| 	log.Entry().Info("Using WhiteSource scan for Maven project") | ||||
| 	pomPath := config.PomPath | ||||
| 	if pomPath == "" { | ||||
|   | ||||
| @@ -3,10 +3,11 @@ package whitesource | ||||
| import ( | ||||
| 	"encoding/json" | ||||
| 	"fmt" | ||||
| 	"github.com/SAP/jenkins-library/pkg/log" | ||||
| 	"io/ioutil" | ||||
| 	"os" | ||||
| 	"path/filepath" | ||||
|  | ||||
| 	"github.com/SAP/jenkins-library/pkg/log" | ||||
| ) | ||||
|  | ||||
| const whiteSourceConfig = "whitesource.config.json" | ||||
| @@ -80,6 +81,8 @@ func (s *Scan) writeWhitesourceConfigJSON(config *ScanOptions, utils Utils, devD | ||||
|  | ||||
| // ExecuteNpmScan iterates over all found npm modules and performs a scan in each one. | ||||
| func (s *Scan) ExecuteNpmScan(config *ScanOptions, utils Utils) error { | ||||
| 	s.AgentName = "WhiteSource NPM Plugin" | ||||
| 	s.AgentVersion = "unknown" | ||||
| 	modules, err := utils.FindPackageJSONFiles(config) | ||||
| 	if err != nil { | ||||
| 		return fmt.Errorf("failed to find package.json files with excludes: %w", err) | ||||
|   | ||||
| @@ -22,6 +22,7 @@ const projectRegEx = `Project name: ([^,]*), URL: (.*)` | ||||
|  | ||||
| // ExecuteUAScan executes a scan with the Whitesource Unified Agent. | ||||
| func (s *Scan) ExecuteUAScan(config *ScanOptions, utils Utils) error { | ||||
| 	s.AgentName = "WhiteSource Unified Agent" | ||||
| 	if config.BuildTool != "mta" { | ||||
| 		return s.ExecuteUAScanInPath(config, utils, config.ScanPath) | ||||
| 	} | ||||
| @@ -83,6 +84,17 @@ func (s *Scan) ExecuteUAScanInPath(config *ScanOptions, utils Utils, scanPath st | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	// Fetch version of UA | ||||
| 	versionBuffer := bytes.Buffer{} | ||||
| 	utils.Stdout(&versionBuffer) | ||||
| 	err = utils.RunExecutable(javaPath, "-jar", config.AgentFileName, "-v") | ||||
| 	if err != nil { | ||||
| 		return errors.Wrap(err, "Failed to determine UA version") | ||||
| 	} | ||||
| 	s.AgentVersion = strings.TrimSpace(versionBuffer.String()) | ||||
| 	log.Entry().Debugf("Read UA version %v from Stdout", s.AgentVersion) | ||||
| 	utils.Stdout(log.Writer()) | ||||
|  | ||||
| 	// ToDo: Check if Download of Docker/container image should be done here instead of in cmd/whitesourceExecuteScan.go | ||||
|  | ||||
| 	// ToDo: check if this is required | ||||
| @@ -123,7 +135,6 @@ func (s *Scan) ExecuteUAScanInPath(config *ScanOptions, utils Utils, scanPath st | ||||
| 		defer wg.Done() | ||||
| 		scanLog(trErr, s) | ||||
| 	}() | ||||
|  | ||||
| 	err = utils.RunExecutable(javaPath, "-jar", config.AgentFileName, "-d", scanPath, "-c", configPath, "-wss.url", config.AgentURL) | ||||
|  | ||||
| 	if err := removeJre(javaPath, utils); err != nil { | ||||
|   | ||||
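The version probe above temporarily points the Utils stdout at a buffer, runs the agent with -v, trims the captured output, and then restores the log writer. The same pattern outside the Utils abstraction, as a self-contained sketch:

    package main

    import (
        "bytes"
        "fmt"
        "os/exec"
        "strings"
    )

    // unifiedAgentVersion captures the agent's stdout while running
    // `java -jar <agent> -v` and returns the trimmed version string.
    func unifiedAgentVersion(javaPath, agentJar string) (string, error) {
        var out bytes.Buffer
        cmd := exec.Command(javaPath, "-jar", agentJar, "-v")
        cmd.Stdout = &out
        if err := cmd.Run(); err != nil {
            return "", fmt.Errorf("failed to determine UA version: %w", err)
        }
        return strings.TrimSpace(out.String()), nil
    }

    func main() {
        version, err := unifiedAgentVersion("java", "unified-agent.jar")
        if err != nil {
            fmt.Println(err)
            return
        }
        fmt.Println("UA version:", version)
    }
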
| @@ -2,11 +2,12 @@ package whitesource | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"github.com/SAP/jenkins-library/pkg/log" | ||||
| 	"github.com/stretchr/testify/assert" | ||||
| 	"path/filepath" | ||||
| 	"strings" | ||||
| 	"testing" | ||||
|  | ||||
| 	"github.com/SAP/jenkins-library/pkg/log" | ||||
| 	"github.com/stretchr/testify/assert" | ||||
| ) | ||||
|  | ||||
| func TestExecuteUAScan(t *testing.T) { | ||||
| @@ -24,7 +25,8 @@ func TestExecuteUAScan(t *testing.T) { | ||||
| 		err := scan.ExecuteUAScan(&config, utilsMock) | ||||
| 		assert.NoError(t, err) | ||||
| 		assert.Equal(t, "maven", config.BuildTool) | ||||
| 		assert.Contains(t, utilsMock.Calls[1].Params, ".") | ||||
| 		assert.Contains(t, utilsMock.Calls[1].Params, "-v") | ||||
| 		assert.Contains(t, utilsMock.Calls[2].Params, ".") | ||||
| 	}) | ||||
|  | ||||
| 	t.Run("success - mta", func(t *testing.T) { | ||||
| @@ -41,7 +43,8 @@ func TestExecuteUAScan(t *testing.T) { | ||||
| 		err := scan.ExecuteUAScan(&config, utilsMock) | ||||
| 		assert.NoError(t, err) | ||||
| 		assert.Equal(t, "mta", config.BuildTool) | ||||
| 		assert.Contains(t, utilsMock.Calls[1].Params, ".") | ||||
| 		assert.Contains(t, utilsMock.Calls[1].Params, "-v") | ||||
| 		assert.Contains(t, utilsMock.Calls[2].Params, ".") | ||||
| 	}) | ||||
|  | ||||
| 	t.Run("error - maven", func(t *testing.T) { | ||||
| @@ -124,16 +127,16 @@ func TestExecuteUAScanInPath(t *testing.T) { | ||||
|  | ||||
| 		err := scan.ExecuteUAScanInPath(&config, utilsMock, "") | ||||
| 		assert.NoError(t, err) | ||||
| 		assert.Equal(t, "java", utilsMock.Calls[1].Exec) | ||||
| 		assert.Equal(t, 8, len(utilsMock.Calls[1].Params)) | ||||
| 		assert.Contains(t, utilsMock.Calls[1].Params, "-jar") | ||||
| 		assert.Contains(t, utilsMock.Calls[1].Params, "-d") | ||||
| 		assert.Contains(t, utilsMock.Calls[1].Params, ".") | ||||
| 		assert.Contains(t, utilsMock.Calls[1].Params, "-c") | ||||
| 		assert.Contains(t, utilsMock.Calls[1].Params, "unified-agent.jar") | ||||
| 		assert.Equal(t, "java", utilsMock.Calls[2].Exec) | ||||
| 		assert.Equal(t, 8, len(utilsMock.Calls[2].Params)) | ||||
| 		assert.Contains(t, utilsMock.Calls[2].Params, "-jar") | ||||
| 		assert.Contains(t, utilsMock.Calls[2].Params, "-d") | ||||
| 		assert.Contains(t, utilsMock.Calls[2].Params, ".") | ||||
| 		assert.Contains(t, utilsMock.Calls[2].Params, "-c") | ||||
| 		assert.Contains(t, utilsMock.Calls[2].Params, "unified-agent.jar") | ||||
| 		// name of the config file is not tested since it is dynamic; acceptable here because we also assert the parameter count | ||||
| 		assert.Contains(t, utilsMock.Calls[1].Params, "-wss.url") | ||||
| 		assert.Contains(t, utilsMock.Calls[1].Params, config.AgentURL) | ||||
| 		assert.Contains(t, utilsMock.Calls[2].Params, "-wss.url") | ||||
| 		assert.Contains(t, utilsMock.Calls[2].Params, config.AgentURL) | ||||
| 	}) | ||||
|  | ||||
| 	t.Run("success - dedicated path", func(t *testing.T) { | ||||
| @@ -152,8 +155,8 @@ func TestExecuteUAScanInPath(t *testing.T) { | ||||
|  | ||||
| 		err := scan.ExecuteUAScanInPath(&config, utilsMock, "./my/test/path") | ||||
| 		assert.NoError(t, err) | ||||
| 		assert.Contains(t, utilsMock.Calls[1].Params, "-d") | ||||
| 		assert.Contains(t, utilsMock.Calls[1].Params, "./my/test/path") | ||||
| 		assert.Contains(t, utilsMock.Calls[2].Params, "-d") | ||||
| 		assert.Contains(t, utilsMock.Calls[2].Params, "./my/test/path") | ||||
| 	}) | ||||
|  | ||||
| 	t.Run("error - download agent", func(t *testing.T) { | ||||
| @@ -213,7 +216,7 @@ func TestExecuteUAScanInPath(t *testing.T) { | ||||
| 		scan := newTestScan(&config) | ||||
|  | ||||
| 		err := scan.ExecuteUAScanInPath(&config, utilsMock, "") | ||||
| 		assert.Contains(t, fmt.Sprint(err), "failed to execute WhiteSource scan with exit code") | ||||
| 		assert.Contains(t, fmt.Sprint(err), "Failed to determine UA version") | ||||
| 	}) | ||||
| } | ||||
|  | ||||
|   | ||||
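The index shifts in the assertions above follow directly from the new version probe: the `-v` invocation is now recorded as the second executable call (index 1), pushing the actual scan invocation from Calls[1] to Calls[2].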
| @@ -1,3 +1,4 @@ | ||||
| //go:build !release | ||||
| // +build !release | ||||
|  | ||||
| package whitesource | ||||
| @@ -129,7 +130,6 @@ func NewSystemMockWithProjectName(lastUpdateDate, projectName string) *SystemMoc | ||||
| 		Name:     "mock-library", | ||||
| 		Filename: "mock-library-file", | ||||
| 		Version:  "mock-library-version", | ||||
| 		Project:  projectName, | ||||
| 	} | ||||
| 	return &SystemMock{ | ||||
| 		ProductName: "mock-product", | ||||
|   | ||||
| @@ -1,3 +1,4 @@ | ||||
| //go:build !release | ||||
| // +build !release | ||||
|  | ||||
| package whitesource | ||||
|   | ||||
| @@ -43,10 +43,21 @@ type GroupAssignment struct { | ||||
|  | ||||
| // Alert | ||||
| type Alert struct { | ||||
| 	Vulnerability Vulnerability `json:"vulnerability"` | ||||
| 	Library       Library       `json:"library,omitempty"` | ||||
| 	Project       string        `json:"project,omitempty"` | ||||
| 	CreationDate  string        `json:"creation_date,omitempty"` | ||||
| 	Vulnerability    Vulnerability `json:"vulnerability"` | ||||
| 	Type             string        `json:"type,omitempty"` | ||||
| 	Level            string        `json:"level,omitempty"` | ||||
| 	Library          Library       `json:"library,omitempty"` | ||||
| 	Project          string        `json:"project,omitempty"` | ||||
| 	DirectDependency bool          `json:"directDependency,omitempty"` | ||||
| 	Description      string        `json:"description,omitempty"` | ||||
| 	CreationDate     string        `json:"date,omitempty"` | ||||
| 	ModifiedDate     string        `json:"modifiedDate,omitempty"` | ||||
| 	Status           string        `json:"status,omitempty"` | ||||
| } | ||||
|  | ||||
| // ToMarkdown returns the markdown representation of the contents | ||||
| func (a *Alert) ToMarkdown() string { | ||||
| 	return fmt.Sprintf("**Vulnerability %v**\n| Severity | Package | Installed Version | Fix Resolution | Link |\n| --- | --- | --- | --- | --- |\n|%v|%v|%v|%v|[%v](%v)|\n", a.Vulnerability.Name, a.Vulnerability.Severity, a.Library.ArtifactID, a.Library.Version, a.Vulnerability.TopFix.FixResolution, a.Vulnerability.Name, a.Vulnerability.URL) | ||||
| } | ||||
|  | ||||
| // Library | ||||
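Rendered, ToMarkdown yields a small per-alert table. For a hypothetical alert (the package name, versions, CVE, and URL below are illustrative only) the output reads:

    **Vulnerability CVE-2021-23337**
    | Severity | Package | Installed Version | Fix Resolution | Link |
    | --- | --- | --- | --- | --- |
    |high|lodash|4.17.15|Upgrade to lodash 4.17.21|[CVE-2021-23337](https://vuln.example/CVE-2021-23337)|
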
| @@ -56,24 +67,23 @@ type Library struct { | ||||
| 	ArtifactID string `json:"artifactId,omitempty"` | ||||
| 	GroupID    string `json:"groupId,omitempty"` | ||||
| 	Version    string `json:"version,omitempty"` | ||||
| 	Project    string `json:"project,omitempty"` | ||||
| } | ||||
|  | ||||
| // Vulnerability defines a vulnerability as returned by WhiteSource | ||||
| type Vulnerability struct { | ||||
| 	Name              string  `json:"name,omitempty"` | ||||
| 	Type              string  `json:"type,omitempty"` | ||||
| 	Severity          string  `json:"severity,omitempty"` | ||||
| 	Score             float64 `json:"score,omitempty"` | ||||
| 	CVSS3Severity     string  `json:"cvss3_severity,omitempty"` | ||||
| 	CVSS3Score        float64 `json:"cvss3_score,omitempty"` | ||||
| 	PublishDate       string  `json:"publishDate,omitempty"` | ||||
| 	URL               string  `json:"url,omitempty"` | ||||
| 	Description       string  `json:"description,omitempty"` | ||||
| 	TopFix            Fix     `json:"topFix,omitempty"` | ||||
| 	AllFixes          []Fix   `json:"allFixes,omitempty"` | ||||
| 	Level             string  `json:"level,omitempty"` | ||||
| 	FixResolutionText string  `json:"fixResolutionText,omitempty"` | ||||
| 	Name              string      `json:"name,omitempty"` | ||||
| 	Type              string      `json:"type,omitempty"` | ||||
| 	Severity          string      `json:"severity,omitempty"` | ||||
| 	Score             float64     `json:"score,omitempty"` | ||||
| 	CVSS3Severity     string      `json:"cvss3_severity,omitempty"` | ||||
| 	CVSS3Score        float64     `json:"cvss3_score,omitempty"` | ||||
| 	PublishDate       string      `json:"publishDate,omitempty"` | ||||
| 	URL               string      `json:"url,omitempty"` | ||||
| 	Description       string      `json:"description,omitempty"` | ||||
| 	TopFix            Fix         `json:"topFix,omitempty"` | ||||
| 	AllFixes          []Fix       `json:"allFixes,omitempty"` | ||||
| 	FixResolutionText string      `json:"fixResolutionText,omitempty"` | ||||
| 	References        []Reference `json:"references,omitempty"` | ||||
| } | ||||
|  | ||||
| // Fix defines a Fix as returned by WhiteSource | ||||
| @@ -88,6 +98,13 @@ type Fix struct { | ||||
| 	ExtraData     string `json:"extraData,omitempty"` | ||||
| } | ||||
|  | ||||
| // Reference defines a reference for the library affected | ||||
| type Reference struct { | ||||
| 	URL                 string `json:"url,omitempty"` | ||||
| 	Homepage            string `json:"homepage,omitempty"` | ||||
| 	GenericPackageIndex string `json:"genericPackageIndex,omitempty"` | ||||
| } | ||||
|  | ||||
| // Project defines a WhiteSource project with name and token | ||||
| type Project struct { | ||||
| 	ID             int64  `json:"id"` | ||||
| @@ -525,11 +542,10 @@ func (s *System) sendRequest(req Request) ([]byte, error) { | ||||
| 	headers := http.Header{} | ||||
| 	headers.Add("Content-Type", "application/json") | ||||
| 	response, err := s.httpClient.SendRequest(http.MethodPost, s.serverURL, bytes.NewBuffer(body), headers, nil) | ||||
|  | ||||
| 	if err != nil { | ||||
| 		return responseBody, errors.Wrap(err, "failed to send request to WhiteSource") | ||||
| 	} | ||||
|  | ||||
| 	defer response.Body.Close() | ||||
| 	responseBody, err = ioutil.ReadAll(response.Body) | ||||
| 	if err != nil { | ||||
| 		return responseBody, errors.Wrap(err, "failed to read WhiteSource response") | ||||
|   | ||||
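Moving response.Body.Close() into a defer directly after the error check, as above, guarantees the body is released on every later return path, including when ioutil.ReadAll fails.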
| @@ -353,7 +353,7 @@ func TestGetProjectAlertsByType(t *testing.T) { | ||||
| 		requestBody, err := ioutil.ReadAll(myTestClient.requestBody) | ||||
| 		assert.NoError(t, err) | ||||
| 		assert.Contains(t, string(requestBody), `"requestType":"getProjectAlertsByType"`) | ||||
| 		assert.Equal(t, []Alert{{Vulnerability: Vulnerability{Name: "testVulnerability1"}}}, alerts) | ||||
| 		assert.Equal(t, []Alert{{Vulnerability: Vulnerability{Name: "testVulnerability1"}, Type: "SECURITY_VULNERABILITY"}}, alerts) | ||||
| 	}) | ||||
|  | ||||
| 	t.Run("error case", func(t *testing.T) { | ||||
|   | ||||
| @@ -35,6 +35,9 @@ spec: | ||||
|         aliases: | ||||
|           - name: dockerCredentialsId | ||||
|             deprecated: true | ||||
|       - name: githubTokenCredentialsId | ||||
|         description: Jenkins 'Secret text' credentials ID containing the token to authenticate to GitHub. | ||||
|         type: jenkins | ||||
|     params: | ||||
|       - name: agentDownloadUrl | ||||
|         type: string | ||||
| @@ -488,6 +491,86 @@ spec: | ||||
|           - STEPS | ||||
|         aliases: | ||||
|           - name: npm/defaultNpmRegistry | ||||
|       - name: githubToken | ||||
|         description: "GitHub personal access token as per | ||||
|           https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line" | ||||
|         scope: | ||||
|           - GENERAL | ||||
|           - PARAMETERS | ||||
|           - STAGES | ||||
|           - STEPS | ||||
|         type: string | ||||
|         secret: true | ||||
|         aliases: | ||||
|           - name: access_token | ||||
|         resourceRef: | ||||
|           - name: githubTokenCredentialsId | ||||
|             type: secret | ||||
|           - type: vaultSecret | ||||
|             default: github | ||||
|             name: githubVaultSecretName | ||||
|       - name: createResultIssue | ||||
|         type: bool | ||||
|         description: "Whether the step creates a GitHub issue containing the scan results in the originating repo. | ||||
|         Since optimized pipelines are headless, the creation is implicitly activated for scheduled runs." | ||||
|         resourceRef: | ||||
|           - name: commonPipelineEnvironment | ||||
|             param: custom/optimizedAndScheduled | ||||
|         scope: | ||||
|           - PARAMETERS | ||||
|           - STAGES | ||||
|           - STEPS | ||||
|         default: false | ||||
|       - name: githubApiUrl | ||||
|         description: "Set the GitHub API URL." | ||||
|         scope: | ||||
|           - GENERAL | ||||
|           - PARAMETERS | ||||
|           - STAGES | ||||
|           - STEPS | ||||
|         type: string | ||||
|         default: "https://api.github.com" | ||||
|       - name: owner | ||||
|         aliases: | ||||
|           - name: githubOrg | ||||
|         description: "Set the GitHub organization." | ||||
|         resourceRef: | ||||
|           - name: commonPipelineEnvironment | ||||
|             param: github/owner | ||||
|         scope: | ||||
|           - PARAMETERS | ||||
|           - STAGES | ||||
|           - STEPS | ||||
|         type: string | ||||
|       - name: repository | ||||
|         aliases: | ||||
|           - name: githubRepo | ||||
|         description: "Set the GitHub repository." | ||||
|         resourceRef: | ||||
|           - name: commonPipelineEnvironment | ||||
|             param: github/repository | ||||
|         scope: | ||||
|           - PARAMETERS | ||||
|           - STAGES | ||||
|           - STEPS | ||||
|         type: string | ||||
|       - name: assignees | ||||
|         description: Defines the assignees for the GitHub issue created/updated with the results of the scan, as a list of login names. | ||||
|         scope: | ||||
|           - PARAMETERS | ||||
|           - STAGES | ||||
|           - STEPS | ||||
|         type: "[]string" | ||||
|         default: [] | ||||
|         mandatory: false | ||||
|       - name: customTlsCertificateLinks | ||||
|         type: "[]string" | ||||
|         description: "List of download links to custom TLS certificates. This is required to ensure trusted connections to instances with repositories (like Nexus) when the publish flag is set to true." | ||||
|         scope: | ||||
|           - GENERAL | ||||
|           - PARAMETERS | ||||
|           - STAGES | ||||
|           - STEPS | ||||
|     resources: | ||||
|       - name: buildDescriptor | ||||
|         type: stash | ||||
|   | ||||
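Downstream, these parameters feed the new issue-creation path. A minimal, self-contained sketch of what that flow amounts to with google/go-github; the major version in the import path and the wiring here are assumptions, not the step's actual code (which routes through the piper GitHub client and supports the custom TLS certificates configured above):

    package main

    import (
        "context"

        "github.com/google/go-github/v45/github"
        "golang.org/x/oauth2"
    )

    // createResultIssue files a scan-result issue in the originating
    // repo, roughly what createResultIssue=true enables; the helper
    // name and wiring are hypothetical.
    func createResultIssue(token, owner, repo, title, markdownBody string, assignees []string) error {
        ctx := context.Background()
        ts := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: token})
        client := github.NewClient(oauth2.NewClient(ctx, ts))
        issue := &github.IssueRequest{
            Title:     &title,
            Body:      &markdownBody, // e.g. the Alert.ToMarkdown output shown earlier
            Assignees: &assignees,
        }
        _, _, err := client.Issues.Create(ctx, owner, repo, issue)
        return err
    }
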
| @@ -16,6 +16,7 @@ void call(Map parameters = [:]) { | ||||
|     List credentials = [ | ||||
|         [type: 'token', id: 'orgAdminUserTokenCredentialsId', env: ['PIPER_orgToken']], | ||||
|         [type: 'token', id: 'userTokenCredentialsId', env: ['PIPER_userToken']], | ||||
|         [type: 'token', id: 'githubTokenCredentialsId', env: ['PIPER_githubToken']], | ||||
|         [type: 'file', id: 'dockerConfigJsonCredentialsId', env: ['PIPER_dockerConfigJSON']], | ||||
|     ] | ||||
|     piperExecuteBin(parameters, STEP_NAME, METADATA_FILE, credentials) | ||||
|   | ||||