1
0
mirror of https://github.com/SAP/jenkins-library.git synced 2025-01-04 04:07:16 +02:00

Protecode as GoLang (#1119)

* Protecode as go implementation

Co-authored-by: Sven Merk <33895725+nevskrem@users.noreply.github.com>
Co-authored-by: Oliver Nocon <33484802+OliverNocon@users.noreply.github.com>
This commit is contained in:
redehnroV 2020-02-06 16:16:34 +01:00 committed by GitHub
parent 1417f4650a
commit 2ebf2010b7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
20 changed files with 2416 additions and 19 deletions

2
.gitignore vendored
View File

@ -23,3 +23,5 @@ consumer-test/**/workspace
/piper
/piper.exe
.factorypath
/cache/protecode

View File

@ -55,6 +55,7 @@ func Execute() {
rootCmd.AddCommand(GithubCreatePullRequestCommand())
rootCmd.AddCommand(AbapEnvironmentPullGitRepoCommand())
rootCmd.AddCommand(CheckmarxExecuteScanCommand())
rootCmd.AddCommand(ProtecodeExecuteScanCommand())
addRootFlags(rootCmd)
if err := rootCmd.Execute(); err != nil {

330
cmd/protecodeExecuteScan.go Normal file
View File

@ -0,0 +1,330 @@
package cmd
import (
"encoding/json"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"regexp"
"strings"
"time"
"github.com/SAP/jenkins-library/pkg/command"
piperDocker "github.com/SAP/jenkins-library/pkg/docker"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/SAP/jenkins-library/pkg/protecode"
"github.com/SAP/jenkins-library/pkg/telemetry"
)
// protecodeData bundles all scan result information that is serialized into
// the protecodeExecuteScan.json report file.
type protecodeData struct {
	Target                      string           `json:"target,omitempty"`
	Mandatory                   bool             `json:"mandatory,omitempty"`
	ProductID                   string           `json:"productID,omitempty"`
	ServerURL                   string           `json:"serverUrl,omitempty"`
	FailOnSevereVulnerabilities bool             `json:"failOnSevereVulnerabilities,omitempty"`
	ExcludeCVEs                 string           `json:"excludeCVEs,omitempty"`
	Count                       string           `json:"count,omitempty"`
	Cvss2GreaterOrEqualSeven    string           `json:"cvss2GreaterOrEqualSeven,omitempty"`
	Cvss3GreaterOrEqualSeven    string           `json:"cvss3GreaterOrEqualSeven,omitempty"`
	ExcludedVulnerabilities     string           `json:"excludedVulnerabilities,omitempty"`
	TriagedVulnerabilities      string           `json:"triagedVulnerabilities,omitempty"`
	HistoricalVulnerabilities   string           `json:"historicalVulnerabilities,omitempty"`
	Vulnerabilities             []protecode.Vuln `json:"Vulnerabilities,omitempty"`
}

// Locations for report files and the local protecode image cache. These are
// variables (not constants) so tests can redirect them to temp directories.
var reportPath = "./"
var cachePath = "./cache"
var cacheProtecodeImagePath = "/protecode/Image"
var cacheProtecodePath = "/protecode"
// protecodeExecuteScan is the step entry point: it reroutes command output
// into the logging framework, builds a docker download client from the step
// configuration and delegates the actual work to runProtecodeScan.
func protecodeExecuteScan(config protecodeExecuteScanOptions, telemetryData *telemetry.CustomData, influx *protecodeExecuteScanInflux) error {
	cmd := command.Command{}
	// reroute command output to the logging framework
	cmd.Stdout(log.Entry().Writer())
	cmd.Stderr(log.Entry().Writer())
	return runProtecodeScan(&config, influx, createDockerClient(&config))
}
// runProtecodeScan drives the scan end to end: unless a fetch URL is
// configured it first downloads the docker image to scan, then executes the
// protecode scan via a freshly created API client, persists the results as
// influx data and finally cleans up the local protecode cache.
func runProtecodeScan(config *protecodeExecuteScanOptions, influx *protecodeExecuteScanInflux, dClient piperDocker.Download) error {
	var fileName, filePath string
	// create client for sending api requests to the protecode backend
	log.Entry().Debug("Create protecode client")
	client := createClient(config)
	if len(config.FetchURL) <= 0 {
		// no fetch URL configured: obtain the image locally so it can be uploaded
		log.Entry().Debugf("Get docker image: %v, %v, %v, %v", config.ScanImage, config.DockerRegistryURL, config.FilePath, config.IncludeLayers)
		fileName, filePath = getDockerImage(dClient, config)
		if len(config.FilePath) <= 0 {
			(*config).FilePath = filePath
			log.Entry().Debugf("Filepath for upload image: %v", config.FilePath)
		}
	}
	log.Entry().Debug("Execute protecode scan")
	parsedResult := executeProtecodeScan(client, config, fileName, writeReportToFile)
	log.Entry().Debug("Write influx data")
	setInfluxData(influx, parsedResult)
	// remove the uploaded file (deferred) and the protecode cache directory;
	// cleanup failures are only logged, they do not fail the step
	defer os.Remove(config.FilePath)
	deletePath := filepath.Join(cachePath, cacheProtecodePath)
	err := os.RemoveAll(deletePath)
	if err != nil {
		log.Entry().Warnf("Error during cleanup folder %v", err)
	}
	return nil
}
// artifactVersionRegex matches automatically generated versions of the form
// "<numeric version>-<14-digit timestamp>[<41-char commit suffix>]", e.g.
// "1.0.0-20200131085038+eeb7c10...". Compiled once at package init instead of
// on every call (the previous regexp.MatchString recompiled the pattern each
// time and silently ignored the compile error).
var artifactVersionRegex = regexp.MustCompile(`([\d\.]){1,}-[\d]{14}([\Wa-z\d]{41})?`)

// handleArtifactVersion reduces an automatically generated artifact version
// to its major version (e.g. "1.0.0-20200131085038+<commit>" -> "1") so that
// uploads keep a stable name across builds. Versions that do not match the
// generated pattern are returned unchanged.
func handleArtifactVersion(artifactVersion string) string {
	if artifactVersionRegex.MatchString(artifactVersion) {
		// keep only the part before the first dot (the major version)
		return strings.SplitN(artifactVersion, ".", 2)[0]
	}
	return artifactVersion
}
// getDockerImage downloads the docker image referenced by the step
// configuration into the local protecode cache and, if necessary, packages
// it as a tar archive. It returns the file name to upload and the path under
// which it can be found. Download/archiving failures terminate the step.
func getDockerImage(dClient piperDocker.Download, config *protecodeExecuteScanOptions) (string, string) {
	cacheImagePath := filepath.Join(cachePath, cacheProtecodeImagePath)
	deletePath := filepath.Join(cachePath, cacheProtecodePath)
	// start from a clean cache so layers of a previous image cannot leak in;
	// a failed cleanup is only logged (previously the error was dropped)
	if err := os.RemoveAll(deletePath); err != nil {
		log.Entry().Warnf("Error during cleanup folder %v", err)
	}
	// MkdirAll also creates the intermediate /protecode directory that was
	// just removed (plain Mkdir would fail there), and 0755 is a proper octal
	// permission — the former bare 600 was a decimal literal yielding an
	// unintended file mode
	if err := os.MkdirAll(cacheImagePath, 0755); err != nil {
		log.Entry().WithError(err).Fatal("Error during creation of image cache path")
	}
	imageSource, err := dClient.GetImageSource()
	if err != nil {
		log.Entry().WithError(err).Fatal("Error during get docker image source")
	}
	image, err := dClient.DownloadImageToPath(imageSource, cacheImagePath)
	if err != nil {
		log.Entry().Fatalf("Error during get docker image: %v", err)
	}
	tarFile, fileName := tarImage(config)
	if tarFile != nil {
		defer tarFile.Close()
		if err := dClient.TarImage(tarFile, image); err != nil {
			log.Entry().WithError(err).Fatal("Error during tar the docker image")
		}
	}
	// default the upload location to the cache unless a file path was configured
	resultFilePath := config.FilePath
	if len(config.FilePath) <= 0 {
		resultFilePath = cachePath
	}
	return fileName, resultFilePath
}
// tarImage creates an empty tar archive file in the local cache for the
// configured scan image unless the image reference already points to an
// archive (.tar/.tar.gz/.tgz). It returns the open file handle (nil when no
// archive needs to be written) and the file name to use for the upload.
func tarImage(config *protecodeExecuteScanOptions) (*os.File, string) {
	// strings.HasSuffix is required here: filepath.Ext only yields the last
	// element, so Ext("x.tar.gz") == ".gz" and the previous Ext-based check
	// could never match ".tar.gz"
	if strings.HasSuffix(config.ScanImage, ".tar") ||
		strings.HasSuffix(config.ScanImage, ".tar.gz") ||
		strings.HasSuffix(config.ScanImage, ".tgz") {
		// already an archive: upload it as-is
		return nil, config.ScanImage
	}
	artifactVersion := handleArtifactVersion(config.ArtifactVersion)
	// build a filesystem-safe file name from image reference and version
	fileName := fmt.Sprintf("%v%v.tar", strings.ReplaceAll(config.ScanImage, "/", "_"), strings.ReplaceAll(artifactVersion, ":", "_"))
	tarFileName := filepath.Join(cachePath, fileName)
	tarFile, err := os.Create(tarFileName)
	if err != nil {
		log.Entry().WithError(err).Fatal("Error during create tar for the docker image")
	}
	if err := os.Chmod(tarFileName, 0644); err != nil {
		log.Entry().WithError(err).Fatal("Error during create tar for the docker image")
	}
	return tarFile, fileName
}
// executeProtecodeScan runs the scan for an already prepared upload file or
// fetch URL: it looks up (or creates) the protecode product, polls until the
// backend has finished, stores the report on disk and returns the parsed
// vulnerability counters. An invalid product id or a failed scan (status "F")
// terminates the step.
func executeProtecodeScan(client protecode.Protecode, config *protecodeExecuteScanOptions, fileName string, writeReportToFile func(resp io.ReadCloser, reportFileName string) error) map[string]int {
	var parsedResult map[string]int = make(map[string]int)
	// load existing product by filename
	log.Entry().Debugf("Load existing product Group:%v Reuse:%v", config.Group, config.ReuseExisting)
	productID := client.LoadExistingProduct(config.Group, config.ReuseExisting)
	// upload the file or declare the fetch URL if no reusable product was found
	productID = uploadScanOrDeclareFetch(*config, productID, client, fileName)
	if productID <= 0 {
		log.Entry().Fatalf("The product id is not valid (product id %v <= zero)", productID)
	}
	// poll the backend until the scan result is available or the timeout hits
	log.Entry().Debugf("Poll for scan result %v", productID)
	result := client.PollForResult(productID, config.TimeoutMinutes)
	// NOTE(review): marshal and write errors are deliberately ignored here;
	// the raw result dump in protecodescan_vulns.json is best effort only
	jsonData, _ := json.Marshal(result)
	filePath := filepath.Join(reportPath, "protecodescan_vulns.json")
	ioutil.WriteFile(filePath, jsonData, 0644)
	// status "F" means the backend failed the scan — abort with a pointer to the UI
	if len(result.Result.Status) > 0 && result.Result.Status == "F" {
		log.Entry().Fatalf("Please check the log and protecode backend for more details. URL: %v/products/%v", config.ServerURL, productID)
	}
	// download the (PDF) report from the backend
	log.Entry().Debugf("Load report %v for %v", config.ReportFileName, productID)
	resp := client.LoadReport(config.ReportFileName, productID)
	// save report to filesystem; on failure return the (empty) counters early
	err := writeReportToFile(*resp, config.ReportFileName)
	if err != nil {
		return parsedResult
	}
	// clean scan from server according to the configured cleanup mode
	log.Entry().Debugf("Delete scan %v for %v", config.CleanupMode, productID)
	client.DeleteScan(config.CleanupMode, productID)
	// count vulnerabilities and write the JSON summary report
	log.Entry().Debug("Parse scan reult")
	parsedResult, vulns := client.ParseResultForInflux(result.Result, config.ExcludeCVEs)
	log.Entry().Debug("Write report to filesystem")
	writeReportDataToJSONFile(config, parsedResult, productID, vulns, ioutil.WriteFile)
	return parsedResult
}
// setInfluxData maps the parsed scan result counters onto the influx
// measurement fields reported by the step.
func setInfluxData(influx *protecodeExecuteScanInflux, result map[string]int) {
	fields := &influx.protecodeData.fields
	fields.historicalVulnerabilities = fmt.Sprintf("%v", result["historical_vulnerabilities"])
	fields.triagedVulnerabilities = fmt.Sprintf("%v", result["triaged_vulnerabilities"])
	fields.excludedVulnerabilities = fmt.Sprintf("%v", result["excluded_vulnerabilities"])
	fields.minorVulnerabilities = fmt.Sprintf("%v", result["minor_vulnerabilities"])
	fields.majorVulnerabilities = fmt.Sprintf("%v", result["major_vulnerabilities"])
	fields.vulnerabilities = fmt.Sprintf("%v", result["vulnerabilities"])
}
// writeReportDataToJSONFile serializes the scan outcome into
// protecodeExecuteScan.json inside the report path, logs a human-readable
// summary and delegates the actual write to the injected writeToFile function
// (exchangeable for testing).
func writeReportDataToJSONFile(config *protecodeExecuteScanOptions, result map[string]int, productID int, vulns []protecode.Vuln, writeToFile func(f string, d []byte, p os.FileMode) error) {
	data := protecodeData{
		Target:                      config.ReportFileName,
		Mandatory:                   true,
		ProductID:                   fmt.Sprintf("%v", productID),
		ServerURL:                   config.ServerURL,
		FailOnSevereVulnerabilities: config.FailOnSevereVulnerabilities,
		ExcludeCVEs:                 config.ExcludeCVEs,
		Count:                       fmt.Sprintf("%v", result["count"]),
		Cvss2GreaterOrEqualSeven:    fmt.Sprintf("%v", result["cvss2GreaterOrEqualSeven"]),
		Cvss3GreaterOrEqualSeven:    fmt.Sprintf("%v", result["cvss3GreaterOrEqualSeven"]),
		ExcludedVulnerabilities:     fmt.Sprintf("%v", result["excluded_vulnerabilities"]),
		TriagedVulnerabilities:      fmt.Sprintf("%v", result["triaged_vulnerabilities"]),
		HistoricalVulnerabilities:   fmt.Sprintf("%v", result["historical_vulnerabilities"]),
		Vulnerabilities:             vulns,
	}
	jsonData, _ := json.Marshal(data)
	log.Entry().Infof("Protecode scan info, %v of which %v had a CVSS v2 score >= 7.0 and %v had a CVSS v3 score >= 7.0.\n %v vulnerabilities were excluded via configuration (%v) and %v vulnerabilities were triaged via the webUI.\nIn addition %v historical vulnerabilities were spotted. \n\n Vulnerabilities: %v",
		data.Count, data.Cvss2GreaterOrEqualSeven, data.Cvss3GreaterOrEqualSeven, data.ExcludedVulnerabilities, data.ExcludeCVEs, data.TriagedVulnerabilities, data.HistoricalVulnerabilities, data.Vulnerabilities)
	writeToFile(filepath.Join(reportPath, "protecodeExecuteScan.json"), jsonData, 0644)
}
// createClient builds a protecode API client from the step configuration.
// The configured timeout (in minutes) is parsed into a duration; on parse
// errors the default of one minute is kept and a warning is logged.
func createClient(config *protecodeExecuteScanOptions) protecode.Protecode {
	duration := time.Duration(time.Minute * 1)
	if len(config.TimeoutMinutes) > 0 {
		parsed, err := time.ParseDuration(fmt.Sprintf("%vm", config.TimeoutMinutes))
		if err == nil {
			duration = parsed
		} else {
			log.Entry().Warnf("Failed to parse timeout %v, switched back to default timeout %v minutes", config.TimeoutMinutes, duration)
		}
	}
	pc := protecode.Protecode{}
	pc.SetOptions(protecode.Options{
		ServerURL: config.ServerURL,
		Logger:    log.Entry().WithField("package", "SAP/jenkins-library/pkg/protecode"),
		Duration:  duration,
		Username:  config.User,
		Password:  config.Password,
	})
	return pc
}
// createDockerClient builds a docker download client for the configured scan
// image, registry URL and local file path.
func createDockerClient(config *protecodeExecuteScanOptions) piperDocker.Download {
	client := &piperDocker.Client{}
	client.SetOptions(piperDocker.ClientOptions{
		ImageName:     config.ScanImage,
		RegistryURL:   config.DockerRegistryURL,
		LocalPath:     config.FilePath,
		IncludeLayers: config.IncludeLayers,
	})
	return client
}
// uploadScanOrDeclareFetch makes sure a protecode product exists for this
// scan: when no reusable product was found it either declares the configured
// fetch URL at the backend or uploads the prepared file. It returns the
// resulting product id.
func uploadScanOrDeclareFetch(config protecodeExecuteScanOptions, productID int, client protecode.Protecode, fileName string) int {
	// skip this when LoadExistingProduct (called before) already returned a
	// valid product id and reuse is allowed
	if !hasExisting(productID, config.ReuseExisting) {
		if len(config.FetchURL) > 0 {
			log.Entry().Debugf("Declare fetch url %v", config.FetchURL)
			resultData := client.DeclareFetchURL(config.CleanupMode, config.Group, config.FetchURL)
			productID = resultData.Result.ProductID
		} else {
			log.Entry().Debugf("Upload file path: %v", config.FilePath)
			if len(config.FilePath) <= 0 {
				log.Entry().Fatalf("There is no file path configured for upload : %v", config.FilePath)
			}
			pathToFile := filepath.Join(config.FilePath, fileName)
			if !(fileExists(pathToFile)) {
				log.Entry().Fatalf("There is no file for upload: %v", pathToFile)
			}
			// prefix the upload name with the pull request name (if any) so
			// PR scans do not collide with mainline scans in the same group
			combinedFileName := fileName
			if len(config.PullRequestName) > 0 {
				combinedFileName = fmt.Sprintf("%v_%v", config.PullRequestName, fileName)
			}
			resultData := client.UploadScanFile(config.CleanupMode, config.Group, pathToFile, combinedFileName)
			productID = resultData.Result.ProductID
		}
	}
	return productID
}
// fileExists reports whether filename exists and is a regular file (not a
// directory). Any stat error — not only "not exist" but also e.g. ENOTDIR or
// permission problems — yields false; the previous os.IsNotExist-only check
// left info nil for other errors and panicked on info.IsDir().
func fileExists(filename string) bool {
	info, err := os.Stat(filename)
	if err != nil {
		return false
	}
	return !info.IsDir()
}
// hasExisting reports whether an existing protecode product can be used:
// either a valid product id (> 0) was found or reuse of existing products is
// explicitly requested via configuration. (Simplified from the former
// if/return-true/return-false chain to a single boolean expression.)
func hasExisting(productID int, reuseExisting bool) bool {
	return productID > 0 || reuseExisting
}
// writeReportToFile streams the report response into a file below the report
// path; declared as a variable so tests can swap the implementation.
var writeReportToFile = func(resp io.ReadCloser, reportFileName string) error {
	target := filepath.Join(reportPath, reportFileName)
	out, err := os.Create(target)
	if err != nil {
		return err
	}
	defer out.Close()
	_, err = io.Copy(out, resp)
	return err
}

View File

@ -0,0 +1,306 @@
package cmd
import (
"fmt"
"os"
"path/filepath"
"time"
"github.com/SAP/jenkins-library/pkg/config"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/SAP/jenkins-library/pkg/piperenv"
"github.com/SAP/jenkins-library/pkg/telemetry"
"github.com/spf13/cobra"
)
// protecodeExecuteScanOptions holds all step configuration parameters, filled
// from command line flags and the piper configuration (generated; see
// protecodeExecuteScanMetadata for the parameter definitions).
type protecodeExecuteScanOptions struct {
	ExcludeCVEs                 string `json:"excludeCVEs,omitempty"`
	FailOnSevereVulnerabilities bool   `json:"failOnSevereVulnerabilities,omitempty"`
	ScanImage                   string `json:"scanImage,omitempty"`
	DockerRegistryURL           string `json:"dockerRegistryUrl,omitempty"`
	CleanupMode                 string `json:"cleanupMode,omitempty"`
	FilePath                    string `json:"filePath,omitempty"`
	IncludeLayers               bool   `json:"includeLayers,omitempty"`
	AddSideBarLink              bool   `json:"addSideBarLink,omitempty"`
	TimeoutMinutes              string `json:"timeoutMinutes,omitempty"`
	ServerURL                   string `json:"serverUrl,omitempty"`
	ReportFileName              string `json:"reportFileName,omitempty"`
	FetchURL                    string `json:"fetchUrl,omitempty"`
	Group                       string `json:"group,omitempty"`
	ReuseExisting               bool   `json:"reuseExisting,omitempty"`
	User                        string `json:"user,omitempty"`
	Password                    string `json:"password,omitempty"`
	ArtifactVersion             string `json:"artifactVersion,omitempty"`
	PullRequestName             string `json:"pullRequestName,omitempty"`
}

// protecodeExecuteScanInflux holds the influx measurement fields the step
// reports; persisted via the persist method.
type protecodeExecuteScanInflux struct {
	protecodeData struct {
		fields struct {
			historicalVulnerabilities string
			triagedVulnerabilities    string
			excludedVulnerabilities   string
			majorVulnerabilities      string
			minorVulnerabilities      string
			vulnerabilities           string
		}
		tags struct {
		}
	}
}
// persist writes all influx measurement fields below path/resourceName so a
// surrounding pipeline can pick them up. The process exits with code 1 when
// any field could not be written.
func (i *protecodeExecuteScanInflux) persist(path, resourceName string) {
	type influxMetric struct {
		measurement string
		valType     string
		name        string
		value       string
	}
	metrics := []influxMetric{
		{valType: config.InfluxField, measurement: "protecodeData", name: "historicalVulnerabilities", value: i.protecodeData.fields.historicalVulnerabilities},
		{valType: config.InfluxField, measurement: "protecodeData", name: "triagedVulnerabilities", value: i.protecodeData.fields.triagedVulnerabilities},
		{valType: config.InfluxField, measurement: "protecodeData", name: "excludedVulnerabilities", value: i.protecodeData.fields.excludedVulnerabilities},
		{valType: config.InfluxField, measurement: "protecodeData", name: "majorVulnerabilities", value: i.protecodeData.fields.majorVulnerabilities},
		{valType: config.InfluxField, measurement: "protecodeData", name: "minorVulnerabilities", value: i.protecodeData.fields.minorVulnerabilities},
		{valType: config.InfluxField, measurement: "protecodeData", name: "vulnerabilities", value: i.protecodeData.fields.vulnerabilities},
	}
	failures := 0
	for _, metric := range metrics {
		if err := piperenv.SetResourceParameter(path, resourceName, filepath.Join(metric.measurement, fmt.Sprintf("%vs", metric.valType), metric.name), metric.value); err != nil {
			log.Entry().WithError(err).Error("Error persisting influx environment.")
			failures++
		}
	}
	if failures > 0 {
		os.Exit(1)
	}
}
// ProtecodeExecuteScanCommand Protecode is an Open Source Vulnerability Scanner that is capable of scanning binaries. It can be used to scan docker images but is supports many other programming languages especially those of the C family. You can find more details on its capabilities in the [OS3 - Open Source Software Security JAM](https://jam4.sapjam.com/groups/XgeUs0CXItfeWyuI4k7lM3/overview_page/aoAsA0k4TbezGFyOkhsXFs). For getting access to Protecode please visit the [guide](https://go.sap.corp/protecode).
func ProtecodeExecuteScanCommand() *cobra.Command {
	metadata := protecodeExecuteScanMetadata()
	var stepConfig protecodeExecuteScanOptions
	var startTime time.Time
	var influx protecodeExecuteScanInflux
	var createProtecodeExecuteScanCmd = &cobra.Command{
		Use:   "protecodeExecuteScan",
		Short: "Protecode is an Open Source Vulnerability Scanner that is capable of scanning binaries. It can be used to scan docker images but is supports many other programming languages especially those of the C family. You can find more details on its capabilities in the [OS3 - Open Source Software Security JAM](https://jam4.sapjam.com/groups/XgeUs0CXItfeWyuI4k7lM3/overview_page/aoAsA0k4TbezGFyOkhsXFs). For getting access to Protecode please visit the [guide](https://go.sap.corp/protecode).",
		Long: `Protecode is an Open Source Vulnerability Scanner that is capable of scanning binaries. It can be used to scan docker images but is supports many other programming languages especially those of the C family. You can find more details on its capabilities in the [OS3 - Open Source Software Security JAM](https://jam4.sapjam.com/groups/XgeUs0CXItfeWyuI4k7lM3/overview_page/aoAsA0k4TbezGFyOkhsXFs). For getting access to Protecode please visit the [guide](https://go.sap.corp/protecode).
!!! info "New: Using protecodeExecuteScan for Docker images on JaaS"
**This step now also works on "Jenkins as a Service (JaaS)"!**<br />
For the JaaS use case where the execution happens in a Kubernetes cluster without access to a Docker daemon [skopeo](https://github.com/containers/skopeo) is now used silently in the background to save a Docker image retrieved from a registry.
!!! hint "Auditing findings (Triaging)"
Triaging is now supported by the Protecode backend and also Piper does consider this information during the analysis of the scan results though product versions are not supported by Protecode. Therefore please make sure that the ` + "`" + `fileName` + "`" + ` you are providing does either contain a stable version or that it does not contain one at all. By ensuring that you are able to triage CVEs globally on the upload file's name without affecting any other artifacts scanned in the same Protecode group and as such triaged vulnerabilities will be considered during the next scan and will not fail the build anymore.`,
		// PreRunE resolves the step configuration from flags, defaults and
		// the piper config file before the step runs
		PreRunE: func(cmd *cobra.Command, args []string) error {
			startTime = time.Now()
			log.SetStepName("protecodeExecuteScan")
			log.SetVerbose(GeneralConfig.Verbose)
			return PrepareConfig(cmd, &metadata, "protecodeExecuteScan", &stepConfig, config.OpenPiperFile)
		},
		Run: func(cmd *cobra.Command, args []string) {
			telemetryData := telemetry.CustomData{}
			// ErrorCode stays "1" unless the step finishes successfully, so
			// the deferred handler reports failures (including fatal exits)
			telemetryData.ErrorCode = "1"
			handler := func() {
				influx.persist(GeneralConfig.EnvRootPath, "influx")
				telemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
				telemetry.Send(&telemetryData)
			}
			// run the handler both on normal return and on log.Entry().Fatal
			log.DeferExitHandler(handler)
			defer handler()
			telemetry.Initialize(GeneralConfig.NoTelemetry, "protecodeExecuteScan")
			protecodeExecuteScan(stepConfig, &telemetryData, &influx)
			telemetryData.ErrorCode = "0"
		},
	}
	addProtecodeExecuteScanFlags(createProtecodeExecuteScanCmd, &stepConfig)
	return createProtecodeExecuteScanCmd
}
// addProtecodeExecuteScanFlags registers all step parameters as cobra flags
// on cmd, binding them to the fields of stepConfig.
func addProtecodeExecuteScanFlags(cmd *cobra.Command, stepConfig *protecodeExecuteScanOptions) {
	cmd.Flags().StringVar(&stepConfig.ExcludeCVEs, "excludeCVEs", "[]", "DEPRECATED: Do use triaging within the Protecode UI instead")
	cmd.Flags().BoolVar(&stepConfig.FailOnSevereVulnerabilities, "failOnSevereVulnerabilities", true, "Whether to fail the job on severe vulnerabilties or not")
	cmd.Flags().StringVar(&stepConfig.ScanImage, "scanImage", os.Getenv("PIPER_scanImage"), "The reference to the docker image to scan with Protecode")
	cmd.Flags().StringVar(&stepConfig.DockerRegistryURL, "dockerRegistryUrl", os.Getenv("PIPER_dockerRegistryUrl"), "The reference to the docker registry to scan with Protecode")
	cmd.Flags().StringVar(&stepConfig.CleanupMode, "cleanupMode", "binary", "Decides which parts are removed from the Protecode backend after the scan")
	cmd.Flags().StringVar(&stepConfig.FilePath, "filePath", os.Getenv("PIPER_filePath"), "The path to the file from local workspace to scan with Protecode")
	cmd.Flags().BoolVar(&stepConfig.IncludeLayers, "includeLayers", false, "Flag if the docker layers should be included")
	cmd.Flags().BoolVar(&stepConfig.AddSideBarLink, "addSideBarLink", true, "Whether to create a side bar link pointing to the report produced by Protecode or not")
	cmd.Flags().StringVar(&stepConfig.TimeoutMinutes, "timeoutMinutes", "60", "The timeout to wait for the scan to finish")
	cmd.Flags().StringVar(&stepConfig.ServerURL, "serverUrl", os.Getenv("PIPER_serverUrl"), "The URL to the Protecode backend")
	cmd.Flags().StringVar(&stepConfig.ReportFileName, "reportFileName", "protecode_report.pdf", "The file name of the report to be created")
	cmd.Flags().StringVar(&stepConfig.FetchURL, "fetchUrl", os.Getenv("PIPER_fetchUrl"), "The URL to fetch the file to scan with Protecode which must be accessible via public HTTP GET request")
	cmd.Flags().StringVar(&stepConfig.Group, "group", os.Getenv("PIPER_group"), "The Protecode group ID of your team")
	cmd.Flags().BoolVar(&stepConfig.ReuseExisting, "reuseExisting", false, "Whether to reuse an existing product instead of creating a new one")
	cmd.Flags().StringVar(&stepConfig.User, "user", os.Getenv("PIPER_user"), "User which is used for the protecode scan")
	cmd.Flags().StringVar(&stepConfig.Password, "password", os.Getenv("PIPER_password"), "Password which is used for the user")
	cmd.Flags().StringVar(&stepConfig.ArtifactVersion, "artifactVersion", os.Getenv("PIPER_artifactVersion"), "The version of the artifact to allow identification in protecode backend")
	cmd.Flags().StringVar(&stepConfig.PullRequestName, "pullRequestName", os.Getenv("PIPER_pullRequestName"), "The name of the pull request")
	// NOTE(review): MarkFlagRequired returns an error for unknown flag names;
	// it is ignored here since all names above are registered right before
	cmd.MarkFlagRequired("serverUrl")
	cmd.MarkFlagRequired("group")
	cmd.MarkFlagRequired("user")
	cmd.MarkFlagRequired("password")
}
// protecodeExecuteScanMetadata returns the step metadata: the declaration of
// every step parameter with its scopes, type, mandatory flag and aliases,
// used by PrepareConfig to resolve the effective configuration.
func protecodeExecuteScanMetadata() config.StepData {
	var theMetaData = config.StepData{
		Spec: config.StepSpec{
			Inputs: config.StepInputs{
				Parameters: []config.StepParameters{
					{
						Name:        "excludeCVEs",
						ResourceRef: []config.ResourceReference{},
						Scope:       []string{"PARAMETERS", "STAGES", "STEPS"},
						Type:        "string",
						Mandatory:   false,
						Aliases:     []config.Alias{{Name: "protecodeExcludeCVEs"}},
					},
					{
						Name:        "failOnSevereVulnerabilities",
						ResourceRef: []config.ResourceReference{},
						Scope:       []string{"PARAMETERS", "STAGES", "STEPS"},
						Type:        "bool",
						Mandatory:   false,
						Aliases:     []config.Alias{{Name: "protecodeFailOnSevereVulnerabilities"}},
					},
					{
						// defaults to the image published by the pipeline run
						Name:        "scanImage",
						ResourceRef: []config.ResourceReference{{Name: "commonPipelineEnvironment", Param: "container/imageNameTag"}},
						Scope:       []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
						Type:        "string",
						Mandatory:   false,
						Aliases:     []config.Alias{{Name: "dockerImage"}},
					},
					{
						Name:        "dockerRegistryUrl",
						ResourceRef: []config.ResourceReference{{Name: "commonPipelineEnvironment", Param: "container/registryUrl"}},
						Scope:       []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
						Type:        "string",
						Mandatory:   false,
						Aliases:     []config.Alias{},
					},
					{
						Name:        "cleanupMode",
						ResourceRef: []config.ResourceReference{},
						Scope:       []string{"PARAMETERS", "STAGES", "STEPS"},
						Type:        "string",
						Mandatory:   false,
						Aliases:     []config.Alias{},
					},
					{
						Name:        "filePath",
						ResourceRef: []config.ResourceReference{},
						Scope:       []string{"PARAMETERS", "STAGES", "STEPS"},
						Type:        "string",
						Mandatory:   false,
						Aliases:     []config.Alias{},
					},
					{
						Name:        "includeLayers",
						ResourceRef: []config.ResourceReference{},
						Scope:       []string{"PARAMETERS", "STAGES", "STEPS"},
						Type:        "bool",
						Mandatory:   false,
						Aliases:     []config.Alias{},
					},
					{
						Name:        "addSideBarLink",
						ResourceRef: []config.ResourceReference{},
						Scope:       []string{"PARAMETERS", "STAGES", "STEPS"},
						Type:        "bool",
						Mandatory:   false,
						Aliases:     []config.Alias{},
					},
					{
						Name:        "timeoutMinutes",
						ResourceRef: []config.ResourceReference{},
						Scope:       []string{"PARAMETERS", "STAGES", "STEPS"},
						Type:        "string",
						Mandatory:   false,
						Aliases:     []config.Alias{{Name: "protecodeTimeoutMinutes"}},
					},
					{
						Name:        "serverUrl",
						ResourceRef: []config.ResourceReference{},
						Scope:       []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
						Type:        "string",
						Mandatory:   true,
						Aliases:     []config.Alias{{Name: "protecodeServerUrl"}},
					},
					{
						Name:        "reportFileName",
						ResourceRef: []config.ResourceReference{},
						Scope:       []string{"PARAMETERS", "STAGES", "STEPS"},
						Type:        "string",
						Mandatory:   false,
						Aliases:     []config.Alias{},
					},
					{
						Name:        "fetchUrl",
						ResourceRef: []config.ResourceReference{},
						Scope:       []string{"PARAMETERS", "STAGES", "STEPS"},
						Type:        "string",
						Mandatory:   false,
						Aliases:     []config.Alias{},
					},
					{
						Name:        "group",
						ResourceRef: []config.ResourceReference{},
						Scope:       []string{"PARAMETERS", "STAGES", "STEPS"},
						Type:        "string",
						Mandatory:   true,
						Aliases:     []config.Alias{{Name: "protecodeGroup"}},
					},
					{
						Name:        "reuseExisting",
						ResourceRef: []config.ResourceReference{},
						Scope:       []string{"PARAMETERS", "STAGES", "STEPS"},
						Type:        "bool",
						Mandatory:   false,
						Aliases:     []config.Alias{},
					},
					{
						Name:        "user",
						ResourceRef: []config.ResourceReference{},
						Scope:       []string{"PARAMETERS", "STAGES", "STEPS"},
						Type:        "string",
						Mandatory:   true,
						Aliases:     []config.Alias{},
					},
					{
						Name:        "password",
						ResourceRef: []config.ResourceReference{},
						Scope:       []string{"PARAMETERS", "STAGES", "STEPS"},
						Type:        "string",
						Mandatory:   true,
						Aliases:     []config.Alias{},
					},
					{
						Name:        "artifactVersion",
						ResourceRef: []config.ResourceReference{{Name: "commonPipelineEnvironment", Param: "artifactVersion"}},
						Scope:       []string{"PARAMETERS", "STAGES", "STEPS"},
						Type:        "string",
						Mandatory:   false,
						Aliases:     []config.Alias{},
					},
					{
						Name:        "pullRequestName",
						ResourceRef: []config.ResourceReference{},
						Scope:       []string{"PARAMETERS", "STAGES", "STEPS"},
						Type:        "string",
						Mandatory:   false,
						Aliases:     []config.Alias{},
					},
				},
			},
		},
	}
	return theMetaData
}

View File

@ -0,0 +1,16 @@
package cmd
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestProtecodeExecuteScanCommand performs only high-level testing of the
// generated command — details are covered by the step generation procedure.
func TestProtecodeExecuteScanCommand(t *testing.T) {
	cmd := ProtecodeExecuteScanCommand()
	assert.Equal(t, "protecodeExecuteScan", cmd.Use, "command name incorrect")
}

View File

@ -0,0 +1,377 @@
package cmd
import (
"testing"
"bytes"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/http/httptest"
"net/url"
"os"
"path/filepath"
"strings"
"time"
pkgutil "github.com/GoogleContainerTools/container-diff/pkg/util"
"github.com/SAP/jenkins-library/pkg/protecode"
"github.com/stretchr/testify/assert"
)
// DockerClientMock is a test double for the docker download client; the
// fields mirror the configuration normally passed via ClientOptions.
type DockerClientMock struct {
	imageName     string
	registryURL   string
	localPath     string
	includeLayers bool
}

// Download interface for download an image to a local path
type Download interface {
	GetImageSource() (string, error)
	DownloadImageToPath(imageSource, filePath string) (pkgutil.Image, error)
	TarImage(writer io.Writer, image pkgutil.Image) error
}

// prefixes marking whether an image source is read from the local docker
// daemon or from a remote registry
const (
	daemonPrefix = "daemon://"
	remotePrefix = "remote://"
)
// GetImageSource returns the docker image source: a remote registry
// reference, a local daemon reference or a local tar path, depending on
// which mock fields are set. An empty result yields an error.
func (c *DockerClientMock) GetImageSource() (string, error) {
	imageSource := c.imageName
	if len(c.registryURL) > 0 && len(c.localPath) <= 0 {
		registry := c.registryURL
		// remove the protocol from registryURL to get the plain registry
		// host; the local is named parsedURL so it no longer shadows the
		// imported net/url package
		parsedURL, _ := url.Parse(c.registryURL)
		if len(parsedURL.Scheme) > 0 {
			registry = strings.Replace(c.registryURL, fmt.Sprintf("%v://", parsedURL.Scheme), "", 1)
		}
		if strings.HasSuffix(registry, "/") {
			imageSource = fmt.Sprintf("%v%v%v", remotePrefix, registry, c.imageName)
		} else {
			imageSource = fmt.Sprintf("%v%v/%v", remotePrefix, registry, c.imageName)
		}
	} else if len(c.localPath) > 0 {
		imageSource = c.localPath
		if !pkgutil.IsTar(c.localPath) {
			imageSource = fmt.Sprintf("%v%v", daemonPrefix, c.localPath)
		}
	}
	if len(imageSource) <= 0 {
		return imageSource, fmt.Errorf("There is no image source for the parameters: (Name: %v, Registry: %v, local Path: %v)", c.imageName, c.registryURL, c.localPath)
	}
	return imageSource, nil
}
//DownloadImageToPath download the image to the specified path
// (mock: performs no download and returns an empty image)
func (c *DockerClientMock) DownloadImageToPath(imageSource, filePath string) (pkgutil.Image, error) {
	return pkgutil.Image{}, nil
}

//TarImage write a tar from the given image
// (mock: writes nothing and reports success)
func (c *DockerClientMock) TarImage(writer io.Writer, image pkgutil.Image) error {
	return nil
}
// TestRunProtecodeScan wires runProtecodeScan against a stubbed protecode
// backend (httptest server) and a docker client mock, covering both the
// fetch-URL flow and the upload flow.
func TestRunProtecodeScan(t *testing.T) {
	requestURI := ""
	dir, err := ioutil.TempDir("", "t")
	if err != nil {
		t.Fatal("Failed to create temporary directory")
	}
	// clean up tmp dir
	defer os.RemoveAll(dir)
	testFile, err := ioutil.TempFile(dir, "t.tar")
	if err != nil {
		t.FailNow()
	}
	fileName := filepath.Base(testFile.Name())
	path := strings.ReplaceAll(testFile.Name(), fileName, "")
	// stub backend: answers the product, fetch, report and upload endpoints
	server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
		requestURI = req.RequestURI
		var b bytes.Buffer
		if requestURI == "/api/product/4486/" || requestURI == "/api/product/4711/" {
			violations := filepath.Join("testdata/TestProtecode", "protecode_result_violations.json")
			byteContent, err := ioutil.ReadFile(violations)
			if err != nil {
				t.Fatalf("failed reading %v", violations)
			}
			response := protecode.ResultData{Result: protecode.Result{ProductID: 4711, ReportURL: requestURI}}
			// NOTE(review): the Unmarshal error is assigned but never checked
			err = json.Unmarshal(byteContent, &response)
			json.NewEncoder(&b).Encode(response)
		} else if requestURI == "/api/fetch/" {
			violations := filepath.Join("testdata/TestProtecode", "protecode_result_violations.json")
			byteContent, err := ioutil.ReadFile(violations)
			if err != nil {
				t.Fatalf("failed reading %v", violations)
			}
			response := protecode.ResultData{Result: protecode.Result{ProductID: 4486, ReportURL: requestURI}}
			err = json.Unmarshal(byteContent, &response)
			json.NewEncoder(&b).Encode(response)
		} else if requestURI == "/api/product/4486/pdf-report" {
			// report download: respond with an empty body
		} else if requestURI == "/api/upload/t.tar" {
			response := protecode.ResultData{Result: protecode.Result{ProductID: 4486, ReportURL: requestURI}}
			// NOTE(review): this inner buffer shadows the outer b; the
			// response is written here and the outer write below then appends
			// the still-empty outer buffer
			var b bytes.Buffer
			json.NewEncoder(&b).Encode(&response)
			rw.Write([]byte(b.Bytes()))
		} else {
			response := protecode.Result{ProductID: 4486, ReportURL: requestURI}
			json.NewEncoder(&b).Encode(&response)
		}
		rw.Write([]byte(b.Bytes()))
	}))
	// Close the server when test finishes
	defer server.Close()
	po := protecode.Options{ServerURL: server.URL}
	pc := protecode.Protecode{}
	pc.SetOptions(po)
	dClient := &DockerClientMock{imageName: "t", registryURL: "", localPath: path, includeLayers: false}
	influx := protecodeExecuteScanInflux{}
	// redirect report and cache locations into the temp dir
	reportPath = dir
	cachePath = dir
	t.Run("With tar as scan image", func(t *testing.T) {
		config := protecodeExecuteScanOptions{ServerURL: server.URL, TimeoutMinutes: "1", ReuseExisting: false, CleanupMode: "none", Group: "13", FetchURL: "/api/fetch/", ExcludeCVEs: "CVE-2018-1, CVE-2017-1000382", ReportFileName: "./cache/report-file.txt"}
		err = runProtecodeScan(&config, &influx, dClient)
		assert.Nil(t, err, "There should be no Error")
	})
	t.Run("Without tar as scan image", func(t *testing.T) {
		config := protecodeExecuteScanOptions{ServerURL: server.URL, ScanImage: "t", FilePath: path, TimeoutMinutes: "1", ReuseExisting: false, CleanupMode: "none", Group: "13", ExcludeCVEs: "CVE-2018-1, CVE-2017-1000382", ReportFileName: "./cache/report-file.txt"}
		err = runProtecodeScan(&config, &influx, dClient)
		assert.Nil(t, err, "There should be no Error")
	})
}
func TestHandleArtifactVersion(t *testing.T) {
cases := []struct {
version string
want string
}{
{"1.0.0-20200131085038+eeb7c1033339bfd404d21ec5e7dc05c80e9e985e", "1"},
{"2.20.20-20200131085038+eeb7c1033339bfd404d21ec5e7dc05c80e9e985e", "2"},
{"3.20.20-20200131085038+eeb7c1033339bfd404d21ec5e7dc05c80e9e985e", "3"},
{"4.20.20-20200131085038", "4"},
{"5.20.20-20200131085038+", "5"},
{"6.00", "6.00"},
{"7.20.20", "7.20.20"},
}
for _, c := range cases {
got := handleArtifactVersion(c.version)
assert.Equal(t, c.want, got)
}
}
// TestCreateClient checks that an HTTP client is created both for an empty
// and for an explicit timeout configuration.
func TestCreateClient(t *testing.T) {
	for _, timeout := range []string{"", "1"} {
		cfg := protecodeExecuteScanOptions{TimeoutMinutes: timeout}
		assert.NotNil(t, createClient(&cfg), "client should not be empty")
	}
}
// TestCreateDockerClient checks that a docker client is created both for a
// fully specified and for an empty configuration.
func TestCreateDockerClient(t *testing.T) {
	configs := []protecodeExecuteScanOptions{
		{ScanImage: "test", DockerRegistryURL: "url", FilePath: "path", IncludeLayers: false},
		{ScanImage: "", DockerRegistryURL: "", FilePath: "", IncludeLayers: true},
	}
	for _, cfg := range configs {
		assert.NotNil(t, createDockerClient(&cfg), "client should not be empty")
	}
}
// fileContent captures the bytes passed to writeToFileMock for assertions.
var fileContent string

// writeToFileMock is a test double for the file writer; it records the
// written bytes in fileContent instead of touching the filesystem.
func writeToFileMock(f string, d []byte, p os.FileMode) error {
	fileContent = string(d)
	return nil
}

// TestWriteReportDataToJSONFile checks that the report JSON written for a
// scan matches the expected serialization of config, counters, and
// vulnerability list.
func TestWriteReportDataToJSONFile(t *testing.T) {
	expected := "{\"target\":\"REPORTFILENAME\",\"mandatory\":true,\"productID\":\"4711\",\"serverUrl\":\"DUMMYURL\",\"count\":\"0\",\"cvss2GreaterOrEqualSeven\":\"4\",\"cvss3GreaterOrEqualSeven\":\"3\",\"excludedVulnerabilities\":\"2\",\"triagedVulnerabilities\":\"0\",\"historicalVulnerabilities\":\"1\",\"Vulnerabilities\":[{\"cve\":\"Vulnerability\",\"cvss\":2.5,\"cvss3_score\":\"5.5\"}]}"

	var parsedResult map[string]int = make(map[string]int)
	parsedResult["historical_vulnerabilities"] = 1
	parsedResult["excluded_vulnerabilities"] = 2
	parsedResult["cvss3GreaterOrEqualSeven"] = 3
	parsedResult["cvss2GreaterOrEqualSeven"] = 4
	parsedResult["vulnerabilities"] = 5

	config := protecodeExecuteScanOptions{ServerURL: "DUMMYURL", ReportFileName: "REPORTFILENAME"}

	writeReportDataToJSONFile(&config, parsedResult, 4711, []protecode.Vuln{{"Vulnerability", 2.5, "5.5"}}, writeToFileMock)
	assert.Equal(t, fileContent, expected, "content should be not empty")
}
// TestUploadScanOrDeclareFetch checks that a scan is started via fetch URL
// when one is configured and via file upload otherwise, against a stubbed
// Protecode backend that always answers with product 4711.
func TestUploadScanOrDeclareFetch(t *testing.T) {
	testFile, err := ioutil.TempFile("", "testFileUpload")
	if err != nil {
		t.FailNow()
	}
	defer os.RemoveAll(testFile.Name()) // clean up
	fileName := filepath.Base(testFile.Name())
	path := strings.ReplaceAll(testFile.Name(), fileName, "")

	requestURI := ""
	server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
		requestURI = req.RequestURI

		// fetch-URL variant
		if requestURI == "/api/fetch/" {
			response := protecode.ResultData{Result: protecode.Result{ProductID: 4711, ReportURL: requestURI}}
			var b bytes.Buffer
			json.NewEncoder(&b).Encode(&response)
			rw.Write([]byte(b.Bytes()))
		}
		// upload variant, with and without a pull-request prefix
		if requestURI == fmt.Sprintf("/api/upload/%v", fileName) || requestURI == fmt.Sprintf("/api/upload/PR_4711_%v", fileName) {
			response := protecode.ResultData{Result: protecode.Result{ProductID: 4711, ReportURL: requestURI}}
			var b bytes.Buffer
			json.NewEncoder(&b).Encode(&response)
			rw.Write([]byte(b.Bytes()))
		}
	}))
	// Close the server when test finishes
	defer server.Close()

	po := protecode.Options{ServerURL: server.URL}
	pc := protecode.Protecode{}
	pc.SetOptions(po)

	cases := []struct {
		reuse    bool
		clean    string
		group    string
		fetchURL string
		filePath string
		prName   string
		want     int
	}{
		{false, "test", "group1", "/api/fetch/", "", "", 4711},
		{false, "test", "group1", "", path, "", 4711},
		// NOTE(review): prName is part of the table but never passed into
		// config below — presumably PR naming is derived elsewhere; confirm.
		{false, "test", "group1", "", path, "PR_4711", 4711},
	}
	for _, c := range cases {
		config := protecodeExecuteScanOptions{ReuseExisting: c.reuse, CleanupMode: c.clean, Group: c.group, FetchURL: c.fetchURL, FilePath: c.filePath}
		got := uploadScanOrDeclareFetch(config, 0, pc, fileName)
		assert.Equal(t, c.want, got)
	}
}

// writeReportToFileMock is a no-op stand-in for the report writer so tests do
// not touch the filesystem.
func writeReportToFileMock(resp io.ReadCloser, reportFileName string) error {
	return nil
}
// TestExecuteProtecodeScan verifies the end-to-end scan flow against a
// stubbed Protecode backend: declaring the fetch, polling the result,
// downloading the report, and aggregating the vulnerability counters.
func TestExecuteProtecodeScan(t *testing.T) {
	requestURI := ""
	server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
		requestURI = req.RequestURI
		var b bytes.Buffer

		if requestURI == "/api/product/4711/" {
			// poll request: answer with the canned result containing violations
			violations := filepath.Join("testdata/TestProtecode", "protecode_result_violations.json")
			byteContent, err := ioutil.ReadFile(violations)
			if err != nil {
				t.Fatalf("failed reading %v", violations)
			}
			response := protecode.ResultData{}
			// previously this error was assigned but never checked
			if err = json.Unmarshal(byteContent, &response); err != nil {
				t.Fatalf("failed unmarshalling %v: %v", violations, err)
			}
			json.NewEncoder(&b).Encode(response)
		} else if requestURI == "/api/product/4711/pdf-report" {
			// report download: an empty body is sufficient for the test
		} else {
			response := protecode.ResultData{Result: protecode.Result{ProductID: 4711, ReportURL: requestURI}}
			json.NewEncoder(&b).Encode(&response)
		}

		rw.Write([]byte(b.Bytes()))
	}))
	// Close the server when test finishes
	defer server.Close()

	po := protecode.Options{ServerURL: server.URL, Duration: time.Minute * 3}
	pc := protecode.Protecode{}
	pc.SetOptions(po)

	cases := []struct {
		reuse    bool
		clean    string
		group    string
		fetchURL string
		want     int
	}{
		{false, "binary", "group1", "/api/fetch/", 4711},
	}
	for _, c := range cases {
		// fresh report directory per case
		dir, err := ioutil.TempDir("", "t")
		if err != nil {
			t.Fatal("Failed to create temporary directory")
		}
		// clean up tmp dir; NOTE(review): defer in a loop only fires at
		// function exit — fine with a single case, revisit when adding more
		defer os.RemoveAll(dir)
		reportPath = dir
		config := protecodeExecuteScanOptions{ReuseExisting: c.reuse, CleanupMode: c.clean, Group: c.group, FetchURL: c.fetchURL, TimeoutMinutes: "3", ExcludeCVEs: "CVE-2018-1, CVE-2017-1000382", ReportFileName: "./cache/report-file.txt"}

		got := executeProtecodeScan(pc, &config, "dummy", writeReportToFileMock)

		assert.Equal(t, 1125, got["historical_vulnerabilities"])
		assert.Equal(t, 0, got["triaged_vulnerabilities"])
		assert.Equal(t, 1, got["excluded_vulnerabilities"])
		assert.Equal(t, 129, got["cvss3GreaterOrEqualSeven"])
		assert.Equal(t, 13, got["cvss2GreaterOrEqualSeven"])
		assert.Equal(t, 226, got["vulnerabilities"])
	}
}

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,46 @@
# ${docGenStepName}
## ${docGenDescription}
## Prerequisites
1. Request creation of a team for your development group as described [here](http://go.sap.corp/protecode) and in addition request creation of a technical Protecode user through OS3 team
2. Create a Username / Password credential with the Protecode technical user in your Jenkins credential store
3. Supply the credential ID either via config.yml or on the step via parameter `protecodeCredentialsId`
4. Supply the **group ID** of the Protecode group via parameter `protecodeGroup`. You can either inquire this value from OS3 upon creation of the group or look it up yourself via REST API using `curl -u <place your user here> "https://protecode.mo.sap.corp/api/groups/"`.
## Example
Usage of pipeline step:
Workspace based:
```groovy
executeProtecodeScan script: this, filePath: 'dockerImage.tar'
```
Fetch URL:
```groovy
executeProtecodeScan script: this, fetchUrl: 'https://nexusrel.wdf.sap.corp:8443/nexus/service/local/repositories/build.releases.3rd-party.proxy.2018.04.13/content/org/alfresco/surf/spring-cmis-framework/6.11/spring-cmis-framework-6.11.jar'
```
Docker image:
```groovy
executeProtecodeScan script: this, dockerImage: 'docker.wdf.sap.corp:50000/piper/yeoman:1.0-20180321110554'
```
## ${docGenParameters}
### Details:
* The Protecode scan step is able to send a file addressed via parameter `filePath` to the backend for scanning it for known vulnerabilities.
* Alternatively an HTTP URL can be specified via `fetchUrl`. Protecode will then download the artifact from there and scan it.
* To support docker image scanning please provide `dockerImage` with a docker like URL pointing to the image tag within the docker registry being used. Our step uses [skopeo](https://github.com/containers/skopeo) to download the image and sends it to Protecode for scanning.
* To receive the result it polls until the job completes.
* Once the job has completed a PDF report is pulled from the backend and archived in the build
* Finally the scan result is being analysed for critical findings with a CVSS v3 score >= 7.0 and if such findings are detected the build is failed based on the configuration setting `protecodeFailOnSevereVulnerabilities`.
* During the analysis all CVEs which are either triaged in the Protecode backend or which are excluded via configuration parameter `protecodeExcludeCVEs` are ignored and will not provoke the build to fail.
### FAQs:
* In case of `dockerImage` and the step still tries to pull and save it via docker daemon, please make sure your JaaS environment has the variable `ON_K8S` declared and set to `true`.
## ${docGenConfiguration}

View File

@ -127,7 +127,12 @@ func (c *Client) SetOptions(options ClientOptions) {
c.username = options.Username
c.password = options.Password
c.token = options.Token
c.logger = options.Logger
if options.Logger != nil {
c.logger = options.Logger
} else {
c.logger = log.Entry().WithField("package", "SAP/jenkins-library/pkg/http")
}
c.cookieJar = options.CookieJar
}

427
pkg/protecode/protecode.go Normal file
View File

@ -0,0 +1,427 @@
package protecode
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"strconv"
"strings"
"time"
piperHttp "github.com/SAP/jenkins-library/pkg/http"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/sirupsen/logrus"
)
// ProductData holds the product information of the protecode product
type ProductData struct {
	Products []Product `json:"products,omitempty"`
}

// Product holds the id of the protecode product
type Product struct {
	ProductID int `json:"product_id,omitempty"`
}

// ResultData holds the information about the protecode result
type ResultData struct {
	Result Result `json:"results,omitempty"`
}

// Result holds the detail information about the protecode result
type Result struct {
	ProductID  int         `json:"product_id,omitempty"`
	ReportURL  string      `json:"report_url,omitempty"`
	Status     string      `json:"status,omitempty"` // "B" means the scan is still busy
	Components []Component `json:"components,omitempty"`
}

// Component holds the protecode component information
type Component struct {
	Vulns []Vulnerability `json:"vulns,omitempty"`
}

// Vulnerability holds the protecode vulnerability information
type Vulnerability struct {
	Exact  bool     `json:"exact,omitempty"` // true when the component match is exact; inexact matches are counted as historical
	Vuln   Vuln     `json:"vuln,omitempty"`
	Triage []Triage `json:"triage,omitempty"`
}

// Vuln holds the information about the vulnerability (CVE id, CVSS v2 and v3 scores)
type Vuln struct {
	Cve        string  `json:"cve,omitempty"`
	Cvss       float64 `json:"cvss,omitempty"`
	Cvss3Score string  `json:"cvss3_score,omitempty"`
}

// Triage holds the triaging information
type Triage struct {
	ID          int    `json:"id,omitempty"`
	VulnID      string `json:"vuln_id,omitempty"`
	Component   string `json:"component,omitempty"`
	Vendor      string `json:"vendor,omitempty"`
	Codetype    string `json:"codetype,omitempty"`
	Version     string `json:"version,omitempty"`
	Modified    string `json:"modified,omitempty"`
	Scope       string `json:"scope,omitempty"`
	Description string `json:"description,omitempty"`
	User        User   `json:"user,omitempty"`
}

// User holds the user information
type User struct {
	ID    int    `json:"id,omitempty"`
	Email string `json:"email,omitempty"`
	// NOTE(review): field name is misspelled (should be "Firstname"); the
	// json tag is correct, but renaming the exported field would break callers.
	Girstname string `json:"firstname,omitempty"`
	Lastname  string `json:"lastname,omitempty"`
	Username  string `json:"username,omitempty"`
}
// Protecode is the protecode client which is used by the step
type Protecode struct {
	serverURL string             // base URL of the Protecode backend
	client    piperHttp.Uploader // HTTP client used for all backend calls
	duration  time.Duration      // request timeout
	logger    *logrus.Entry      // step logger
}

// Options struct which can be used to configure the Protecode struct
type Options struct {
	ServerURL string        // base URL of the Protecode backend
	Duration  time.Duration // HTTP timeout
	Username  string        // basic-auth user
	Password  string        // basic-auth password
	Logger    *logrus.Entry // optional; a package-scoped default is used when nil
}
// SetOptions configures the Protecode client with the given options and
// initializes the underlying HTTP client accordingly.
func (pc *Protecode) SetOptions(options Options) {
	pc.serverURL = options.ServerURL
	pc.duration = options.Duration
	pc.client = &piperHttp.Client{}

	if options.Logger == nil {
		pc.logger = log.Entry().WithField("package", "SAP/jenkins-library/pkg/protecode")
	} else {
		pc.logger = options.Logger
	}

	pc.client.SetOptions(piperHttp.ClientOptions{Timeout: options.Duration, Username: options.Username, Password: options.Password, Logger: options.Logger})
}
// createURL assembles a request URL from the configured server URL, an API
// path, an optional path value (e.g. a product id), and an optional file
// query parameter. A malformed server URL aborts via Fatal.
func (pc *Protecode) createURL(path string, pValue string, fParam string) string {
	protecodeURL, err := url.Parse(pc.serverURL)
	if err != nil {
		pc.logger.WithError(err).Fatal("Malformed URL")
	}

	// plain concatenation; the previous fmt.Sprintf("%v", ...) wrappers were
	// no-ops, and appending an empty string is harmless
	protecodeURL.Path += path
	protecodeURL.Path += pValue

	// Prepare Query Parameters
	if len(fParam) > 0 {
		// NOTE(review): fParam is escaped twice — once here and once by
		// params.Encode(). Kept as-is since the backend apparently expects
		// this format; confirm before changing.
		encodedFParam := url.QueryEscape(fParam)
		params := url.Values{}
		params.Add("q", fmt.Sprintf("file:%v", encodedFParam))

		// Add Query Parameters to the URL
		protecodeURL.RawQuery = params.Encode() // Escape Query Parameters
	}

	return protecodeURL.String()
}
// mapResponse reads the whole response body and unmarshals it into response.
// The backend sometimes returns the JSON wrapped in an additional layer of
// quotes (a JSON-encoded string); in that case the payload is unquoted first.
// An empty body is silently ignored; a decode failure aborts via Fatalf.
func (pc *Protecode) mapResponse(r io.ReadCloser, response interface{}) {
	defer r.Close()

	buf := new(bytes.Buffer)
	buf.ReadFrom(r)
	payload := buf.String()
	if len(payload) == 0 {
		return
	}

	// If the body is a quoted JSON string, unwrap it before decoding.
	if unquoted, err := strconv.Unquote(payload); err == nil {
		payload = unquoted
	}
	if err := json.Unmarshal([]byte(payload), response); err != nil {
		pc.logger.WithError(err).Fatalf("Error during decode response: %v", payload)
	}
}
// sendAPIRequest performs a body-less HTTP request against the backend and
// returns a pointer to the response body reader.
// The nil guard prevents a nil-pointer dereference (&r.Body on a nil r) when
// the underlying client fails without producing a response.
func (pc *Protecode) sendAPIRequest(method string, url string, headers map[string][]string) (*io.ReadCloser, error) {
	r, err := pc.client.SendRequest(method, url, nil, headers, nil)
	if err != nil {
		return nil, err
	}
	return &r.Body, nil
}
// ParseResultForInflux aggregates the scan result into counters suitable for
// influx reporting and collects the list of relevant vulnerabilities, i.e.
// exact matches that are neither excluded via excludeCVEs nor triaged.
// Inexact matches only increment the historical counter.
func (pc *Protecode) ParseResultForInflux(result Result, excludeCVEs string) (map[string]int, []Vuln) {
	var vulns []Vuln
	m := map[string]int{
		"count":                      0,
		"cvss2GreaterOrEqualSeven":   0,
		"cvss3GreaterOrEqualSeven":   0,
		"historical_vulnerabilities": 0,
		"triaged_vulnerabilities":    0,
		"excluded_vulnerabilities":   0,
		"minor_vulnerabilities":      0,
		"major_vulnerabilities":      0,
		"vulnerabilities":            0,
	}

	for _, component := range result.Components {
		for _, vulnerability := range component.Vulns {
			// evaluate each predicate once instead of re-computing it per counter
			exact := isExact(vulnerability)
			excluded := isExcluded(vulnerability, excludeCVEs)
			triaged := isTriaged(vulnerability)

			if !exact {
				m["historical_vulnerabilities"]++
				continue
			}
			if excluded {
				m["excluded_vulnerabilities"]++
			}
			if triaged {
				m["triaged_vulnerabilities"]++
			}
			if excluded || triaged {
				continue
			}

			// relevant vulnerability: count it and classify its severity
			m["count"]++
			m["vulnerabilities"]++
			vulns = append(vulns, vulnerability.Vuln)

			severe := false
			if isSevereCVSS3(vulnerability) {
				m["cvss3GreaterOrEqualSeven"]++
				m["major_vulnerabilities"]++
				severe = true
			}
			if isSevereCVSS2(vulnerability) {
				m["cvss2GreaterOrEqualSeven"]++
				m["major_vulnerabilities"]++
				severe = true
			}
			if !severe {
				m["minor_vulnerabilities"]++
			}
		}
	}

	return m, vulns
}
// isExact reports whether the vulnerability was matched exactly.
func isExact(v Vulnerability) bool {
	return v.Exact
}

// isExcluded reports whether the vulnerability's CVE is contained in the
// comma-separated exclusion list.
func isExcluded(v Vulnerability, excludeCVEs string) bool {
	return strings.Contains(excludeCVEs, v.Vuln.Cve)
}

// isTriaged reports whether at least one triage entry exists for the
// vulnerability.
func isTriaged(v Vulnerability) bool {
	return len(v.Triage) > 0
}

// isSevereCVSS3 reports whether the CVSS v3 score reaches the severity
// threshold of 7.0.
func isSevereCVSS3(v Vulnerability) bool {
	const threshold = 7.0
	score, _ := strconv.ParseFloat(v.Vuln.Cvss3Score, 64)
	return score >= threshold
}

// isSevereCVSS2 reports whether the CVSS v2 score reaches the severity
// threshold of 7.0; it only applies when no CVSS v3 score is available.
func isSevereCVSS2(v Vulnerability) bool {
	const threshold = 7.0
	cvss3Score, _ := strconv.ParseFloat(v.Vuln.Cvss3Score, 64)
	return cvss3Score == 0 && v.Vuln.Cvss >= threshold
}
// DeleteScan removes the scan from the Protecode backend depending on the
// configured cleanup mode:
//   - "none", "binary": nothing is deleted on the server side
//   - "complete": the product (and thereby the scan result) is deleted
//
// Any other mode aborts via Fatalf.
func (pc *Protecode) DeleteScan(cleanupMode string, productID int) {
	switch cleanupMode {
	case "none", "binary":
		// nothing to clean up on the server side
	case "complete":
		pc.logger.Info("Deleting scan from server.")
		protecodeURL := pc.createURL("/api/product/", fmt.Sprintf("%v/", productID), "")
		// deletion is best-effort cleanup: log failures instead of aborting
		// (the previous implementation silently ignored the error)
		if _, err := pc.sendAPIRequest(http.MethodDelete, protecodeURL, map[string][]string{}); err != nil {
			pc.logger.WithError(err).Warnf("Failed to delete scan %v", protecodeURL)
		}
	default:
		pc.logger.Fatalf("Unknown cleanup mode %v", cleanupMode)
	}
}
// LoadReport downloads the PDF report of the given product from the
// Protecode backend and returns the response body reader. A failing request
// aborts via Fatalf.
func (pc *Protecode) LoadReport(reportFileName string, productID int) *io.ReadCloser {
	reportURL := pc.createURL("/api/product/", fmt.Sprintf("%v/pdf-report", productID), "")
	headers := map[string][]string{
		"Cache-Control": {"no-cache, no-store, must-revalidate"},
		"Pragma":        {"no-cache"},
		"Outputfile":    {reportFileName},
	}

	readCloser, err := pc.sendAPIRequest(http.MethodGet, reportURL, headers)
	if err != nil {
		pc.logger.WithError(err).Fatalf("It is not possible to load report %v", reportURL)
	}
	return readCloser
}
// UploadScanFile uploads the file at filePath to the Protecode backend for
// scanning and returns the resulting scan metadata. The binary is deleted on
// the server afterwards when cleanup mode is "binary" or "complete".
func (pc *Protecode) UploadScanFile(cleanupMode, group, filePath, fileName string) *ResultData {
	deleteBinary := strconv.FormatBool(cleanupMode == "binary" || cleanupMode == "complete")
	headers := map[string][]string{
		"Group":         {group},
		"Delete-Binary": {deleteBinary},
	}

	uploadURL := fmt.Sprintf("%v/api/upload/%v", pc.serverURL, fileName)
	r, err := pc.client.UploadRequest(http.MethodPut, uploadURL, filePath, "file", headers, nil)
	if err != nil {
		pc.logger.WithError(err).Fatalf("Error during %v upload request", uploadURL)
	} else {
		pc.logger.Info("Upload successful")
	}

	result := new(ResultData)
	pc.mapResponse(r.Body, result)
	return result
}
// DeclareFetchURL registers a remote URL on the Protecode backend from which
// the artifact to scan is fetched, and returns the resulting scan metadata.
// The binary is deleted on the server afterwards when cleanup mode is
// "binary" or "complete".
func (pc *Protecode) DeclareFetchURL(cleanupMode, group, fetchURL string) *ResultData {
	deleteBinary := cleanupMode == "binary" || cleanupMode == "complete"
	headers := map[string][]string{
		"Group":         {group},
		"Delete-Binary": {fmt.Sprintf("%v", deleteBinary)},
		"Url":           {fetchURL},
		"Content-Type":  {"application/json"},
	}

	protecodeURL := fmt.Sprintf("%v/api/fetch/", pc.serverURL)
	r, err := pc.sendAPIRequest(http.MethodPost, protecodeURL, headers)
	if err != nil {
		pc.logger.WithError(err).Fatalf("Error during declare fetch url: %v", protecodeURL)
	}

	result := new(ResultData)
	pc.mapResponse(*r, result)
	return result
}
// PollForResult polls the Protecode backend every 10 seconds until the scan
// result is available or the timeout (in minutes; default 1) is exceeded.
// A result is considered ready once it has components and is no longer in
// status "B" (busy). If no result is available after polling, one final pull
// is attempted; failure to obtain a result then aborts via Fatal.
func (pc *Protecode) PollForResult(productID int, timeOutInMinutes string) ResultData {
	var response ResultData
	var err error

	ticker := time.NewTicker(10 * time.Second)
	defer ticker.Stop()

	// 6 ticks * 10s = 1 minute default
	var ticks int64 = 6
	if len(timeOutInMinutes) > 0 {
		// NOTE(review): a parse error yields 0 ticks (no polling loop, only
		// the final pull) — kept to preserve existing behavior
		parsedTimeOutInMinutes, _ := strconv.ParseInt(timeOutInMinutes, 10, 64)
		ticks = parsedTimeOutInMinutes * 6
	}

	pc.logger.Infof("Poll for result %v times", ticks)

	for i := ticks; i > 0; i-- {
		response, err = pc.pullResult(productID)
		if err != nil {
			// return the (possibly empty) response; callers detect the
			// missing result via its content
			return response
		}
		if len(response.Result.Components) > 0 && response.Result.Status != "B" {
			break
		}
		// wait for the next tick before polling again
		t := <-ticker.C
		pc.logger.Debugf("Tick : %v Processing status for productID %v", t, productID)
	}

	// last-chance pull when the loop ended without a usable result
	if len(response.Result.Components) == 0 || response.Result.Status == "B" {
		response, err = pc.pullResult(productID)
		if err != nil || len(response.Result.Components) == 0 || response.Result.Status == "B" {
			pc.logger.Fatal("No result after polling")
		}
	}
	return response
}
// pullResult fetches the current scan result for the given product from the
// backend. It returns an empty ResultData together with the error when the
// request fails.
func (pc *Protecode) pullResult(productID int) (ResultData, error) {
	statusURL := pc.createURL("/api/product/", fmt.Sprintf("%v/", productID), "")
	headers := map[string][]string{
		"acceptType": {"application/json"},
	}

	r, err := pc.sendAPIRequest(http.MethodGet, statusURL, headers)
	if err != nil {
		return ResultData{}, err
	}

	result := new(ResultData)
	pc.mapResponse(*r, result)
	return *result, nil
}
// LoadExistingProduct looks up an existing product for the given group on the
// Protecode backend. It returns the ID of the first product found, or -1 when
// re-use is disabled or no product exists yet for the group.
func (pc *Protecode) LoadExistingProduct(group string, reuseExisting bool) int {
	productID := -1
	if !reuseExisting {
		return productID
	}

	protecodeURL := pc.createURL("/api/apps/", fmt.Sprintf("%v/", group), "")
	headers := map[string][]string{
		"acceptType": {"application/json"},
	}
	response := pc.loadExisting(protecodeURL, headers)

	// guard against an empty product list — indexing [0] would panic
	if len(response.Products) == 0 {
		pc.logger.Infof("No existing Protecode product found for group %v", group)
		return productID
	}

	// by definition we will take the first one and trigger rescan
	productID = response.Products[0].ProductID
	pc.logger.Infof("Re-use existing Protecode scan - group: %v, productID: %v", group, productID)
	return productID
}
// loadExisting queries the given URL for existing products and decodes the
// response into a ProductData structure. A failing request aborts via Fatalf.
func (pc *Protecode) loadExisting(protecodeURL string, headers map[string][]string) *ProductData {
	r, err := pc.sendAPIRequest(http.MethodGet, protecodeURL, headers)
	if err != nil {
		pc.logger.WithError(err).Fatalf("Error during load existing product: %v", protecodeURL)
	}

	productData := new(ProductData)
	pc.mapResponse(*r, productData)
	return productData
}

View File

@ -0,0 +1,490 @@
package protecode
import (
"testing"
"bytes"
"encoding/json"
"fmt"
"io/ioutil"
"mime/multipart"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"strings"
"time"
piperHttp "github.com/SAP/jenkins-library/pkg/http"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/stretchr/testify/assert"
)
// TestMapResponse checks that mapResponse handles plain JSON, string-wrapped
// (doubly quoted) JSON, and the different protecode payload shapes.
func TestMapResponse(t *testing.T) {
	cases := []struct {
		give  string
		input interface{}
		want  interface{}
	}{
		{`"{}"`, new(Result), &Result{ProductID: 0}},
		{`{"product_id": 1}`, new(Result), &Result{ProductID: 1}},
		{`"{\"product_id\": 4711}"`, new(Result), &Result{ProductID: 4711}},
		{"{\"results\": {\"product_id\": 1}}", new(ResultData), &ResultData{Result: Result{ProductID: 1}}},
		{`{"results": {"status": "B", "id": 209396, "product_id": 209396, "report_url": "https://protecode.c.eu-de-2.cloud.sap/products/209396/"}}`, new(ResultData), &ResultData{Result: Result{ProductID: 209396, Status: "B", ReportURL: "https://protecode.c.eu-de-2.cloud.sap/products/209396/"}}},
		{`{"products": [{"product_id": 1}]}`, new(ProductData), &ProductData{Products: []Product{{ProductID: 1}}}},
	}
	pc := Protecode{}
	for _, c := range cases {
		r := ioutil.NopCloser(bytes.NewReader([]byte(c.give)))
		pc.mapResponse(r, c.input)
		assert.Equal(t, c.want, c.input)
	}
}
// TestParseResultSuccess checks the counter aggregation on a hand-built
// result covering exact/inexact matches, triaged and excluded CVEs, and both
// CVSS v2 and v3 severity classification.
func TestParseResultSuccess(t *testing.T) {
	var result Result = Result{
		ProductID: 4712,
		ReportURL: "ReportUrl",
		Status:    "B",
		Components: []Component{
			{Vulns: []Vulnerability{
				{Exact: true, Triage: []Triage{}, Vuln: Vuln{Cve: "Cve1", Cvss: 7.2, Cvss3Score: "0.0"}},
				{Exact: true, Triage: []Triage{{ID: 1}}, Vuln: Vuln{Cve: "Cve2", Cvss: 2.2, Cvss3Score: "2.3"}},
				{Exact: true, Triage: []Triage{}, Vuln: Vuln{Cve: "Cve2b", Cvss: 0.0, Cvss3Score: "0.0"}},
			},
			},
			{Vulns: []Vulnerability{
				{Exact: true, Triage: []Triage{}, Vuln: Vuln{Cve: "Cve3", Cvss: 3.2, Cvss3Score: "7.3"}},
				{Exact: true, Triage: []Triage{}, Vuln: Vuln{Cve: "Cve4", Cvss: 8.0, Cvss3Score: "8.0"}},
				{Exact: false, Triage: []Triage{}, Vuln: Vuln{Cve: "Cve4b", Cvss: 8.0, Cvss3Score: "8.0"}},
			},
			},
		},
	}
	pc := Protecode{}
	m, vulns := pc.ParseResultForInflux(result, "Excluded CVES: Cve4,")
	t.Run("Parse Protecode Results", func(t *testing.T) {
		assert.Equal(t, 1, m["historical_vulnerabilities"])
		assert.Equal(t, 1, m["triaged_vulnerabilities"])
		assert.Equal(t, 1, m["excluded_vulnerabilities"])
		assert.Equal(t, 1, m["minor_vulnerabilities"])
		assert.Equal(t, 2, m["major_vulnerabilities"])
		assert.Equal(t, 3, m["vulnerabilities"])
		assert.Equal(t, 3, len(vulns))
	})
}

// TestParseResultViolations checks the aggregation against the canned
// backend response containing violations.
func TestParseResultViolations(t *testing.T) {
	violations := filepath.Join("testdata", "protecode_result_violations.json")
	byteContent, err := ioutil.ReadFile(violations)
	if err != nil {
		t.Fatalf("failed reading %v", violations)
	}

	pc := Protecode{}
	resultData := new(ResultData)
	pc.mapResponse(ioutil.NopCloser(strings.NewReader(string(byteContent))), resultData)

	m, vulns := pc.ParseResultForInflux(resultData.Result, "CVE-2018-1, CVE-2017-1000382")
	t.Run("Parse Protecode Results", func(t *testing.T) {
		assert.Equal(t, 1125, m["historical_vulnerabilities"])
		assert.Equal(t, 0, m["triaged_vulnerabilities"])
		assert.Equal(t, 1, m["excluded_vulnerabilities"])
		assert.Equal(t, 129, m["cvss3GreaterOrEqualSeven"])
		assert.Equal(t, 13, m["cvss2GreaterOrEqualSeven"])
		assert.Equal(t, 226, m["vulnerabilities"])
		assert.Equal(t, 226, len(vulns))
	})
}

// TestParseResultNoViolations checks the aggregation against the canned
// backend response without violations: only historical entries are counted.
func TestParseResultNoViolations(t *testing.T) {
	noViolations := filepath.Join("testdata", "protecode_result_no_violations.json")
	byteContent, err := ioutil.ReadFile(noViolations)
	if err != nil {
		t.Fatalf("failed reading %v", noViolations)
	}

	pc := Protecode{}
	resultData := new(ResultData)
	pc.mapResponse(ioutil.NopCloser(strings.NewReader(string(byteContent))), resultData)

	m, vulns := pc.ParseResultForInflux(resultData.Result, "CVE-2018-1, CVE-2017-1000382")
	t.Run("Parse Protecode Results", func(t *testing.T) {
		assert.Equal(t, 27, m["historical_vulnerabilities"])
		assert.Equal(t, 0, m["triaged_vulnerabilities"])
		assert.Equal(t, 0, m["excluded_vulnerabilities"])
		assert.Equal(t, 0, m["cvss3GreaterOrEqualSeven"])
		assert.Equal(t, 0, m["cvss2GreaterOrEqualSeven"])
		assert.Equal(t, 0, m["vulnerabilities"])
		assert.Equal(t, 0, len(vulns))
	})
}

// TestParseResultTriaged checks the aggregation against the canned backend
// response where a large share of findings is triaged.
func TestParseResultTriaged(t *testing.T) {
	triaged := filepath.Join("testdata", "protecode_result_triaging.json")
	byteContent, err := ioutil.ReadFile(triaged)
	if err != nil {
		t.Fatalf("failed reading %v", triaged)
	}

	pc := Protecode{}
	resultData := new(ResultData)
	pc.mapResponse(ioutil.NopCloser(strings.NewReader(string(byteContent))), resultData)

	m, vulns := pc.ParseResultForInflux(resultData.Result, "")
	t.Run("Parse Protecode Results", func(t *testing.T) {
		assert.Equal(t, 1132, m["historical_vulnerabilities"])
		assert.Equal(t, 187, m["triaged_vulnerabilities"])
		assert.Equal(t, 0, m["excluded_vulnerabilities"])
		assert.Equal(t, 15, m["cvss3GreaterOrEqualSeven"])
		assert.Equal(t, 0, m["cvss2GreaterOrEqualSeven"])
		assert.Equal(t, 36, m["vulnerabilities"])
		assert.Equal(t, 36, len(vulns))
	})
}
// TestLoadExistingProductSuccess checks that the first existing product id is
// returned when re-use is enabled and -1 (no backend lookup) when disabled.
func TestLoadExistingProductSuccess(t *testing.T) {
	server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
		response := ProductData{
			Products: []Product{
				{ProductID: 1}},
		}

		var b bytes.Buffer
		json.NewEncoder(&b).Encode(&response)
		rw.Write([]byte(b.Bytes()))
	}))
	// Close the server when test finishes
	defer server.Close()

	client := &piperHttp.Client{}
	client.SetOptions(piperHttp.ClientOptions{})

	cases := []struct {
		pc             Protecode
		protecodeGroup string
		reuseExisting  bool
		want           int
	}{
		{Protecode{serverURL: server.URL, client: client, logger: log.Entry().WithField("package", "SAP/jenkins-library/pkg/protecode")}, "group", true, 1},
		// reuseExisting=false: no request is sent, so the nil logger is never used
		{Protecode{serverURL: server.URL, client: client}, "group32", false, -1},
	}
	for _, c := range cases {
		got := c.pc.LoadExistingProduct(c.protecodeGroup, c.reuseExisting)
		assert.Equal(t, c.want, got)
	}
}
// TestPollForResultSuccess checks that polling returns the finished result
// (status "D" with components) for different product ids.
func TestPollForResultSuccess(t *testing.T) {
	requestURI := ""
	var response ResultData = ResultData{}
	server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
		requestURI = req.RequestURI
		// answer with the product id encoded in the request path
		productID := 111
		if strings.Contains(requestURI, "222") {
			productID = 222
		}

		response = ResultData{Result: Result{ProductID: productID, ReportURL: requestURI, Status: "D", Components: []Component{
			{Vulns: []Vulnerability{
				{Triage: []Triage{{ID: 1}}}},
			}},
		}}

		var b bytes.Buffer
		json.NewEncoder(&b).Encode(&response)
		rw.Write([]byte(b.Bytes()))
	}))

	cases := []struct {
		productID int
		want      ResultData
	}{
		{111, ResultData{Result: Result{ProductID: 111, ReportURL: "/api/product/111/", Status: "D", Components: []Component{
			{Vulns: []Vulnerability{
				{Triage: []Triage{{ID: 1}}}},
			}},
		}}},
		{222, ResultData{Result: Result{ProductID: 222, ReportURL: "/api/product/222/", Status: "D", Components: []Component{
			{Vulns: []Vulnerability{
				{Triage: []Triage{{ID: 1}}}},
			}},
		}}},
	}
	// Close the server when test finishes
	defer server.Close()

	client := &piperHttp.Client{}
	client.SetOptions(piperHttp.ClientOptions{})
	pc := Protecode{serverURL: server.URL, client: client, duration: (time.Minute * 1), logger: log.Entry().WithField("package", "SAP/jenkins-library/pkg/protecode")}

	for _, c := range cases {
		got := pc.PollForResult(c.productID, "1")
		assert.Equal(t, c.want, got)
		assert.Equal(t, fmt.Sprintf("/api/product/%v/", c.productID), requestURI)
	}
}

// TestPullResultSuccess checks that a single result pull hits the expected
// endpoint and decodes the product-specific response.
func TestPullResultSuccess(t *testing.T) {
	requestURI := ""
	server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
		requestURI = req.RequestURI

		var response ResultData = ResultData{}

		if strings.Contains(requestURI, "111") {
			response = ResultData{
				Result: Result{ProductID: 111, ReportURL: requestURI}}
		} else {
			response = ResultData{
				Result: Result{ProductID: 222, ReportURL: requestURI}}
		}

		var b bytes.Buffer
		json.NewEncoder(&b).Encode(&response)
		rw.Write([]byte(b.Bytes()))
	}))
	// Close the server when test finishes
	defer server.Close()

	client := &piperHttp.Client{}
	client.SetOptions(piperHttp.ClientOptions{})

	cases := []struct {
		pc        Protecode
		productID int
		want      ResultData
	}{
		{Protecode{serverURL: server.URL, client: client}, 111, ResultData{Result: Result{ProductID: 111, ReportURL: "/api/product/111/"}}},
		{Protecode{serverURL: server.URL, client: client}, 222, ResultData{Result: Result{ProductID: 222, ReportURL: "/api/product/222/"}}},
	}
	for _, c := range cases {
		got, _ := c.pc.pullResult(c.productID)
		assert.Equal(t, c.want, got)
		assert.Equal(t, fmt.Sprintf("/api/product/%v/", c.productID), requestURI)
	}
}
// TestDeclareFetchURLSuccess checks that declaring a fetch URL hits the
// /api/fetch/ endpoint and sends the Group, Delete-Binary, and Url headers.
func TestDeclareFetchURLSuccess(t *testing.T) {
	requestURI := ""
	var passedHeaders = map[string][]string{}
	server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
		requestURI = req.RequestURI

		passedHeaders = map[string][]string{}
		if req.Header != nil {
			for name, headers := range req.Header {
				passedHeaders[name] = headers
			}
		}

		response := ResultData{Result: Result{ProductID: 111, ReportURL: requestURI}}

		var b bytes.Buffer
		json.NewEncoder(&b).Encode(&response)
		rw.Write([]byte(b.Bytes()))
	}))
	// Close the server when test finishes
	defer server.Close()

	pc := Protecode{}
	po := Options{ServerURL: server.URL}
	pc.SetOptions(po)

	cases := []struct {
		cleanupMode    string
		protecodeGroup string
		fetchURL       string
		want           string
	}{
		{"binary", "group1", "dummy", "/api/fetch/"},
		{"Test", "group2", "dummy", "/api/fetch/"},
	}
	for _, c := range cases {
		pc.DeclareFetchURL(c.cleanupMode, c.protecodeGroup, c.fetchURL)

		assert.Equal(t, requestURI, c.want)
		assert.Contains(t, passedHeaders, "Group")
		assert.Contains(t, passedHeaders, "Delete-Binary")
		assert.Contains(t, passedHeaders, "Url")
	}
}

// TestUploadScanFileSuccess checks that a multipart upload reaches the
// /api/upload/<fileName> endpoint with the expected headers and file bytes.
// Note the argument order below: the temp file is the filePath argument and
// "dummy" is the fileName used in the upload URL.
func TestUploadScanFileSuccess(t *testing.T) {
	requestURI := ""
	var passedHeaders = map[string][]string{}
	var multipartFile multipart.File
	var passedFileContents []byte
	server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
		requestURI = req.RequestURI

		passedHeaders = map[string][]string{}
		if req.Header != nil {
			for name, headers := range req.Header {
				passedHeaders[name] = headers
			}
		}

		response := Result{ProductID: 111, ReportURL: requestURI}

		err := req.ParseMultipartForm(4096)
		if err != nil {
			t.FailNow()
		}
		multipartFile, _, err = req.FormFile("file")
		if err != nil {
			t.FailNow()
		}
		defer req.Body.Close()
		passedFileContents, err = ioutil.ReadAll(multipartFile)
		if err != nil {
			t.FailNow()
		}

		var b bytes.Buffer
		json.NewEncoder(&b).Encode(&response)
		rw.Write([]byte(b.Bytes()))
	}))
	// Close the server when test finishes
	defer server.Close()

	pc := Protecode{}
	po := Options{ServerURL: server.URL}
	pc.SetOptions(po)

	testFile, err := ioutil.TempFile("", "testFileUpload")
	if err != nil {
		t.FailNow()
	}
	defer os.RemoveAll(testFile.Name()) // clean up

	fileContents, err := ioutil.ReadFile(testFile.Name())
	if err != nil {
		t.FailNow()
	}

	cases := []struct {
		cleanupMode    string
		protecodeGroup string
		fileName       string
		want           string
	}{
		{"binary", "group1", testFile.Name(), "/api/upload/dummy"},
		{"Test", "group2", testFile.Name(), "/api/upload/dummy"},
	}
	for _, c := range cases {
		pc.UploadScanFile(c.cleanupMode, c.protecodeGroup, c.fileName, "dummy")

		assert.Equal(t, requestURI, c.want)
		assert.Contains(t, passedHeaders, "Group")
		assert.Contains(t, passedHeaders, "Delete-Binary")
		assert.Equal(t, fileContents, passedFileContents, "Uploaded file incorrect")
	}
}
// TestLoadReportSuccess verifies that LoadReport hits the PDF-report endpoint
// for the given product ID and sends the expected download headers.
func TestLoadReportSuccess(t *testing.T) {
	gotPath := ""
	gotHeaders := map[string][]string{}
	srv := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
		// Record the request path and a snapshot of all headers for assertions.
		gotPath = req.RequestURI
		gotHeaders = map[string][]string{}
		for name, values := range req.Header {
			gotHeaders[name] = values
		}
		rw.Write([]byte("OK"))
	}))
	// Shut the stub server down once the test finishes.
	defer srv.Close()

	client := &piperHttp.Client{}
	client.SetOptions(piperHttp.ClientOptions{})
	pc := Protecode{serverURL: srv.URL, client: client}

	tests := []struct {
		productID      int
		reportFileName string
		want           string
	}{
		{1, "fileName", "/api/product/1/pdf-report"},
		{2, "fileName", "/api/product/2/pdf-report"},
	}
	for _, tc := range tests {
		pc.LoadReport(tc.reportFileName, tc.productID)
		assert.Equal(t, gotPath, tc.want)
		assert.Contains(t, gotHeaders, "Outputfile")
		assert.Contains(t, gotHeaders, "Pragma")
		assert.Contains(t, gotHeaders, "Cache-Control")
	}
}
// TestDeleteScanSuccess verifies that DeleteScan only issues a backend delete
// request when cleanupMode is "complete" (and stays idle for "binary").
func TestDeleteScanSuccess(t *testing.T) {
	gotPath := ""
	gotHeaders := map[string][]string{}
	srv := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
		// Record the request path and a snapshot of all headers.
		gotPath = req.RequestURI
		gotHeaders = map[string][]string{}
		for name, values := range req.Header {
			gotHeaders[name] = values
		}
		rw.Write([]byte("OK"))
	}))
	// Shut the stub server down once the test finishes.
	defer srv.Close()

	pc := Protecode{}
	pc.SetOptions(Options{ServerURL: srv.URL})

	tests := []struct {
		cleanupMode string
		productID   int
		want        string
	}{
		{"binary", 1, ""},
		{"complete", 2, "/api/product/2/"},
	}
	for _, tc := range tests {
		pc.DeleteScan(tc.cleanupMode, tc.productID)
		assert.Equal(t, gotPath, tc.want)
		if tc.cleanupMode == "complete" {
			// The delete URL must address the concrete product.
			assert.Contains(t, gotPath, fmt.Sprintf("%v", tc.productID))
		}
	}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -50,6 +50,12 @@
<dependencies>
<dependency>
<groupId>org.jenkins-ci.plugins</groupId>
<artifactId>sidebar-link</artifactId>
<version>1.9.1</version>
</dependency>
<dependency>
<groupId>org.jenkins-ci.plugins</groupId>
<artifactId>junit</artifactId>

View File

@ -0,0 +1,194 @@
metadata:
name: protecodeExecuteScan
description: Protecode is an Open Source Vulnerability Scanner that is capable of scanning binaries. It can be used to scan docker images but it supports many other programming languages, especially those of the C family. You can find more details on its capabilities in the [OS3 - Open Source Software Security JAM](https://jam4.sapjam.com/groups/XgeUs0CXItfeWyuI4k7lM3/overview_page/aoAsA0k4TbezGFyOkhsXFs). For getting access to Protecode please visit the [guide](https://go.sap.corp/protecode).
longDescription: |-
Protecode is an Open Source Vulnerability Scanner that is capable of scanning binaries. It can be used to scan docker images but it supports many other programming languages, especially those of the C family. You can find more details on its capabilities in the [OS3 - Open Source Software Security JAM](https://jam4.sapjam.com/groups/XgeUs0CXItfeWyuI4k7lM3/overview_page/aoAsA0k4TbezGFyOkhsXFs). For getting access to Protecode please visit the [guide](https://go.sap.corp/protecode).
!!! info "New: Using protecodeExecuteScan for Docker images on JaaS"
**This step now also works on "Jenkins as a Service (JaaS)"!**<br />
For the JaaS use case where the execution happens in a Kubernetes cluster without access to a Docker daemon [skopeo](https://github.com/containers/skopeo) is now used silently in the background to save a Docker image retrieved from a registry.
!!! hint "Auditing findings (Triaging)"
Triaging is now supported by the Protecode backend and Piper considers this information during the analysis of the scan results, though product versions are not supported by Protecode. Therefore please make sure that the `fileName` you are providing either contains a stable version or does not contain one at all. By ensuring this you are able to triage CVEs globally on the upload file's name without affecting any other artifacts scanned in the same Protecode group, and as such triaged vulnerabilities will be considered during the next scan and will not fail the build anymore.
spec:
inputs:
params:
- name: excludeCVEs
aliases:
- name: protecodeExcludeCVEs
type: string
description: 'DEPRECATED: Do use triaging within the Protecode UI instead'
scope:
- PARAMETERS
- STAGES
- STEPS
default: []
- name: failOnSevereVulnerabilities
aliases:
- name: protecodeFailOnSevereVulnerabilities
type: bool
description: Whether to fail the job on severe vulnerabilities or not
scope:
- PARAMETERS
- STAGES
- STEPS
default: true
- name: scanImage
aliases:
- name: dockerImage
type: string
description: The reference to the docker image to scan with Protecode
resourceRef:
- name: commonPipelineEnvironment
param: container/imageNameTag
scope:
- GENERAL
- PARAMETERS
- STAGES
- STEPS
- name: dockerRegistryUrl
type: string
description: The reference to the docker registry to scan with Protecode
resourceRef:
- name: commonPipelineEnvironment
param: container/registryUrl
scope:
- GENERAL
- PARAMETERS
- STAGES
- STEPS
- name: cleanupMode
type: string
description: Decides which parts are removed from the Protecode backend after the scan
scope:
- PARAMETERS
- STAGES
- STEPS
default: binary
- name: filePath
type: string
description: The path to the file from local workspace to scan with Protecode
scope:
- PARAMETERS
- STAGES
- STEPS
- name: includeLayers
type: bool
description: Flag if the docker layers should be included
scope:
- PARAMETERS
- STAGES
- STEPS
- name: addSideBarLink
type: bool
description: Whether to create a side bar link pointing to the report produced by Protecode or not
scope:
- PARAMETERS
- STAGES
- STEPS
default: true
- name: timeoutMinutes
aliases:
- name: protecodeTimeoutMinutes
type: string
description: The timeout to wait for the scan to finish
scope:
- PARAMETERS
- STAGES
- STEPS
default: 60
- name: serverUrl
aliases:
- name: protecodeServerUrl
type: string
description: The URL to the Protecode backend
mandatory: true
scope:
- GENERAL
- PARAMETERS
- STAGES
- STEPS
- name: reportFileName
type: string
description: The file name of the report to be created
scope:
- PARAMETERS
- STAGES
- STEPS
default: protecode_report.pdf
- name: fetchUrl
type: string
description: The URL to fetch the file to scan with Protecode which must be accessible via public HTTP GET request
scope:
- PARAMETERS
- STAGES
- STEPS
- name: group
aliases:
- name: protecodeGroup
type: string
description: The Protecode group ID of your team
mandatory: true
scope:
- PARAMETERS
- STAGES
- STEPS
- name: reuseExisting
type: bool
description: Whether to reuse an existing product instead of creating a new one
scope:
- PARAMETERS
- STAGES
- STEPS
- name: user
type: string
description: User which is used for the protecode scan
mandatory: true
scope:
- PARAMETERS
- STAGES
- STEPS
- name: password
type: string
description: Password which is used for the user
mandatory: true
scope:
- PARAMETERS
- STAGES
- STEPS
- name: artifactVersion
type: string
description: The version of the artifact to allow identification in protecode backend
resourceRef:
- name: commonPipelineEnvironment
param: artifactVersion
scope:
- PARAMETERS
- STAGES
- STEPS
- name: pullRequestName
type: string
description: The name of the pull request
scope:
- PARAMETERS
- STAGES
- STEPS
secrets:
- name: protecodeCredentialsId
type: jenkins
- name: dockerCredentialsId
type: jenkins
outputs:
resources:
- name: influx
type: influx
params:
- name: protecodeData
fields:
- name: historicalVulnerabilities
- name: triagedVulnerabilities
- name: excludedVulnerabilities
- name: majorVulnerabilities
- name: minorVulnerabilities
- name: vulnerabilities

View File

@ -65,11 +65,17 @@ def nodeAvailable() {
return true
}
@NonCPS
def getCurrentBuildInstance() {
return currentBuild
}
@NonCPS
def getParentJob() {
return getRawBuild().getParent()
}
@NonCPS
def getRawBuild() {
return getCurrentBuildInstance().rawBuild
@ -164,6 +170,42 @@ String getPluginVersion(name) {
return getPlugin(name)?.getVersion()
}
/**
 * Adds a sidebar link to the parent job pointing at a URL relative to the current build.
 * The link URL is prefixed with the current build number so the job-level link targets
 * this concrete run. Does nothing if relativeUrl or displayName is null.
 *
 * @param relativeUrl      URL relative to the build (e.g. 'artifact/report.pdf')
 * @param displayName      text shown for the sidebar entry
 * @param relativeIconPath icon path relative to the Jenkins static resource root; may be null
 */
@NonCPS
void addJobSideBarLink(String relativeUrl, String displayName, String relativeIconPath) {
    try {
        // LinkAction is loaded reflectively so the sidebar-link plugin stays an optional dependency.
        def linkActionClass = this.class.classLoader.loadClass("hudson.plugins.sidebar_link.LinkAction")
        if (relativeUrl != null && displayName != null) {
            def parentJob = getParentJob()
            def buildNumber = getCurrentBuildInstance().number
            // Resolve the icon against the Jenkins resource root; keep null if no icon was given.
            def iconPath = (null != relativeIconPath) ? "${Functions.getResourcePath()}/${relativeIconPath}" : null
            def action = linkActionClass.newInstance("${buildNumber}/${relativeUrl}", displayName, iconPath)
            echo "Added job level sidebar link to '${action.getUrlName()}' with name '${action.getDisplayName()}' and icon '${action.getIconFileName()}'"
            parentJob.getActions().add(action)
        }
    } catch (e) {
        // Best effort only: a missing plugin or reflection failure must not break the build.
        e.printStackTrace()
    }
}
/**
 * Removes sidebar link actions from the parent job.
 *
 * @param relativeUrl when null, all LinkAction entries are removed; otherwise only
 *                    links whose URL ends with this suffix are removed
 */
@NonCPS
void removeJobSideBarLinks(String relativeUrl = null) {
    try {
        // LinkAction is loaded reflectively so the sidebar-link plugin stays an optional dependency.
        def linkActionClass = this.class.classLoader.loadClass("hudson.plugins.sidebar_link.LinkAction")
        def parentJob = getParentJob()
        // Collect matches first; removing while iterating would break the loop.
        def listToRemove = new ArrayList()
        for (def action : parentJob.getActions()) {
            if (linkActionClass.isAssignableFrom(action.getClass()) && (null == relativeUrl || action.getUrlName().endsWith(relativeUrl))) {
                echo "Removing job level sidebar link to '${action.getUrlName()}' with name '${action.getDisplayName()}' and icon '${action.getIconFileName()}'"
                listToRemove.add(action)
            }
        }
        parentJob.getActions().removeAll(listToRemove)
        echo "Removed Jenkins global sidebar links ${listToRemove}"
    } catch (e) {
        // Best effort only: failures here must not break the build.
        e.printStackTrace()
    }
}
void handleStepResults(String stepName, boolean failOnMissingReports, boolean failOnMissingLinks) {
def reportsFileName = "${stepName}_reports.json"
def reportsFileExists = fileExists(file: reportsFileName)

View File

@ -119,6 +119,7 @@ public class CommonStepsTest extends BasePiperTest{
'checkmarxExecuteScan', //implementing new golang pattern without fields
'githubPublishRelease', //implementing new golang pattern without fields
'kubernetesDeploy', //implementing new golang pattern without fields
'protecodeExecuteScan', //implementing new golang pattern without fields
'xsDeploy', //implementing new golang pattern without fields
]

View File

@ -1,6 +1,9 @@
package com.sap.piper
import hudson.plugins.sidebar_link.LinkAction
import hudson.AbortException
import org.jenkinsci.plugins.workflow.steps.MissingContextVariableException
import org.junit.Before
import org.junit.Rule
@ -15,6 +18,7 @@ import util.Rules
import static org.hamcrest.Matchers.*
import static org.junit.Assert.assertThat
import static org.junit.Assert.assertEquals
class JenkinsUtilsTest extends BasePiperTest {
public ExpectedException exception = ExpectedException.none()
@ -57,24 +61,6 @@ class JenkinsUtilsTest extends BasePiperTest {
def getActiveJenkinsInstance() {
return jenkinsInstanceMock
}
void addRunSideBarLink(String relativeUrl, String displayName, String relativeIconPath) {
results.runlinkCalled = true
assertThat(relativeUrl, is('https://server.com/1234.pdf'))
assertThat(displayName, is('Test link'))
assertThat(relativeIconPath, is('images/24x24/graph.png'))
}
void addJobSideBarLink(String relativeUrl, String displayName, String relativeIconPath) {
results.joblinkCalled = true
assertThat(relativeUrl, is('https://server.com/1234.pdf'))
assertThat(displayName, is('Test link'))
assertThat(relativeIconPath, is('images/24x24/graph.png'))
}
void removeJobSideBarLinks(String relativeUrl) {
results.removejoblinkCalled = true
assertThat(relativeUrl, is('https://server.com/1234.pdf'))
}
}
LibraryLoadingTestExecutionListener.prepareObjectInterceptors(jenkinsUtils)
@ -123,8 +109,41 @@ class JenkinsUtilsTest extends BasePiperTest {
}
LibraryLoadingTestExecutionListener.prepareObjectInterceptors(currentBuildMock)
}
def initializeNewUtil() {
jenkinsUtils = new JenkinsUtils() {
def getCurrentBuildInstance() {
return currentBuildMock
}
def getActiveJenkinsInstance() {
return jenkinsInstanceMock
}
void addRunSideBarLink(String relativeUrl, String displayName, String relativeIconPath) {
results.runlinkCalled = true
assertThat(relativeUrl, is('https://server.com/1234.pdf'))
assertThat(displayName, is('Test link'))
assertThat(relativeIconPath, is('images/24x24/graph.png'))
}
void addJobSideBarLink(String relativeUrl, String displayName, String relativeIconPath) {
results.joblinkCalled = true
assertThat(relativeUrl, is('https://server.com/1234.pdf'))
assertThat(displayName, is('Test link'))
assertThat(relativeIconPath, is('images/24x24/graph.png'))
}
void removeJobSideBarLinks(String relativeUrl) {
results.removejoblinkCalled = true
assertThat(relativeUrl, is('https://server.com/1234.pdf'))
}
}
LibraryLoadingTestExecutionListener.prepareObjectInterceptors(jenkinsUtils)
}
@Test
void testHandleStepResultsJobLink() {
initializeNewUtil()
helper.registerAllowedMethod("fileExists", [Map], { m ->
return true
})
@ -143,6 +162,7 @@ class JenkinsUtilsTest extends BasePiperTest {
}
@Test
void testHandleStepResults() {
initializeNewUtil()
helper.registerAllowedMethod("fileExists", [Map], { m ->
return true
})
@ -161,6 +181,7 @@ class JenkinsUtilsTest extends BasePiperTest {
}
@Test
void testHandleStepResultsEmptyReports() {
initializeNewUtil()
helper.registerAllowedMethod("fileExists", [Map], { m ->
return true
})
@ -175,6 +196,7 @@ class JenkinsUtilsTest extends BasePiperTest {
}
@Test
void testHandleStepResultsEmptyLinks() {
initializeNewUtil()
helper.registerAllowedMethod("fileExists", [Map], { m ->
return true
})
@ -189,6 +211,7 @@ class JenkinsUtilsTest extends BasePiperTest {
}
@Test
void testHandleStepResultsNoErrorReportsLinks() {
initializeNewUtil()
helper.registerAllowedMethod("fileExists", [Map], { m ->
return true
})
@ -202,6 +225,7 @@ class JenkinsUtilsTest extends BasePiperTest {
}
@Test
void testHandleStepResultsReportsNoFile() {
initializeNewUtil()
helper.registerAllowedMethod("fileExists", [Map], { m ->
return false
})
@ -219,6 +243,7 @@ class JenkinsUtilsTest extends BasePiperTest {
}
@Test
void testHandleStepResultsLinksNoFile() {
initializeNewUtil()
helper.registerAllowedMethod("fileExists", [Map], { m ->
return false
})
@ -294,4 +319,55 @@ class JenkinsUtilsTest extends BasePiperTest {
assertThat(libs[0], is([name: 'lib1', version: '1', trusted: true]))
assertThat(libs[1], is([name: 'lib2', version: '2', trusted: false]))
}
@Test
void testAddJobSideBarLink() {
    // Capture the action list that addJobSideBarLink mutates.
    List capturedActions = []
    helper.registerAllowedMethod("getActions", [], { -> capturedActions })
    currentBuildMock.number = 15

    jenkinsUtils.addJobSideBarLink("abcd/1234", "Some report link", "images/24x24/report.png")

    assertEquals(1, capturedActions.size())
    def link = capturedActions[0]
    assertEquals(LinkAction.class, link.getClass())
    // Job-level links are prefixed with the build number.
    assertEquals("15/abcd/1234", link.getUrlName())
    assertEquals("Some report link", link.getDisplayName())
    assertEquals("/images/24x24/report.png", link.getIconFileName())
}
@Test
void testRemoveJobSideBarLinks() {
    // Seed the job with a single existing link action matching the removal URL.
    List seededActions = [new LinkAction("abcd/1234", "Some report link", "images/24x24/report.png")]
    helper.registerAllowedMethod("getActions", [], { -> seededActions })

    jenkinsUtils.removeJobSideBarLinks("abcd/1234")

    assertEquals(0, seededActions.size())
}
@Test
void testAddRunSideBarLink() {
    // Capture the action list that addRunSideBarLink mutates.
    List capturedActions = []
    helper.registerAllowedMethod("getActions", [], { -> capturedActions })

    jenkinsUtils.addRunSideBarLink("abcd/1234", "Some report link", "images/24x24/report.png")

    assertEquals(1, capturedActions.size())
    def link = capturedActions[0]
    assertEquals(LinkAction.class, link.getClass())
    // Run-level links are not prefixed with a build number.
    assertEquals("abcd/1234", link.getUrlName())
    assertEquals("Some report link", link.getDisplayName())
    assertEquals("/images/24x24/report.png", link.getIconFileName())
}
}

View File

@ -0,0 +1,74 @@
import com.sap.piper.JenkinsUtils
import com.sap.piper.PiperGoUtils
import com.sap.piper.Utils
import groovy.transform.Field
import static com.sap.piper.Prerequisites.checkScript
@Field String STEP_NAME = getClass().getName()
@Field String METADATA_FILE = 'metadata/protecode.yaml'
/**
 * Protecode is an Open Source Vulnerability Scanner that is capable of scanning binaries. It can be used to scan docker images but it supports many other programming languages, especially those of the C family. You can find more details on its capabilities in the [OS3 - Open Source Software Security JAM](https://jam4.sapjam.com/groups/XgeUs0CXItfeWyuI4k7lM3/overview_page/aoAsA0k4TbezGFyOkhsXFs). For getting access to Protecode please visit the [guide](https://go.sap.corp/protecode).
 */
void call(Map parameters = [:]) {
    handlePipelineStepErrors (stepName: STEP_NAME, stepParameters: parameters, failOnError: true) {
        def script = checkScript(this, parameters) ?: this
        Map config
        def utils = parameters.juStabUtils ?: new Utils()
        def jenkinsUtils = parameters.jenkinsUtilsStub ?: new JenkinsUtils()

        // Make the piper go binary and the step metadata available in the workspace.
        new PiperGoUtils(this, utils).unstashPiperBin()
        utils.unstash('pipelineConfigAndTests')
        writeFile(file: METADATA_FILE, text: libraryResource(METADATA_FILE))

        withEnv([
            "PIPER_parametersJSON=${groovy.json.JsonOutput.toJson(parameters)}",
        ]) {
            // Resolve the context configuration (credential IDs etc.) via the piper binary.
            config = readJSON(text: sh(returnStdout: true, script: "./piper getConfig --contextConfig --stepMetadata '${METADATA_FILE}'"))

            def creds = []
            if (config.protecodeCredentialsId) creds.add(usernamePassword(credentialsId: config.protecodeCredentialsId, passwordVariable: 'PIPER_password', usernameVariable: 'PIPER_user'))
            if (config.dockerCredentialsId) creds.add(file(credentialsId: config.dockerCredentialsId, variable: 'FILE_PATH'))

            // Execute the step, pointing DOCKER_CONFIG at the directory of the
            // docker config file when docker credentials are configured.
            withCredentials(creds) {
                if (config.dockerCredentialsId) {
                    if (FILE_PATH) {
                        // NOTE(review): java.io.File operates on the node running the CPS script;
                        // confirm this resolves correctly when the step runs on a remote agent.
                        File dockerConfigFile = new File(FILE_PATH)
                        def configDirPath = dockerConfigFile.getAbsoluteFile().getParent()
                        withEnv([
                            "DOCKER_CONFIG=${configDirPath}",
                        ]) {
                            sh "./piper protecodeExecuteScan"
                        }
                    }
                } else {
                    sh "./piper protecodeExecuteScan"
                }
            }

            // Fail fast if the vulnerability list was not produced / is not valid JSON;
            // the file itself is only archived below.
            readJSON(file: 'protecodescan_vulns.json')
            def report = readJSON(file: 'protecodeExecuteScan.json')

            archiveArtifacts artifacts: report['target'], allowEmptyArchive: !report['mandatory']
            archiveArtifacts artifacts: "protecodeExecuteScan.json", allowEmptyArchive: false
            archiveArtifacts artifacts: "protecodescan_vulns.json", allowEmptyArchive: false

            // Refresh the sidebar links pointing at the archived report and the Protecode WebUI.
            jenkinsUtils.removeJobSideBarLinks("artifact/${report['target']}")
            jenkinsUtils.addJobSideBarLink("artifact/${report['target']}", "Protecode Report", "images/24x24/graph.png")
            jenkinsUtils.addRunSideBarLink("artifact/${report['target']}", "Protecode Report", "images/24x24/graph.png")
            // The Go step serializes protecodeData with the json tag 'serverUrl'
            // (see protecodeExecuteScan.go); 'protecodeServerUrl' does not exist in the report.
            jenkinsUtils.addRunSideBarLink("${report['serverUrl']}/products/${report['productID']}/", "Protecode WebUI", "images/24x24/graph.png")
        }
    }
}