Mirror of https://github.com/SAP/jenkins-library.git (synced 2024-11-28 08:49:44 +02:00)
Adding support for CheckmarxOne platform (#4317)
* Initial in progress
* compiling but not yet functional
* Missed file
* updated checkmarxone step
* Working up to fetching a project then breaks
* Missed file
* Breaks when retrieving projects+proxy set
* Create project & run scan working, now polling
* Fixed polling
* added back the zipfile remove command
* Fixed polling again
* Generates and downloads PDF report
* Updated and working, prep for refactor
* Added compliance steps
* Cleanup, reporting, added groovy connector
* fixed groovy file
* checkmarxone to checkmarxOne
* checkmarxone to checkmarxOne
* split credentials (id+secret, apikey), renamed pullrequestname to branch, groovy fix
* Fixed filenames & yaml
* missed the metadata_generated.go
* added json to sarif conversion
* fix:type in new checkmarxone package
* fix:type in new checkmarxone package
* removed test logs, added temp error log for creds
* extra debugging to fix crash
* improved auth logging, fixed query parse issue
* fixed bug with group fetch when using oauth user
* CWE can be -1 if not defined, can't be uint
* Query also had CweID
* Disabled predicates-fetch in sarif generation
* Removing leftover info log message
* Better error handling
* fixed default preset configuration
* removing .bat files - sorry
* Cleanup per initial review
* refactoring per Gist, fixed project find, add apps
* small fix - sorry for commit noise while testing
* Fixing issues with incremental scans.
* removing maxretries
* Updated per PR feedback, further changes todo toda
* JSON Report changes and reporting cleanup
* removing .bat (again?)
* adding docs, groovy unit test, linter fixes
* Started adding tests maybe 15% covered
* fix(checkmarxOne): test cases for pkg and reporting
* fix(checkmarxOne): fix formatting
* feat(checkmarxone): update interface with missing method
* feat(checkmarxone): change runStep signature to be able to inject dependency
* feat(checkmarxone): add tests for step (wip)
* Adding a bit more coverage
* feat(checkmarxOne): fix code review
* feat(checkmarxOne): fix code review
* feat(checkmarxOne): fix code review
* feat(checkmarxOne): fix integration test PR

---------
Co-authored-by: thtri <trinhthanhhai@gmail.com>
Co-authored-by: Thanh-Hai Trinh <thanh.hai.trinh@sap.com>
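The credential split called out above (service-account client ID and secret plus a separate API key) maps onto the generated checkmarxOneExecuteScanOptions struct further down in this diff. A minimal sketch, not taken from the diff, of populating just those fields with placeholder values:

    // Hypothetical values for illustration only.
    opts := checkmarxOneExecuteScanOptions{
        ClientID:     "piper-service-account", // from the checkmarxOneCredentialsId secret
        ClientSecret: "****",                  // from the checkmarxOneCredentialsId secret
        APIKey:       "****",                  // from the separate checkmarxOneAPIKey secret
        Branch:       "main",                  // branch to scan (see also pullRequestName)
    }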
parent a84bba9030, commit d12f01d90f

cmd/checkmarxOneExecuteScan.go (new file, 1126 lines; diff suppressed because it is too large)

cmd/checkmarxOneExecuteScan_generated.go (new file, 847 lines)
@@ -0,0 +1,847 @@
// Code generated by piper's step-generator. DO NOT EDIT.

package cmd

import (
    "fmt"
    "os"
    "path/filepath"
    "reflect"
    "strings"
    "time"

    "github.com/SAP/jenkins-library/pkg/config"
    "github.com/SAP/jenkins-library/pkg/gcs"
    "github.com/SAP/jenkins-library/pkg/log"
    "github.com/SAP/jenkins-library/pkg/piperenv"
    "github.com/SAP/jenkins-library/pkg/splunk"
    "github.com/SAP/jenkins-library/pkg/telemetry"
    "github.com/SAP/jenkins-library/pkg/validation"
    "github.com/bmatcuk/doublestar"
    "github.com/spf13/cobra"
)

type checkmarxOneExecuteScanOptions struct {
    Assignees []string `json:"assignees,omitempty"`
    AvoidDuplicateProjectScans bool `json:"avoidDuplicateProjectScans,omitempty"`
    FilterPattern string `json:"filterPattern,omitempty"`
    FullScanCycle string `json:"fullScanCycle,omitempty"`
    FullScansScheduled bool `json:"fullScansScheduled,omitempty"`
    GeneratePdfReport bool `json:"generatePdfReport,omitempty"`
    GithubAPIURL string `json:"githubApiUrl,omitempty"`
    GithubToken string `json:"githubToken,omitempty"`
    Incremental bool `json:"incremental,omitempty"`
    Owner string `json:"owner,omitempty"`
    ClientSecret string `json:"clientSecret,omitempty"`
    APIKey string `json:"APIKey,omitempty"`
    Preset string `json:"preset,omitempty"`
    LanguageMode string `json:"languageMode,omitempty"`
    ProjectCriticality string `json:"projectCriticality,omitempty"`
    ProjectName string `json:"projectName,omitempty"`
    Branch string `json:"branch,omitempty"`
    PullRequestName string `json:"pullRequestName,omitempty"`
    Repository string `json:"repository,omitempty"`
    ServerURL string `json:"serverUrl,omitempty"`
    IamURL string `json:"iamUrl,omitempty"`
    Tenant string `json:"tenant,omitempty"`
    SourceEncoding string `json:"sourceEncoding,omitempty"`
    GroupName string `json:"groupName,omitempty"`
    ApplicationName string `json:"applicationName,omitempty"`
    ClientID string `json:"clientId,omitempty"`
    VerifyOnly bool `json:"verifyOnly,omitempty"`
    VulnerabilityThresholdEnabled bool `json:"vulnerabilityThresholdEnabled,omitempty"`
    VulnerabilityThresholdHigh int `json:"vulnerabilityThresholdHigh,omitempty"`
    VulnerabilityThresholdMedium int `json:"vulnerabilityThresholdMedium,omitempty"`
    VulnerabilityThresholdLow int `json:"vulnerabilityThresholdLow,omitempty"`
    VulnerabilityThresholdLowPerQuery bool `json:"vulnerabilityThresholdLowPerQuery,omitempty"`
    VulnerabilityThresholdLowPerQueryMax int `json:"vulnerabilityThresholdLowPerQueryMax,omitempty"`
    VulnerabilityThresholdResult string `json:"vulnerabilityThresholdResult,omitempty" validate:"possible-values=FAILURE"`
    VulnerabilityThresholdUnit string `json:"vulnerabilityThresholdUnit,omitempty"`
    IsOptimizedAndScheduled bool `json:"isOptimizedAndScheduled,omitempty"`
    CreateResultIssue bool `json:"createResultIssue,omitempty"`
    ConvertToSarif bool `json:"convertToSarif,omitempty"`
}

type checkmarxOneExecuteScanInflux struct {
    step_data struct {
        fields struct {
            checkmarxOne bool
        }
        tags struct {
        }
    }
    checkmarxOne_data struct {
        fields struct {
            high_issues int
            high_not_false_postive int
            high_not_exploitable int
            high_confirmed int
            high_urgent int
            high_proposed_not_exploitable int
            high_to_verify int
            medium_issues int
            medium_not_false_postive int
            medium_not_exploitable int
            medium_confirmed int
            medium_urgent int
            medium_proposed_not_exploitable int
            medium_to_verify int
            low_issues int
            low_not_false_postive int
            low_not_exploitable int
            low_confirmed int
            low_urgent int
            low_proposed_not_exploitable int
            low_to_verify int
            information_issues int
            information_not_false_postive int
            information_not_exploitable int
            information_confirmed int
            information_urgent int
            information_proposed_not_exploitable int
            information_to_verify int
            lines_of_code_scanned int
            files_scanned int
            initiator_name string
            owner string
            scan_id string
            project_id string
            projectName string
            group string
            group_full_path_on_report_date string
            scan_start string
            scan_time string
            checkmarxOne_version string
            scan_type string
            preset string
            deep_link string
            report_creation_time string
        }
        tags struct {
        }
    }
}
|
||||
|
||||
func (i *checkmarxOneExecuteScanInflux) persist(path, resourceName string) {
|
||||
measurementContent := []struct {
|
||||
measurement string
|
||||
valType string
|
||||
name string
|
||||
value interface{}
|
||||
}{
|
||||
{valType: config.InfluxField, measurement: "step_data", name: "checkmarxOne", value: i.step_data.fields.checkmarxOne},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "high_issues", value: i.checkmarxOne_data.fields.high_issues},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "high_not_false_postive", value: i.checkmarxOne_data.fields.high_not_false_postive},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "high_not_exploitable", value: i.checkmarxOne_data.fields.high_not_exploitable},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "high_confirmed", value: i.checkmarxOne_data.fields.high_confirmed},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "high_urgent", value: i.checkmarxOne_data.fields.high_urgent},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "high_proposed_not_exploitable", value: i.checkmarxOne_data.fields.high_proposed_not_exploitable},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "high_to_verify", value: i.checkmarxOne_data.fields.high_to_verify},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "medium_issues", value: i.checkmarxOne_data.fields.medium_issues},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "medium_not_false_postive", value: i.checkmarxOne_data.fields.medium_not_false_postive},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "medium_not_exploitable", value: i.checkmarxOne_data.fields.medium_not_exploitable},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "medium_confirmed", value: i.checkmarxOne_data.fields.medium_confirmed},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "medium_urgent", value: i.checkmarxOne_data.fields.medium_urgent},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "medium_proposed_not_exploitable", value: i.checkmarxOne_data.fields.medium_proposed_not_exploitable},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "medium_to_verify", value: i.checkmarxOne_data.fields.medium_to_verify},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "low_issues", value: i.checkmarxOne_data.fields.low_issues},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "low_not_false_postive", value: i.checkmarxOne_data.fields.low_not_false_postive},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "low_not_exploitable", value: i.checkmarxOne_data.fields.low_not_exploitable},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "low_confirmed", value: i.checkmarxOne_data.fields.low_confirmed},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "low_urgent", value: i.checkmarxOne_data.fields.low_urgent},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "low_proposed_not_exploitable", value: i.checkmarxOne_data.fields.low_proposed_not_exploitable},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "low_to_verify", value: i.checkmarxOne_data.fields.low_to_verify},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "information_issues", value: i.checkmarxOne_data.fields.information_issues},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "information_not_false_postive", value: i.checkmarxOne_data.fields.information_not_false_postive},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "information_not_exploitable", value: i.checkmarxOne_data.fields.information_not_exploitable},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "information_confirmed", value: i.checkmarxOne_data.fields.information_confirmed},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "information_urgent", value: i.checkmarxOne_data.fields.information_urgent},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "information_proposed_not_exploitable", value: i.checkmarxOne_data.fields.information_proposed_not_exploitable},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "information_to_verify", value: i.checkmarxOne_data.fields.information_to_verify},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "lines_of_code_scanned", value: i.checkmarxOne_data.fields.lines_of_code_scanned},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "files_scanned", value: i.checkmarxOne_data.fields.files_scanned},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "initiator_name", value: i.checkmarxOne_data.fields.initiator_name},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "owner", value: i.checkmarxOne_data.fields.owner},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "scan_id", value: i.checkmarxOne_data.fields.scan_id},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "project_id", value: i.checkmarxOne_data.fields.project_id},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "projectName", value: i.checkmarxOne_data.fields.projectName},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "group", value: i.checkmarxOne_data.fields.group},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "group_full_path_on_report_date", value: i.checkmarxOne_data.fields.group_full_path_on_report_date},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "scan_start", value: i.checkmarxOne_data.fields.scan_start},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "scan_time", value: i.checkmarxOne_data.fields.scan_time},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "checkmarxOne_version", value: i.checkmarxOne_data.fields.checkmarxOne_version},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "scan_type", value: i.checkmarxOne_data.fields.scan_type},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "preset", value: i.checkmarxOne_data.fields.preset},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "deep_link", value: i.checkmarxOne_data.fields.deep_link},
|
||||
{valType: config.InfluxField, measurement: "checkmarxOne_data", name: "report_creation_time", value: i.checkmarxOne_data.fields.report_creation_time},
|
||||
}
|
||||
|
||||
errCount := 0
|
||||
for _, metric := range measurementContent {
|
||||
err := piperenv.SetResourceParameter(path, resourceName, filepath.Join(metric.measurement, fmt.Sprintf("%vs", metric.valType), metric.name), metric.value)
|
||||
if err != nil {
|
||||
log.Entry().WithError(err).Error("Error persisting influx environment.")
|
||||
errCount++
|
||||
}
|
||||
}
|
||||
if errCount > 0 {
|
||||
log.Entry().Error("failed to persist Influx environment")
|
||||
}
|
||||
}
|
||||
|
||||
type checkmarxOneExecuteScanReports struct {
|
||||
}
|
||||
|
||||
func (p *checkmarxOneExecuteScanReports) persist(stepConfig checkmarxOneExecuteScanOptions, gcpJsonKeyFilePath string, gcsBucketId string, gcsFolderPath string, gcsSubFolder string) {
|
||||
if gcsBucketId == "" {
|
||||
log.Entry().Info("persisting reports to GCS is disabled, because gcsBucketId is empty")
|
||||
return
|
||||
}
|
||||
log.Entry().Info("Uploading reports to Google Cloud Storage...")
|
||||
content := []gcs.ReportOutputParam{
|
||||
{FilePattern: "**/piper_checkmarxone_report.html", ParamRef: "", StepResultType: "checkmarxone"},
|
||||
{FilePattern: "**/Cx1_SASTResults_*.xml", ParamRef: "", StepResultType: "checkmarxone"},
|
||||
{FilePattern: "**/ScanReport.*", ParamRef: "", StepResultType: "checkmarxone"},
|
||||
{FilePattern: "**/toolrun_checkmarxone_*.json", ParamRef: "", StepResultType: "checkmarxone"},
|
||||
}
|
||||
envVars := []gcs.EnvVar{
|
||||
{Name: "GOOGLE_APPLICATION_CREDENTIALS", Value: gcpJsonKeyFilePath, Modified: false},
|
||||
}
|
||||
gcsClient, err := gcs.NewClient(gcs.WithEnvVars(envVars))
|
||||
if err != nil {
|
||||
log.Entry().Errorf("creation of GCS client failed: %v", err)
|
||||
return
|
||||
}
|
||||
defer gcsClient.Close()
|
||||
structVal := reflect.ValueOf(&stepConfig).Elem()
|
||||
inputParameters := map[string]string{}
|
||||
for i := 0; i < structVal.NumField(); i++ {
|
||||
field := structVal.Type().Field(i)
|
||||
if field.Type.String() == "string" {
|
||||
paramName := strings.Split(field.Tag.Get("json"), ",")
|
||||
paramValue, _ := structVal.Field(i).Interface().(string)
|
||||
inputParameters[paramName[0]] = paramValue
|
||||
}
|
||||
}
|
||||
if err := gcs.PersistReportsToGCS(gcsClient, content, inputParameters, gcsFolderPath, gcsBucketId, gcsSubFolder, doublestar.Glob, os.Stat); err != nil {
|
||||
log.Entry().Errorf("failed to persist reports: %v", err)
|
||||
}
|
||||
}
// CheckmarxOneExecuteScanCommand checkmarxOne is the recommended tool for security scans of JavaScript, iOS, Swift and Ruby code.
func CheckmarxOneExecuteScanCommand() *cobra.Command {
    const STEP_NAME = "checkmarxOneExecuteScan"

    metadata := checkmarxOneExecuteScanMetadata()
    var stepConfig checkmarxOneExecuteScanOptions
    var startTime time.Time
    var influx checkmarxOneExecuteScanInflux
    var reports checkmarxOneExecuteScanReports
    var logCollector *log.CollectorHook
    var splunkClient *splunk.Splunk
    telemetryClient := &telemetry.Telemetry{}

    var createCheckmarxOneExecuteScanCmd = &cobra.Command{
        Use:   STEP_NAME,
        Short: "checkmarxOne is the recommended tool for security scans of JavaScript, iOS, Swift and Ruby code.",
        Long: `checkmarxOne is a Static Application Security Testing (SAST) platform to analyze i.e. Java or TypeScript, Swift, Golang, Ruby code,
and many other programming languages for security flaws based on a set of provided rules/queries that can be customized and extended.

This step by default enforces a specific audit baseline for findings and therefore ensures that:

* No 'To Verify' High and Medium issues exist in your project
* Total number of High and Medium 'Confirmed' or 'Urgent' issues is zero
* 10% of all Low issues are 'Confirmed' or 'Not Exploitable'

You can adapt above thresholds specifically using the provided configuration parameters and i.e. check for ` + "`" + `absolute` + "`" + `
thresholds instead of ` + "`" + `percentage` + "`" + ` whereas we strongly recommend you to stay with the defaults provided.`,
        PreRunE: func(cmd *cobra.Command, _ []string) error {
            startTime = time.Now()
            log.SetStepName(STEP_NAME)
            log.SetVerbose(GeneralConfig.Verbose)

            GeneralConfig.GitHubAccessTokens = ResolveAccessTokens(GeneralConfig.GitHubTokens)

            path, _ := os.Getwd()
            fatalHook := &log.FatalHook{CorrelationID: GeneralConfig.CorrelationID, Path: path}
            log.RegisterHook(fatalHook)

            err := PrepareConfig(cmd, &metadata, STEP_NAME, &stepConfig, config.OpenPiperFile)
            if err != nil {
                log.SetErrorCategory(log.ErrorConfiguration)
                return err
            }
            log.RegisterSecret(stepConfig.GithubToken)
            log.RegisterSecret(stepConfig.ClientSecret)
            log.RegisterSecret(stepConfig.APIKey)
            log.RegisterSecret(stepConfig.ClientID)

            if len(GeneralConfig.HookConfig.SentryConfig.Dsn) > 0 {
                sentryHook := log.NewSentryHook(GeneralConfig.HookConfig.SentryConfig.Dsn, GeneralConfig.CorrelationID)
                log.RegisterHook(&sentryHook)
            }

            if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 {
                splunkClient = &splunk.Splunk{}
                logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID}
                log.RegisterHook(logCollector)
            }

            if err = log.RegisterANSHookIfConfigured(GeneralConfig.CorrelationID); err != nil {
                log.Entry().WithError(err).Warn("failed to set up SAP Alert Notification Service log hook")
            }

            validation, err := validation.New(validation.WithJSONNamesForStructFields(), validation.WithPredefinedErrorMessages())
            if err != nil {
                return err
            }
            if err = validation.ValidateStruct(stepConfig); err != nil {
                log.SetErrorCategory(log.ErrorConfiguration)
                return err
            }

            return nil
        },
        Run: func(_ *cobra.Command, _ []string) {
            stepTelemetryData := telemetry.CustomData{}
            stepTelemetryData.ErrorCode = "1"
            handler := func() {
                influx.persist(GeneralConfig.EnvRootPath, "influx")
                reports.persist(stepConfig, GeneralConfig.GCPJsonKeyFilePath, GeneralConfig.GCSBucketId, GeneralConfig.GCSFolderPath, GeneralConfig.GCSSubFolder)
                config.RemoveVaultSecretFiles()
                stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
                stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
                stepTelemetryData.PiperCommitHash = GitCommit
                telemetryClient.SetData(&stepTelemetryData)
                telemetryClient.Send()
                if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 {
                    splunkClient.Send(telemetryClient.GetData(), logCollector)
                }
            }
            log.DeferExitHandler(handler)
            defer handler()
            telemetryClient.Initialize(GeneralConfig.NoTelemetry, STEP_NAME)
            if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 {
                splunkClient.Initialize(GeneralConfig.CorrelationID,
                    GeneralConfig.HookConfig.SplunkConfig.Dsn,
                    GeneralConfig.HookConfig.SplunkConfig.Token,
                    GeneralConfig.HookConfig.SplunkConfig.Index,
                    GeneralConfig.HookConfig.SplunkConfig.SendLogs)
            }
            checkmarxOneExecuteScan(stepConfig, &stepTelemetryData, &influx)
            stepTelemetryData.ErrorCode = "0"
            log.Entry().Info("SUCCESS")
        },
    }

    addCheckmarxOneExecuteScanFlags(createCheckmarxOneExecuteScanCmd, &stepConfig)
    return createCheckmarxOneExecuteScanCmd
}
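The audit baseline spelled out in the command's long description above is enforced through the vulnerabilityThreshold* options declared below. As an illustration of one plausible reading of the `percentage` versus `absolute` unit (the actual enforcement lives in cmd/checkmarxOneExecuteScan.go, whose diff is suppressed at the top of this page, and may differ in detail):

    // Illustrative sketch only, not taken from the suppressed implementation.
    // unit "percentage": the threshold is the minimum share of findings that must
    // already be audited (e.g. low findings with threshold 10 need at least 10%
    // 'Confirmed' or 'Not Exploitable'); unit "absolute": the threshold is the
    // maximum number of unaudited findings that is tolerated.
    func thresholdViolated(unit string, threshold, total, audited int) bool {
        if total == 0 {
            return false
        }
        switch unit {
        case "percentage":
            return audited*100/total < threshold
        case "absolute":
            return total-audited > threshold
        }
        return false
    }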
|
||||
|
||||
func addCheckmarxOneExecuteScanFlags(cmd *cobra.Command, stepConfig *checkmarxOneExecuteScanOptions) {
|
||||
cmd.Flags().StringSliceVar(&stepConfig.Assignees, "assignees", []string{``}, "Defines the assignees for the Github Issue created/updated with the results of the scan as a list of login names. [Not yet supported]")
|
||||
cmd.Flags().BoolVar(&stepConfig.AvoidDuplicateProjectScans, "avoidDuplicateProjectScans", true, "Whether duplicate scans of the same project state shall be avoided or not [Not yet supported]")
|
||||
cmd.Flags().StringVar(&stepConfig.FilterPattern, "filterPattern", `!**/node_modules/**, !**/.xmake/**, !**/*_test.go, !**/vendor/**/*.go, **/*.html, **/*.xml, **/*.go, **/*.py, **/*.js, **/*.scala, **/*.ts`, "The filter pattern used to zip the files relevant for scanning, patterns can be negated by setting an exclamation mark in front i.e. `!test/*.js` would avoid adding any javascript files located in the test directory")
|
||||
cmd.Flags().StringVar(&stepConfig.FullScanCycle, "fullScanCycle", `5`, "Indicates how often a full scan should happen between the incremental scans when activated")
|
||||
cmd.Flags().BoolVar(&stepConfig.FullScansScheduled, "fullScansScheduled", true, "Whether full scans are to be scheduled or not. Should be used in relation with `incremental` and `fullScanCycle`")
|
||||
cmd.Flags().BoolVar(&stepConfig.GeneratePdfReport, "generatePdfReport", true, "Whether to generate a PDF report of the analysis results or not")
|
||||
cmd.Flags().StringVar(&stepConfig.GithubAPIURL, "githubApiUrl", `https://api.github.com`, "Set the GitHub API URL.")
|
||||
cmd.Flags().StringVar(&stepConfig.GithubToken, "githubToken", os.Getenv("PIPER_githubToken"), "GitHub personal access token as per https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line")
|
||||
cmd.Flags().BoolVar(&stepConfig.Incremental, "incremental", true, "Whether incremental scans are to be applied which optimizes the scan time but might reduce detection capabilities. Therefore full scans are still required from time to time and should be scheduled via `fullScansScheduled` and `fullScanCycle`")
|
||||
cmd.Flags().StringVar(&stepConfig.Owner, "owner", os.Getenv("PIPER_owner"), "Set the GitHub organization.")
|
||||
cmd.Flags().StringVar(&stepConfig.ClientSecret, "clientSecret", os.Getenv("PIPER_clientSecret"), "The clientSecret to authenticate using a service account")
|
||||
cmd.Flags().StringVar(&stepConfig.APIKey, "APIKey", os.Getenv("PIPER_APIKey"), "The APIKey to authenticate")
|
||||
cmd.Flags().StringVar(&stepConfig.Preset, "preset", os.Getenv("PIPER_preset"), "The preset to use for scanning, if not set explicitly the step will attempt to look up the project's setting based on the availability of `checkmarxOneCredentialsId`")
|
||||
cmd.Flags().StringVar(&stepConfig.LanguageMode, "languageMode", `multi`, "Specifies whether the scan should be run for a 'single' language or 'multi' language, default 'multi'")
|
||||
cmd.Flags().StringVar(&stepConfig.ProjectCriticality, "projectCriticality", `3`, "The criticality of the checkmarxOne project, used during project creation")
|
||||
cmd.Flags().StringVar(&stepConfig.ProjectName, "projectName", os.Getenv("PIPER_projectName"), "The name of the checkmarxOne project to scan into")
|
||||
cmd.Flags().StringVar(&stepConfig.Branch, "branch", os.Getenv("PIPER_branch"), "Used to supply the branch scanned in the repository, or a friendly-name set by the user")
|
||||
cmd.Flags().StringVar(&stepConfig.PullRequestName, "pullRequestName", os.Getenv("PIPER_pullRequestName"), "Used to supply the name for the newly created PR project branch when being used in pull request scenarios. This is supplied by the orchestrator.")
|
||||
cmd.Flags().StringVar(&stepConfig.Repository, "repository", os.Getenv("PIPER_repository"), "Set the GitHub repository.")
|
||||
cmd.Flags().StringVar(&stepConfig.ServerURL, "serverUrl", os.Getenv("PIPER_serverUrl"), "The URL pointing to the root of the checkmarxOne server to be used")
|
||||
cmd.Flags().StringVar(&stepConfig.IamURL, "iamUrl", os.Getenv("PIPER_iamUrl"), "The URL pointing to the access control root of the checkmarxOne IAM server to be used")
|
||||
cmd.Flags().StringVar(&stepConfig.Tenant, "tenant", os.Getenv("PIPER_tenant"), "The name of the checkmarxOne tenant to be used")
|
||||
cmd.Flags().StringVar(&stepConfig.SourceEncoding, "sourceEncoding", `1`, "The source encoding to be used, if not set explicitly the project's default will be used [Not yet supported]")
|
||||
cmd.Flags().StringVar(&stepConfig.GroupName, "groupName", os.Getenv("PIPER_groupName"), "The full name of the group to assign newly created projects to which is preferred to groupId")
|
||||
cmd.Flags().StringVar(&stepConfig.ApplicationName, "applicationName", os.Getenv("PIPER_applicationName"), "The full name of the Checkmarx One application to which the newly created projects will be assigned")
|
||||
cmd.Flags().StringVar(&stepConfig.ClientID, "clientId", os.Getenv("PIPER_clientId"), "The username to authenticate")
|
||||
cmd.Flags().BoolVar(&stepConfig.VerifyOnly, "verifyOnly", false, "Whether the step shall only apply verification checks or whether it does a full scan and check cycle")
|
||||
cmd.Flags().BoolVar(&stepConfig.VulnerabilityThresholdEnabled, "vulnerabilityThresholdEnabled", true, "Whether the thresholds are enabled or not. If enabled the build will be set to `vulnerabilityThresholdResult` in case a specific threshold value is exceeded")
|
||||
cmd.Flags().IntVar(&stepConfig.VulnerabilityThresholdHigh, "vulnerabilityThresholdHigh", 100, "The specific threshold for high severity findings")
|
||||
cmd.Flags().IntVar(&stepConfig.VulnerabilityThresholdMedium, "vulnerabilityThresholdMedium", 100, "The specific threshold for medium severity findings")
|
||||
cmd.Flags().IntVar(&stepConfig.VulnerabilityThresholdLow, "vulnerabilityThresholdLow", 10, "The specific threshold for low severity findings")
|
||||
cmd.Flags().BoolVar(&stepConfig.VulnerabilityThresholdLowPerQuery, "vulnerabilityThresholdLowPerQuery", false, "Flag to activate/deactivate the threshold of low severity findings per query")
|
||||
cmd.Flags().IntVar(&stepConfig.VulnerabilityThresholdLowPerQueryMax, "vulnerabilityThresholdLowPerQueryMax", 10, "Upper threshold of low severity findings per query (in absolute number)")
|
||||
cmd.Flags().StringVar(&stepConfig.VulnerabilityThresholdResult, "vulnerabilityThresholdResult", `FAILURE`, "The result of the build in case thresholds are enabled and exceeded")
|
||||
cmd.Flags().StringVar(&stepConfig.VulnerabilityThresholdUnit, "vulnerabilityThresholdUnit", `percentage`, "The unit for the threshold to apply.")
|
||||
cmd.Flags().BoolVar(&stepConfig.IsOptimizedAndScheduled, "isOptimizedAndScheduled", false, "Whether the pipeline runs in optimized mode and the current execution is a scheduled one")
|
||||
cmd.Flags().BoolVar(&stepConfig.CreateResultIssue, "createResultIssue", false, "Activate creation of a result issue in GitHub.")
|
||||
cmd.Flags().BoolVar(&stepConfig.ConvertToSarif, "convertToSarif", true, "Convert the checkmarxOne XML scan results to the open SARIF standard.")
|
||||
|
||||
cmd.MarkFlagRequired("clientSecret")
|
||||
cmd.MarkFlagRequired("APIKey")
|
||||
cmd.MarkFlagRequired("projectCriticality")
|
||||
cmd.MarkFlagRequired("projectName")
|
||||
cmd.MarkFlagRequired("branch")
|
||||
cmd.MarkFlagRequired("serverUrl")
|
||||
cmd.MarkFlagRequired("iamUrl")
|
||||
cmd.MarkFlagRequired("tenant")
|
||||
cmd.MarkFlagRequired("clientId")
|
||||
}
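For orientation, the flags marked required above can be exercised directly against the generated command; a hypothetical sketch with placeholder values (in a pipeline, Piper resolves these from its step configuration and from the checkmarxOneCredentialsId / checkmarxOneAPIKey secrets declared in the step metadata below, rather than from literal CLI arguments):

    // Hypothetical usage sketch; all values are placeholders.
    scanCmd := CheckmarxOneExecuteScanCommand()
    scanCmd.SetArgs([]string{
        "--serverUrl", "https://ast.example.com",
        "--iamUrl", "https://iam.example.com",
        "--tenant", "my-tenant",
        "--clientId", "piper-service-account",
        "--clientSecret", "****",
        "--APIKey", "****",
        "--projectName", "my-project",
        "--projectCriticality", "3",
        "--branch", "main",
    })
    err := scanCmd.Execute() // runs PreRunE (config resolution, validation) and then the scan
    _ = err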
|
||||
|
||||
// retrieve step metadata
|
||||
func checkmarxOneExecuteScanMetadata() config.StepData {
|
||||
var theMetaData = config.StepData{
|
||||
Metadata: config.StepMetadata{
|
||||
Name: "checkmarxOneExecuteScan",
|
||||
Aliases: []config.Alias{},
|
||||
Description: "checkmarxOne is the recommended tool for security scans of JavaScript, iOS, Swift and Ruby code.",
|
||||
},
|
||||
Spec: config.StepSpec{
|
||||
Inputs: config.StepInputs{
|
||||
Secrets: []config.StepSecrets{
|
||||
{Name: "checkmarxOneCredentialsId", Description: "Jenkins 'Username with password' credentials ID containing ClientID and ClientSecret to communicate with the checkmarxOne backend.", Type: "jenkins"},
|
||||
{Name: "checkmarxOneAPIKey", Description: "Jenkins 'Secret Text' containing the APIKey to communicate with the checkmarxOne backend.", Type: "jenkins"},
|
||||
{Name: "githubTokenCredentialsId", Description: "Jenkins 'Secret text' credentials ID containing token to authenticate to GitHub.", Type: "jenkins"},
|
||||
},
|
||||
Resources: []config.StepResources{
|
||||
{Name: "checkmarxOne", Type: "stash"},
|
||||
},
|
||||
Parameters: []config.StepParameters{
|
||||
{
|
||||
Name: "assignees",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "[]string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: []string{``},
|
||||
},
|
||||
{
|
||||
Name: "avoidDuplicateProjectScans",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "bool",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: true,
|
||||
},
|
||||
{
|
||||
Name: "filterPattern",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: `!**/node_modules/**, !**/.xmake/**, !**/*_test.go, !**/vendor/**/*.go, **/*.html, **/*.xml, **/*.go, **/*.py, **/*.js, **/*.scala, **/*.ts`,
|
||||
},
|
||||
{
|
||||
Name: "fullScanCycle",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: `5`,
|
||||
},
|
||||
{
|
||||
Name: "fullScansScheduled",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "bool",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: true,
|
||||
},
|
||||
{
|
||||
Name: "generatePdfReport",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "bool",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: true,
|
||||
},
|
||||
{
|
||||
Name: "githubApiUrl",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: `https://api.github.com`,
|
||||
},
|
||||
{
|
||||
Name: "githubToken",
|
||||
ResourceRef: []config.ResourceReference{
|
||||
{
|
||||
Name: "githubTokenCredentialsId",
|
||||
Type: "secret",
|
||||
},
|
||||
|
||||
{
|
||||
Name: "githubVaultSecretName",
|
||||
Type: "vaultSecret",
|
||||
Default: "github",
|
||||
},
|
||||
},
|
||||
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{{Name: "access_token"}},
|
||||
Default: os.Getenv("PIPER_githubToken"),
|
||||
},
|
||||
{
|
||||
Name: "incremental",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "bool",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: true,
|
||||
},
|
||||
{
|
||||
Name: "owner",
|
||||
ResourceRef: []config.ResourceReference{
|
||||
{
|
||||
Name: "commonPipelineEnvironment",
|
||||
Param: "github/owner",
|
||||
},
|
||||
},
|
||||
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{{Name: "githubOrg"}},
|
||||
Default: os.Getenv("PIPER_owner"),
|
||||
},
|
||||
{
|
||||
Name: "clientSecret",
|
||||
ResourceRef: []config.ResourceReference{
|
||||
{
|
||||
Name: "checkmarxOneCredentialsId",
|
||||
Param: "clientSecret",
|
||||
Type: "secret",
|
||||
},
|
||||
|
||||
{
|
||||
Name: "checkmarxOneVaultSecretName",
|
||||
Type: "vaultSecret",
|
||||
Default: "checkmarxOne",
|
||||
},
|
||||
},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: true,
|
||||
Aliases: []config.Alias{},
|
||||
Default: os.Getenv("PIPER_clientSecret"),
|
||||
},
|
||||
{
|
||||
Name: "APIKey",
|
||||
ResourceRef: []config.ResourceReference{
|
||||
{
|
||||
Name: "checkmarxOneAPIKey",
|
||||
Param: "APIKey",
|
||||
Type: "secret",
|
||||
},
|
||||
|
||||
{
|
||||
Name: "checkmarxOneVaultSecretName",
|
||||
Type: "vaultSecret",
|
||||
Default: "checkmarxOne",
|
||||
},
|
||||
},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: true,
|
||||
Aliases: []config.Alias{},
|
||||
Default: os.Getenv("PIPER_APIKey"),
|
||||
},
|
||||
{
|
||||
Name: "preset",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: os.Getenv("PIPER_preset"),
|
||||
},
|
||||
{
|
||||
Name: "languageMode",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: `multi`,
|
||||
},
|
||||
{
|
||||
Name: "projectCriticality",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: true,
|
||||
Aliases: []config.Alias{},
|
||||
Default: `3`,
|
||||
},
|
||||
{
|
||||
Name: "projectName",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: true,
|
||||
Aliases: []config.Alias{},
|
||||
Default: os.Getenv("PIPER_projectName"),
|
||||
},
|
||||
{
|
||||
Name: "branch",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: true,
|
||||
Aliases: []config.Alias{},
|
||||
Default: os.Getenv("PIPER_branch"),
|
||||
},
|
||||
{
|
||||
Name: "pullRequestName",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: os.Getenv("PIPER_pullRequestName"),
|
||||
},
|
||||
{
|
||||
Name: "repository",
|
||||
ResourceRef: []config.ResourceReference{
|
||||
{
|
||||
Name: "commonPipelineEnvironment",
|
||||
Param: "github/repository",
|
||||
},
|
||||
},
|
||||
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{{Name: "githubRepo"}},
|
||||
Default: os.Getenv("PIPER_repository"),
|
||||
},
|
||||
{
|
||||
Name: "serverUrl",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: true,
|
||||
Aliases: []config.Alias{},
|
||||
Default: os.Getenv("PIPER_serverUrl"),
|
||||
},
|
||||
{
|
||||
Name: "iamUrl",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: true,
|
||||
Aliases: []config.Alias{},
|
||||
Default: os.Getenv("PIPER_iamUrl"),
|
||||
},
|
||||
{
|
||||
Name: "tenant",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: true,
|
||||
Aliases: []config.Alias{},
|
||||
Default: os.Getenv("PIPER_tenant"),
|
||||
},
|
||||
{
|
||||
Name: "sourceEncoding",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: `1`,
|
||||
},
|
||||
{
|
||||
Name: "groupName",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: os.Getenv("PIPER_groupName"),
|
||||
},
|
||||
{
|
||||
Name: "applicationName",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: os.Getenv("PIPER_applicationName"),
|
||||
},
|
||||
{
|
||||
Name: "clientId",
|
||||
ResourceRef: []config.ResourceReference{
|
||||
{
|
||||
Name: "checkmarxOneCredentialsId",
|
||||
Param: "clientId",
|
||||
Type: "secret",
|
||||
},
|
||||
|
||||
{
|
||||
Name: "checkmarxOneVaultSecretName",
|
||||
Type: "vaultSecret",
|
||||
Default: "checkmarxOne",
|
||||
},
|
||||
},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: true,
|
||||
Aliases: []config.Alias{},
|
||||
Default: os.Getenv("PIPER_clientId"),
|
||||
},
|
||||
{
|
||||
Name: "verifyOnly",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "bool",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: false,
|
||||
},
|
||||
{
|
||||
Name: "vulnerabilityThresholdEnabled",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "bool",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: true,
|
||||
},
|
||||
{
|
||||
Name: "vulnerabilityThresholdHigh",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "int",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: 100,
|
||||
},
|
||||
{
|
||||
Name: "vulnerabilityThresholdMedium",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "int",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: 100,
|
||||
},
|
||||
{
|
||||
Name: "vulnerabilityThresholdLow",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "int",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: 10,
|
||||
},
|
||||
{
|
||||
Name: "vulnerabilityThresholdLowPerQuery",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "bool",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: false,
|
||||
},
|
||||
{
|
||||
Name: "vulnerabilityThresholdLowPerQueryMax",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "int",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: 10,
|
||||
},
|
||||
{
|
||||
Name: "vulnerabilityThresholdResult",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: `FAILURE`,
|
||||
},
|
||||
{
|
||||
Name: "vulnerabilityThresholdUnit",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "string",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: `percentage`,
|
||||
},
|
||||
{
|
||||
Name: "isOptimizedAndScheduled",
|
||||
ResourceRef: []config.ResourceReference{
|
||||
{
|
||||
Name: "commonPipelineEnvironment",
|
||||
Param: "custom/isOptimizedAndScheduled",
|
||||
},
|
||||
},
|
||||
Scope: []string{"PARAMETERS"},
|
||||
Type: "bool",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: false,
|
||||
},
|
||||
{
|
||||
Name: "createResultIssue",
|
||||
ResourceRef: []config.ResourceReference{
|
||||
{
|
||||
Name: "commonPipelineEnvironment",
|
||||
Param: "custom/isOptimizedAndScheduled",
|
||||
},
|
||||
},
|
||||
Scope: []string{"GENERAL", "PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "bool",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: false,
|
||||
},
|
||||
{
|
||||
Name: "convertToSarif",
|
||||
ResourceRef: []config.ResourceReference{},
|
||||
Scope: []string{"PARAMETERS", "STAGES", "STEPS"},
|
||||
Type: "bool",
|
||||
Mandatory: false,
|
||||
Aliases: []config.Alias{},
|
||||
Default: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
Outputs: config.StepOutputs{
|
||||
Resources: []config.StepResources{
|
||||
{
|
||||
Name: "influx",
|
||||
Type: "influx",
|
||||
Parameters: []map[string]interface{}{
|
||||
{"name": "step_data", "fields": []map[string]string{{"name": "checkmarxOne"}}},
|
||||
{"name": "checkmarxOne_data", "fields": []map[string]string{{"name": "high_issues"}, {"name": "high_not_false_postive"}, {"name": "high_not_exploitable"}, {"name": "high_confirmed"}, {"name": "high_urgent"}, {"name": "high_proposed_not_exploitable"}, {"name": "high_to_verify"}, {"name": "medium_issues"}, {"name": "medium_not_false_postive"}, {"name": "medium_not_exploitable"}, {"name": "medium_confirmed"}, {"name": "medium_urgent"}, {"name": "medium_proposed_not_exploitable"}, {"name": "medium_to_verify"}, {"name": "low_issues"}, {"name": "low_not_false_postive"}, {"name": "low_not_exploitable"}, {"name": "low_confirmed"}, {"name": "low_urgent"}, {"name": "low_proposed_not_exploitable"}, {"name": "low_to_verify"}, {"name": "information_issues"}, {"name": "information_not_false_postive"}, {"name": "information_not_exploitable"}, {"name": "information_confirmed"}, {"name": "information_urgent"}, {"name": "information_proposed_not_exploitable"}, {"name": "information_to_verify"}, {"name": "lines_of_code_scanned"}, {"name": "files_scanned"}, {"name": "initiator_name"}, {"name": "owner"}, {"name": "scan_id"}, {"name": "project_id"}, {"name": "projectName"}, {"name": "group"}, {"name": "group_full_path_on_report_date"}, {"name": "scan_start"}, {"name": "scan_time"}, {"name": "checkmarxOne_version"}, {"name": "scan_type"}, {"name": "preset"}, {"name": "deep_link"}, {"name": "report_creation_time"}}},
|
||||
},
|
||||
},
|
||||
{
|
||||
Name: "reports",
|
||||
Type: "reports",
|
||||
Parameters: []map[string]interface{}{
|
||||
{"filePattern": "**/piper_checkmarxone_report.html", "type": "checkmarxone"},
|
||||
{"filePattern": "**/Cx1_SASTResults_*.xml", "type": "checkmarxone"},
|
||||
{"filePattern": "**/ScanReport.*", "type": "checkmarxone"},
|
||||
{"filePattern": "**/toolrun_checkmarxone_*.json", "type": "checkmarxone"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
return theMetaData
|
||||
}
cmd/checkmarxOneExecuteScan_generated_test.go (new file, 20 lines)
@@ -0,0 +1,20 @@
//go:build unit
// +build unit

package cmd

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestCheckmarxOneExecuteScanCommand(t *testing.T) {
    t.Parallel()

    testCmd := CheckmarxOneExecuteScanCommand()

    // only high level testing performed - details are tested in step generation procedure
    assert.Equal(t, "checkmarxOneExecuteScan", testCmd.Use, "command name incorrect")

}

cmd/checkmarxOneExecuteScan_test.go (new file, 314 lines)
@@ -0,0 +1,314 @@
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/SAP/jenkins-library/pkg/checkmarxone"
|
||||
"github.com/SAP/jenkins-library/pkg/piperutils"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
type checkmarxOneSystemMock struct {
|
||||
response interface{}
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) DownloadReport(reportID string) ([]byte, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetReportStatus(reportID string) (checkmarxOne.ReportStatus, error) {
|
||||
return checkmarxOne.ReportStatus{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) RequestNewReport(scanID, projectID, branch, reportType string) (string, error) {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) CreateApplication(appname string) (checkmarxOne.Application, error) {
|
||||
return checkmarxOne.Application{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetApplicationByName(appname string) (checkmarxOne.Application, error) {
|
||||
return checkmarxOne.Application{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) UpdateApplication(app *checkmarxOne.Application) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetScan(scanID string) (checkmarxOne.Scan, error) {
|
||||
return checkmarxOne.Scan{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetScanMetadata(scanID string) (checkmarxOne.ScanMetadata, error) {
|
||||
return checkmarxOne.ScanMetadata{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetScanResults(scanID string, limit uint64) ([]checkmarxOne.ScanResult, error) {
|
||||
return []checkmarxOne.ScanResult{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetScanSummary(scanID string) (checkmarxOne.ScanSummary, error) {
|
||||
return checkmarxOne.ScanSummary{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetResultsPredicates(SimilarityID int64, ProjectID string) ([]checkmarxOne.ResultsPredicates, error) {
|
||||
return []checkmarxOne.ResultsPredicates{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetScanWorkflow(scanID string) ([]checkmarxOne.WorkflowLog, error) {
|
||||
return []checkmarxOne.WorkflowLog{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetLastScans(projectID string, limit int) ([]checkmarxOne.Scan, error) {
|
||||
return []checkmarxOne.Scan{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetLastScansByStatus(projectID string, limit int, status []string) ([]checkmarxOne.Scan, error) {
|
||||
return []checkmarxOne.Scan{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) ScanProject(projectID, sourceUrl, branch, scanType string, settings []checkmarxOne.ScanConfiguration) (checkmarxOne.Scan, error) {
|
||||
return checkmarxOne.Scan{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) ScanProjectZip(projectID, sourceUrl, branch string, settings []checkmarxOne.ScanConfiguration) (checkmarxOne.Scan, error) {
|
||||
return checkmarxOne.Scan{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) ScanProjectGit(projectID, repoUrl, branch string, settings []checkmarxOne.ScanConfiguration) (checkmarxOne.Scan, error) {
|
||||
return checkmarxOne.Scan{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) UploadProjectSourceCode(projectID string, zipFile string) (string, error) {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) CreateProject(projectName string, groupIDs []string) (checkmarxOne.Project, error) {
|
||||
return checkmarxOne.Project{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetPresets() ([]checkmarxOne.Preset, error) {
|
||||
return []checkmarxOne.Preset{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetProjectByID(projectID string) (checkmarxOne.Project, error) {
|
||||
return checkmarxOne.Project{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetProjectsByName(projectName string) ([]checkmarxOne.Project, error) {
|
||||
str := `[
|
||||
{
|
||||
"id": "3cb99ae5-5245-4cf7-83aa-9b517b8c1c57",
|
||||
"name": "ssba-github",
|
||||
"createdAt": "2023-03-21T16:48:33.224554Z",
|
||||
"updatedAt": "2023-03-21T16:48:33.224554Z",
|
||||
"groups": [
|
||||
"af361bd1-e478-40f6-a4fb-d479828d5998"
|
||||
],
|
||||
"tags": {},
|
||||
"repoUrl": "",
|
||||
"mainBranch": "",
|
||||
"criticality": 3
|
||||
},
|
||||
{
|
||||
"id": "3cb99ae5-5245-4cf7-83aa-9b517b8c1c58",
|
||||
"name": "ssba-local",
|
||||
"createdAt": "2023-03-21T16:48:33.224554Z",
|
||||
"updatedAt": "2023-03-21T16:48:33.224554Z",
|
||||
"groups": [
|
||||
"af361bd1-e478-40f6-a4fb-d479828d5998"
|
||||
],
|
||||
"tags": {},
|
||||
"repoUrl": "",
|
||||
"mainBranch": "",
|
||||
"criticality": 3
|
||||
},
|
||||
{
|
||||
"id": "3cb99ae5-5245-4cf7-83aa-9b517b8c1c59",
|
||||
"name": "ssba-zip",
|
||||
"createdAt": "2023-03-21T16:48:33.224554Z",
|
||||
"updatedAt": "2023-03-21T16:48:33.224554Z",
|
||||
"groups": [
|
||||
"af361bd1-e478-40f6-a4fb-d479828d5998"
|
||||
],
|
||||
"tags": {},
|
||||
"repoUrl": "",
|
||||
"mainBranch": "",
|
||||
"criticality": 3
|
||||
}
|
||||
]`
|
||||
projects := []checkmarxOne.Project{}
|
||||
_ = json.Unmarshal([]byte(str), &projects)
|
||||
|
||||
return projects, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetProjectsByNameAndGroup(projectName, groupID string) ([]checkmarxOne.Project, error) {
|
||||
return []checkmarxOne.Project{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetProjects() ([]checkmarxOne.Project, error) {
|
||||
return []checkmarxOne.Project{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetQueries() ([]checkmarxOne.Query, error) {
|
||||
return []checkmarxOne.Query{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetGroups() ([]checkmarxOne.Group, error) {
|
||||
str := `
|
||||
[
|
||||
{
|
||||
"id": "d857c923-cf53-48bc-bfe4-163f66ed7b39",
|
||||
"name": "Group1"
|
||||
},
|
||||
{
|
||||
"id": "a8009bce-c24f-4edc-a931-06eb91ace2f5",
|
||||
"name": "Group2"
|
||||
},
|
||||
{
|
||||
"id": "a9ef684c-a61b-4647-9c49-363efc3879d7",
|
||||
"name": "01100035870000224721"
|
||||
},
|
||||
{
|
||||
"id": "3078680e-d796-4607-8e96-0d658eff799a",
|
||||
"name": "Group3"
|
||||
}
|
||||
]
|
||||
`
|
||||
groups := []checkmarxOne.Group{}
|
||||
_ = json.Unmarshal([]byte(str), &groups)
|
||||
|
||||
return groups, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetGroupByName(groupName string) (checkmarxOne.Group, error) {
|
||||
groups, err := sys.GetGroups()
|
||||
var group checkmarxOne.Group
|
||||
if err != nil {
|
||||
return group, err
|
||||
}
|
||||
|
||||
for _, g := range groups {
|
||||
if g.Name == groupName {
|
||||
return g, nil
|
||||
}
|
||||
}
|
||||
|
||||
return group, errors.New(fmt.Sprintf("No group matching %v", groupName))
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetGroupByID(groupID string) (checkmarxOne.Group, error) {
|
||||
return checkmarxOne.Group{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) SetProjectBranch(projectID, branch string, allowOverride bool) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) SetProjectPreset(projectID, presetName string, allowOverride bool) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) SetProjectLanguageMode(projectID, languageMode string, allowOverride bool) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) SetProjectFileFilter(projectID, filter string, allowOverride bool) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) GetProjectConfiguration(projectID string) ([]checkmarxOne.ProjectConfigurationSetting, error) {
|
||||
return []checkmarxOne.ProjectConfigurationSetting{}, nil
|
||||
}
|
||||
|
||||
func (sys *checkmarxOneSystemMock) UpdateProjectConfiguration(projectID string, settings []checkmarxOne.ProjectConfigurationSetting) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
type checkmarxOneExecuteScanHelperMock struct {
|
||||
ctx context.Context
|
||||
config checkmarxOneExecuteScanOptions
|
||||
sys *checkmarxOne.SystemInstance
|
||||
influx *checkmarxOneExecuteScanInflux
|
||||
utils checkmarxOneExecuteScanUtils
|
||||
Project *checkmarxOne.Project
|
||||
Group *checkmarxOne.Group
|
||||
App *checkmarxOne.Application
|
||||
reports []piperutils.Path
|
||||
}
|
||||
|
||||
func TestGetProjectByName(t *testing.T) {
|
||||
t.Parallel()
|
||||
sys := &checkmarxOneSystemMock{}
|
||||
t.Run("project name not found", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
options := checkmarxOneExecuteScanOptions{ProjectName: "ssba_notexist", VulnerabilityThresholdUnit: "absolute", FullScanCycle: "2", Incremental: true, FullScansScheduled: true, Preset: "CheckmarxDefault", GroupName: "TestGroup", VulnerabilityThresholdEnabled: true, GeneratePdfReport: true, APIKey: "testAPIKey", ServerURL: "testURL", IamURL: "testIamURL", Tenant: "testTenant"}
|
||||
|
||||
cx1sh := checkmarxOneExecuteScanHelper{nil, options, sys, nil, nil, nil, nil, nil, nil}
|
||||
|
||||
_, err := cx1sh.GetProjectByName()
|
||||
|
||||
assert.Contains(t, fmt.Sprint(err), "project not found")
|
||||
})
|
||||
t.Run("project name exists", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
options := checkmarxOneExecuteScanOptions{ProjectName: "ssba-github", VulnerabilityThresholdUnit: "absolute", FullScanCycle: "2", Incremental: true, FullScansScheduled: true, Preset: "CheckmarxDefault", GroupName: "TestGroup", VulnerabilityThresholdEnabled: true, GeneratePdfReport: true, APIKey: "testAPIKey", ServerURL: "testURL", IamURL: "testIamURL", Tenant: "testTenant"}
|
||||
|
||||
cx1sh := checkmarxOneExecuteScanHelper{nil, options, sys, nil, nil, nil, nil, nil, nil}
|
||||
|
||||
project, err := cx1sh.GetProjectByName()
|
||||
assert.NoError(t, err, "Error occurred but none expected")
|
||||
assert.Equal(t, project.ProjectID, "3cb99ae5-5245-4cf7-83aa-9b517b8c1c57")
|
||||
assert.Equal(t, project.Name, "ssba-github")
|
||||
assert.Equal(t, project.Groups[0], "af361bd1-e478-40f6-a4fb-d479828d5998")
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetGroup(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
sys := &checkmarxOneSystemMock{}
|
||||
|
||||
t.Run("group ID and group name is not provided", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
options := checkmarxOneExecuteScanOptions{ProjectName: "ssba", VulnerabilityThresholdUnit: "absolute", FullScanCycle: "2", Incremental: true, FullScansScheduled: true, Preset: "CheckmarxDefault" /*GroupName: "NotProvided",*/, VulnerabilityThresholdEnabled: true, GeneratePdfReport: true, APIKey: "testAPIKey", ServerURL: "testURL", IamURL: "testIamURL", Tenant: "testTenant"}

cx1sh := checkmarxOneExecuteScanHelper{nil, options, sys, nil, nil, nil, nil, nil, nil}
_, err := cx1sh.GetGroup()
assert.Contains(t, fmt.Sprint(err), "No group ID or group name provided")
})

t.Run("group name not found", func(t *testing.T) {
t.Parallel()

options := checkmarxOneExecuteScanOptions{ProjectName: "ssba", VulnerabilityThresholdUnit: "absolute", FullScanCycle: "2", Incremental: true, FullScansScheduled: true, Preset: "CheckmarxDefault", GroupName: "GroupNotExist", VulnerabilityThresholdEnabled: true, GeneratePdfReport: true, APIKey: "testAPIKey", ServerURL: "testURL", IamURL: "testIamURL", Tenant: "testTenant"}

cx1sh := checkmarxOneExecuteScanHelper{nil, options, sys, nil, nil, nil, nil, nil, nil}

_, err := cx1sh.GetGroup()
assert.Contains(t, fmt.Sprint(err), "Failed to get Checkmarx One group by Name GroupNotExist: No group matching GroupNotExist")
})

t.Run("group name exists", func(t *testing.T) {
t.Parallel()

options := checkmarxOneExecuteScanOptions{ProjectName: "ssba-github", VulnerabilityThresholdUnit: "absolute", FullScanCycle: "2", Incremental: true, FullScansScheduled: true, Preset: "CheckmarxDefault", GroupName: "Group2", VulnerabilityThresholdEnabled: true, GeneratePdfReport: true, APIKey: "testAPIKey", ServerURL: "testURL", IamURL: "testIamURL", Tenant: "testTenant"}

cx1sh := checkmarxOneExecuteScanHelper{nil, options, sys, nil, nil, nil, nil, nil, nil}

group, err := cx1sh.GetGroup()
assert.NoError(t, err, "Error occurred but none expected")
assert.Equal(t, group.GroupID, "a8009bce-c24f-4edc-a931-06eb91ace2f5")
assert.Equal(t, group.Name, "Group2")
})
}
@@ -40,6 +40,7 @@ func GetAllStepMetadata() map[string]config.StepData {
"azureBlobUpload": azureBlobUploadMetadata(),
"batsExecuteTests": batsExecuteTestsMetadata(),
"checkmarxExecuteScan": checkmarxExecuteScanMetadata(),
"checkmarxOneExecuteScan": checkmarxOneExecuteScanMetadata(),
"cloudFoundryCreateService": cloudFoundryCreateServiceMetadata(),
"cloudFoundryCreateServiceKey": cloudFoundryCreateServiceKeyMetadata(),
"cloudFoundryCreateSpace": cloudFoundryCreateSpaceMetadata(),
@@ -111,6 +111,7 @@ func Execute() {
rootCmd.AddCommand(AbapEnvironmentCreateTagCommand())
rootCmd.AddCommand(AbapEnvironmentCreateSystemCommand())
rootCmd.AddCommand(CheckmarxExecuteScanCommand())
rootCmd.AddCommand(CheckmarxOneExecuteScanCommand())
rootCmd.AddCommand(FortifyExecuteScanCommand())
rootCmd.AddCommand(CodeqlExecuteScanCommand())
rootCmd.AddCommand(CredentialdiggerScanCommand())
7
documentation/docs/steps/checkmarxOneExecuteScan.md
Normal file
@@ -0,0 +1,7 @@
# ${docGenStepName}

## ${docGenDescription}

## ${docGenParameters}

## ${docGenConfiguration}
@@ -83,6 +83,7 @@ nav:
- batsExecuteTests: steps/batsExecuteTests.md
- buildExecute: steps/buildExecute.md
- checkmarxExecuteScan: steps/checkmarxExecuteScan.md
- checkmarxOneExecuteScan: steps/checkmarxOneExecuteScan.md
- checksPublishResults: steps/checksPublishResults.md
- cfManifestSubstituteVariables: steps/cfManifestSubstituteVariables.md
- cloudFoundryCreateService: steps/cloudFoundryCreateService.md
1316
pkg/checkmarxone/checkmarxone.go
Normal file
File diff suppressed because it is too large
296
pkg/checkmarxone/checkmarxone_test.go
Normal file
@@ -0,0 +1,296 @@
package checkmarxOne

import (
"bytes"
"errors"
"fmt"
"io"
"io/ioutil"
"net/http"
"strings"
"testing"

piperHttp "github.com/SAP/jenkins-library/pkg/http"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/sirupsen/logrus"
"github.com/stretchr/testify/assert"
)

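// senderMock is a test double for the HTTP sender used by SystemInstance: it records the
// last request (method, URL, header, body) and returns the configured responseBody and
// httpStatusCode, or a provoked error when errorExp is set.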
type senderMock struct {
token string
httpMethod string
httpStatusCode int
urlCalled string
requestBody string
responseBody string
header http.Header
logger *logrus.Entry
errorExp bool
}

func (sm *senderMock) SendRequest(method, url string, body io.Reader, header http.Header, cookies []*http.Cookie) (*http.Response, error) {
if sm.errorExp {
return &http.Response{}, errors.New("Provoked technical error")
}
sm.httpMethod = method
sm.urlCalled = url
sm.header = header
if body != nil {
buf := new(bytes.Buffer)
buf.ReadFrom(body)
sm.requestBody = buf.String()
}
var httpError error
if sm.httpStatusCode > 399 {
httpError = fmt.Errorf("http error %v", sm.httpStatusCode)
}
return &http.Response{StatusCode: sm.httpStatusCode, Body: ioutil.NopCloser(strings.NewReader(sm.responseBody))}, httpError
}
func (sm *senderMock) UploadFile(url, file, fieldName string, header http.Header, cookies []*http.Cookie, uploadType string) (*http.Response, error) {
sm.httpMethod = http.MethodPost
sm.urlCalled = url
sm.header = header
return &http.Response{StatusCode: sm.httpStatusCode, Body: ioutil.NopCloser(bytes.NewReader([]byte(sm.responseBody)))}, nil
}
func (sm *senderMock) UploadRequest(method, url, file, fieldName string, header http.Header, cookies []*http.Cookie, uploadType string) (*http.Response, error) {
sm.httpMethod = http.MethodPost
sm.urlCalled = url
sm.header = header
return &http.Response{StatusCode: sm.httpStatusCode, Body: ioutil.NopCloser(bytes.NewReader([]byte(sm.responseBody)))}, nil
}
func (sm *senderMock) Upload(_ piperHttp.UploadRequestData) (*http.Response, error) {
return &http.Response{}, fmt.Errorf("not implemented")
}
func (sm *senderMock) SetOptions(opts piperHttp.ClientOptions) {
sm.token = opts.Token
}

func TestSendRequest(t *testing.T) {
logger := log.Entry().WithField("package", "SAP/jenkins-library/pkg/checkmarxOne_test")
opts := piperHttp.ClientOptions{}
t.Run("test success", func(t *testing.T) {
myTestClient := senderMock{responseBody: `{"some": "test"}`, httpStatusCode: 200}
sys := SystemInstance{serverURL: "https://cx1.server.com", iamURL: "https://cx1iam.server.com", tenant: "tenant", client: &myTestClient, logger: logger}
myTestClient.SetOptions(opts)

_, err := sendRequest(&sys, "GET", "/test", nil, nil, []int{})

assert.NoError(t, err, "Error occurred but none expected")
assert.Equal(t, "https://cx1.server.com/api/test", myTestClient.urlCalled, "Called url incorrect")
})

t.Run("test error", func(t *testing.T) {
myTestClient := senderMock{responseBody: `{"some": "test"}`, httpStatusCode: 400}
sys := SystemInstance{serverURL: "https://cx1.server.com", iamURL: "https://cx1iam.server.com", tenant: "tenant", client: &myTestClient, logger: logger}
myTestClient.SetOptions(opts)
_, err := sendRequest(&sys, "GET", "/test", nil, nil, []int{})

assert.Error(t, err, "Error expected but none occurred")
assert.Equal(t, "https://cx1.server.com/api/test", myTestClient.urlCalled, "Called url incorrect")
})

t.Run("test technical error", func(t *testing.T) {
myTestClient := senderMock{responseBody: `{"some": "test"}`, httpStatusCode: 400}
sys := SystemInstance{serverURL: "https://cx1.server.com", iamURL: "https://cx1iam.server.com", tenant: "tenant", client: &myTestClient, logger: logger}
myTestClient.SetOptions(opts)
_, err := sendRequest(&sys, "error", "/test", nil, nil, []int{})

assert.Error(t, err, "Error expected but none occurred")
})
}

func TestSendRequestInternal(t *testing.T) {
logger := log.Entry().WithField("package", "SAP/jenkins-library/pkg/checkmarxOne_test")
opts := piperHttp.ClientOptions{}

t.Run("test accepted error", func(t *testing.T) {
myTestClient := senderMock{responseBody: `{"some": "test"}`, httpStatusCode: 404}
sys := SystemInstance{serverURL: "https://cx1.server.com", iamURL: "https://cx1iam.server.com", tenant: "tenant", client: &myTestClient, logger: logger}
myTestClient.SetOptions(opts)
_, err := sendRequestInternal(&sys, "GET", "/test", nil, nil, []int{404})

assert.NoError(t, err, "No error expected but error occurred")
})
}

func TestGetOAuthToken(t *testing.T) {
logger := log.Entry().WithField("package", "SAP/jenkins-library/pkg/checkmarxOne_test")
opts := piperHttp.ClientOptions{}
t.Run("test success", func(t *testing.T) {
myTestClient := senderMock{responseBody: `{"token_type":"Bearer","access_token":"abcd12345","expires_in":7045634}`, httpStatusCode: 200}
sys, _ := NewSystemInstance(&myTestClient, "https://cx1.server.com", "https://cx1iam.server.com", "tenant", "", "client", "secret")
myTestClient.SetOptions(opts)

token, err := sys.getOAuth2Token()

assert.NoError(t, err, "Error occurred but none expected")
assert.Equal(t, "https://cx1iam.server.com/auth/realms/tenant/protocol/openid-connect/token", myTestClient.urlCalled, "Called url incorrect")
assert.Equal(t, "Bearer abcd12345", token, "Token incorrect")
assert.Equal(t, "client_id=client&client_secret=secret&grant_type=client_credentials", myTestClient.requestBody, "Request body incorrect")
})

t.Run("test authentication failure", func(t *testing.T) {
myTestClient := senderMock{responseBody: `{}`, httpStatusCode: 400}
sys := SystemInstance{serverURL: "https://cx1.server.com", iamURL: "https://cx1iam.server.com", tenant: "tenant", client: &myTestClient, logger: logger}
myTestClient.SetOptions(opts)

_, err := sys.getOAuth2Token()

assert.Error(t, err, "Error expected but none occurred")
assert.Equal(t, "https://cx1iam.server.com/auth/realms/tenant/protocol/openid-connect/token", myTestClient.urlCalled, "Called url incorrect")
})

t.Run("test new system", func(t *testing.T) {
myTestClient := senderMock{responseBody: `{"token_type":"Bearer","access_token":"abcd12345","expires_in":7045634}`, httpStatusCode: 200}
_, err := NewSystemInstance(&myTestClient, "https://cx1.server.com", "https://cx1iam.server.com", "tenant", "", "client", "secret")

assert.NoError(t, err, "Error occurred but none expected")
assert.Equal(t, "https://cx1iam.server.com/auth/realms/tenant/protocol/openid-connect/token", myTestClient.urlCalled, "Called url incorrect")
assert.Equal(t, "Bearer abcd12345", myTestClient.token, "Token incorrect")
})

t.Run("test technical error", func(t *testing.T) {
myTestClient := senderMock{responseBody: `{}`, httpStatusCode: 400}
sys := SystemInstance{serverURL: "https://cx1.server.com", iamURL: "https://cx1iam.server.com", tenant: "tenant", client: &myTestClient, logger: logger}
myTestClient.SetOptions(opts)
myTestClient.errorExp = true

_, err := sys.getOAuth2Token()

assert.Error(t, err, "Error expected but none occurred")
})
}

func TestGetGroups(t *testing.T) {
logger := log.Entry().WithField("package", "SAP/jenkins-library/pkg/checkmarxOne_test")
opts := piperHttp.ClientOptions{}
t.Run("test success", func(t *testing.T) {
myTestClient := senderMock{responseBody: `[{"id":"be82031b-a75c-4fc0-894b-fff4deab2854","name":"Group1","path":"/Group1","subGroups":[]},{"id":"b368988c-b124-4151-b507-c8fcad501165","name":"Group2","path":"/Group2","subGroups":[]}]`, httpStatusCode: 200}
sys := SystemInstance{serverURL: "https://cx1.server.com", iamURL: "https://cx1iam.server.com", tenant: "tenant", client: &myTestClient, logger: logger}
myTestClient.SetOptions(opts)

groups, err := sys.GetGroups()
assert.NoError(t, err, "Error occurred but none expected")

assert.Equal(t, "https://cx1iam.server.com/auth/realms/tenant/pip/groups", myTestClient.urlCalled, "Called url incorrect")
assert.Equal(t, 2, len(groups), "Number of Groups incorrect")
assert.Equal(t, "Group1", groups[0].Name, "Group name 1 incorrect")
assert.Equal(t, "Group2", groups[1].Name, "Group name 2 incorrect")

t.Run("test filter groups by name", func(t *testing.T) {
group2, _ := sys.GetGroupByName("Group2")
assert.Equal(t, "Group2", group2.Name, "Group name incorrect")
assert.Equal(t, "b368988c-b124-4151-b507-c8fcad501165", group2.GroupID, "Group id incorrect")
})

t.Run("test Filter groups by ID", func(t *testing.T) {
group1, _ := sys.GetGroupByID("be82031b-a75c-4fc0-894b-fff4deab2854")
assert.Equal(t, "Group1", group1.Name, "Group name incorrect")
assert.Equal(t, "be82031b-a75c-4fc0-894b-fff4deab2854", group1.GroupID, "Group id incorrect")
})

t.Run("test fail Filter groups by name", func(t *testing.T) {
group, err := sys.GetGroupByName("Group")
assert.Equal(t, "", group.Name, "Group name incorrect")
assert.Contains(t, fmt.Sprint(err), "No group matching")
})
})

t.Run("test technical error", func(t *testing.T) {
myTestClient := senderMock{responseBody: `[{"id":"1", "fullName":"Group1"}, {"id":"2", "fullName":"Group2"}, {"id":"3", "fullName":"Group3"}]`, httpStatusCode: 200}
sys := SystemInstance{serverURL: "https://cx1.server.com", iamURL: "https://cx1iam.server.com", tenant: "tenant", client: &myTestClient, logger: logger}
myTestClient.SetOptions(opts)
myTestClient.errorExp = true

groups, _ := sys.GetGroups()

assert.Equal(t, 0, len(groups), "Error expected but none occurred")
})
}

func TestGetScanMetadata(t *testing.T) {
logger := log.Entry().WithField("package", "SAP/jenkins-library/pkg/checkmarxOne_test")
opts := piperHttp.ClientOptions{}
t.Run("test success", func(t *testing.T) {
myTestClient := senderMock{responseBody: `{"scanId":"03d66397-36df-40b5-8976-f38bcce695a7","projectId":"eac4dc3b-4bbf-4d04-87e5-3b3cedae38fb","loc":158,"fileCount":39,"isIncremental":false,"isIncrementalCanceled":false,"queryPreset":"Checkmarx Default"}`, httpStatusCode: 200}
sys := SystemInstance{serverURL: "https://cx1.server.com", iamURL: "https://cx1iam.server.com", tenant: "tenant", client: &myTestClient, logger: logger}
myTestClient.SetOptions(opts)

scanmeta, err := sys.GetScanMetadata("03d66397-36df-40b5-8976-f38bcce695a7")
assert.NoError(t, err, "Error occurred but none expected")

assert.Equal(t, "03d66397-36df-40b5-8976-f38bcce695a7", scanmeta.ScanID, "ScanID is incorrect")
assert.Equal(t, "eac4dc3b-4bbf-4d04-87e5-3b3cedae38fb", scanmeta.ProjectID, "ProjectID is incorrect")
assert.Equal(t, 158, scanmeta.LOC, "LOC is incorrect")
assert.Equal(t, 39, scanmeta.FileCount, "FileCount is incorrect")
assert.Equal(t, false, scanmeta.IsIncremental, "IsIncremental is incorrect")
assert.Equal(t, false, scanmeta.IsIncrementalCanceled, "IsIncrementalCanceled is incorrect")
assert.Equal(t, "Checkmarx Default", scanmeta.PresetName, "PresetName is incorrect")
})

t.Run("test technical error", func(t *testing.T) {
myTestClient := senderMock{httpStatusCode: 200}
sys := SystemInstance{serverURL: "https://cx1.server.com", iamURL: "https://cx1iam.server.com", tenant: "tenant", client: &myTestClient, logger: logger}
myTestClient.SetOptions(opts)
myTestClient.errorExp = true

_, err := sys.GetScanMetadata("03d66397-36df-40b5-8976-f38bcce695a7")
assert.Contains(t, fmt.Sprint(err), "Provoked technical error")
})
}

func TestGetScan(t *testing.T) {
logger := log.Entry().WithField("package", "SAP/jenkins-library/pkg/checkmarxOne_test")
opts := piperHttp.ClientOptions{}
t.Run("test success", func(t *testing.T) {
myTestClient := senderMock{responseBody: `{"id":"7343f9f5-7633-40d5-b000-0a7a3c2c432e","status":"Completed","statusDetails":[{"name":"general","status":"Completed","details":""},{"name":"sast","status":"Completed","details":"","loc":2148}],"branch":"master","createdAt":"2023-03-31T08:35:56.412514Z","updatedAt":"2023-03-31T08:36:53.526569Z","projectId":"e7a7704c-4bfe-4054-9137-d32c156ca641","projectName":"fullScanCycle","userAgent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:105.0) Gecko/20100101 Firefox/105.0","initiator":"user@sap.com","tags":{},"metadata":{"id":"7343f9f5-7633-40d5-b000-0a7a3c2c432e","type":"upload","Handler":{"UploadHandler":{"branch":"master","upload_url":"https://cx1.server.com/storage/st-gcp-9k90xv-uploads/b68ee5ba-3657-424f-9b68-05452300d5d7/271b80e3-b0d4-4be6-9f66-9469126b624f?X-Amz-Algorithm=AWS4-HMAC-SHA256\u0026X-Amz-Credential=ast%2F20230331%2Fus-east-1%2Fs3%2Faws4_request\u0026X-Amz-Date=20230331T083556Z\u0026X-Amz-Expires=86400\u0026X-Amz-Signature=94d74276d93945c37243f7ccec3d1e30b15d4d6ec79a869d3d9e46622fd89acd\u0026X-Amz-SignedHeaders=host"}},"configs":[{"type":"sast","value":{"presetName":"Checkmarx Default","incremental":"true","languageMode":"primary"}}],"project":{"id":"e7a7704c-4bfe-4054-9137-d32c156ca641"},"created_at":{"nanos":387074846,"seconds":1680251756}},"engines":["sast"],"sourceType":"zip","sourceOrigin":"Mozilla"}`, httpStatusCode: 200}
sys := SystemInstance{serverURL: "https://cx1.server.com", iamURL: "https://cx1iam.server.com", tenant: "tenant", client: &myTestClient, logger: logger}
myTestClient.SetOptions(opts)

scan, err := sys.GetScan("7343f9f5-7633-40d5-b000-0a7a3c2c432e")
assert.NoError(t, err, "Error occurred but none expected")
assert.Equal(t, "7343f9f5-7633-40d5-b000-0a7a3c2c432e", scan.ScanID, "ScanID is incorrect")
assert.Equal(t, "master", scan.Branch, "Branch is incorrect")
assert.Equal(t, 2, len(scan.StatusDetails), "StatusDetails is incorrect")
})

t.Run("test technical error", func(t *testing.T) {
myTestClient := senderMock{httpStatusCode: 200}
sys := SystemInstance{serverURL: "https://cx1.server.com", iamURL: "https://cx1iam.server.com", tenant: "tenant", client: &myTestClient, logger: logger}
myTestClient.SetOptions(opts)
myTestClient.errorExp = true

_, err := sys.GetScan("7343f9f5-7633-40d5-b000-0a7a3c2c432e")
assert.Contains(t, fmt.Sprint(err), "Provoked technical error")
})
}

func TestGetApplicationByName(t *testing.T) {
logger := log.Entry().WithField("package", "SAP/jenkins-library/pkg/checkmarxOne_test")
opts := piperHttp.ClientOptions{}
t.Run("test success", func(t *testing.T) {
myTestClient := senderMock{responseBody: `{"totalCount":6,"filteredTotalCount":6,"applications":[{"id":"8cf83fcf-ac61-4e32-b988-47cde3cc818c","name":"test_dev2","description":"","criticality":3,"rules":[],"projectIds":[],"tags":{},"createdAt":"2023-04-06T13:57:00.082719Z","updatedAt":"2023-04-06T13:57:00.082719Z"},{"id":"dee8573b-c58e-4945-a97c-a66884380093","name":"test_dev1","description":"","criticality":3,"rules":[],"projectIds":[],"tags":{},"createdAt":"2023-04-06T13:44:32.212065Z","updatedAt":"2023-04-06T13:44:32.212065Z"},{"id":"0ff00c77-b7e6-4d27-bd88-9e14520e06e6","name":"test_dev","description":"","criticality":3,"rules":[],"projectIds":[],"tags":{},"createdAt":"2023-04-06T13:24:36.459375Z","updatedAt":"2023-04-06T13:24:36.459375Z"},{"id":"5d482cfc-27ae-43e1-ba45-68d557df8423","name":"SSBA","description":"","criticality":3,"rules":[{"id":"e00a5b13-93d0-4128-8c32-9d6a46db85b0","type":"project.name.in","value":"ssba-zip;ssba-git;cx_cli_ssba_test"}],"projectIds":["2d75e828-6db9-4cfa-87e7-b953ad59ea25","f00a9d02-b552-4461-835a-c701e30957d8","f61cf5f0-fa91-4563-b87b-8154a4fd2408"],"tags":{},"createdAt":"2023-03-15T13:44:31.831175Z","updatedAt":"2023-03-15T13:44:31.831175Z"},{"id":"68f2f996-e7eb-495e-8829-8996241eb84e","name":"test_1","description":"","criticality":3,"rules":[{"id":"3a08b06e-a76a-4a48-bcde-1b43b9890f31","type":"project.name.in","value":"OAuth-CLI-test;test-piper-1;cx_cli_ssba_test"}],"projectIds":["2d75e828-6db9-4cfa-87e7-b953ad59ea25","db82605a-26e4-4693-a59c-ec1d584840d0","31c44a7c-0c68-492a-9921-052d336e5d5a"],"tags":{"TEST_APP":""},"createdAt":"2023-02-20T13:12:02.927562Z","updatedAt":"2023-02-20T13:12:02.927562Z"},{"id":"095dced0-60b0-4dd6-b1e8-0063fa04eaa7","name":"TEST","description":"","criticality":3,"rules":[{"id":"fc02a324-0706-4522-a89f-e24bcbf76cf7","type":"project.tag.key.exists","value":"test"}],"projectIds":["db82605a-26e4-4693-a59c-ec1d584840d0"],"tags":{"TEST_APP":""},"createdAt":"2023-01-12T13:22:38.222789Z","updatedAt":"2023-01-12T13:22:38.222789Z"}]}`, httpStatusCode: 200}
sys := SystemInstance{serverURL: "https://cx1.server.com", iamURL: "https://cx1iam.server.com", tenant: "tenant", client: &myTestClient, logger: logger}
myTestClient.SetOptions(opts)

apps, err := sys.GetApplicationsByName("test", 10)
assert.NoError(t, err, "Error occurred but none expected")
assert.Equal(t, 6, len(apps), "TotalCount is incorrect")

app1, _ := sys.GetApplicationByName("test_dev2")
assert.Equal(t, "8cf83fcf-ac61-4e32-b988-47cde3cc818c", app1.ApplicationID, "ApplicationID is incorrect")

_, err = sys.GetApplicationByName("ssba")
assert.Contains(t, fmt.Sprint(err), "no application found named ssba")
})

t.Run("test technical error", func(t *testing.T) {
myTestClient := senderMock{httpStatusCode: 200}
sys := SystemInstance{serverURL: "https://cx1.server.com", iamURL: "https://cx1iam.server.com", tenant: "tenant", client: &myTestClient, logger: logger}
myTestClient.SetOptions(opts)
myTestClient.errorExp = true

_, err := sys.GetApplicationsByName("test", 10)
assert.Contains(t, fmt.Sprint(err), "Provoked technical error")
})
}
290
pkg/checkmarxone/cxjson_to_sarif.go
Normal file
@@ -0,0 +1,290 @@
package checkmarxOne

import (
"fmt"
"strings"
"time"

"github.com/SAP/jenkins-library/pkg/format"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/SAP/jenkins-library/pkg/piperutils"
"github.com/pkg/errors"
)

// ConvertCxJSONToSarif converts the Checkmarx One JSON scan results into the SARIF format
func ConvertCxJSONToSarif(sys System, serverURL string, scanResults *[]ScanResult, scanMeta *ScanMetadata, scan *Scan) (format.SARIF, error) {
// Process sarif
start := time.Now()

var sarif format.SARIF
sarif.Schema = "https://docs.oasis-open.org/sarif/sarif/v2.1.0/cos02/schemas/sarif-schema-2.1.0.json"
sarif.Version = "2.1.0"
var checkmarxRun format.Runs
checkmarxRun.ColumnKind = "utf16CodeUnits"
sarif.Runs = append(sarif.Runs, checkmarxRun)
rulesArray := []format.SarifRule{}

queries, err := sys.GetQueries()
if err != nil {
return sarif, errors.Wrap(err, "Failed to retrieve list of queries")
}

baseURL := "https://" + serverURL + "/results/" + scanMeta.ScanID + "/" + scanMeta.ProjectID

cweIdsForTaxonomies := make(map[int64]int) //use a map to avoid duplicates
cweCounter := 0
//maxretries := 5

//JSON contains a ScanResultData > Query object, which represents a broken rule or type of vuln
//This Query object contains a list of Result objects, each representing an occurrence
//Each Result object contains a ResultPath, which represents the exact location of the occurrence (the "Snippet")
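// Illustrative sketch of the nesting this loop walks (field names as used below, shapes abbreviated):
//   ScanResult: ResultID, SimilarityID, Severity, State, VulnerabilityDetails.Compliances
//     Data: QueryID (resolved to a Query via getQuery), Nodes[]
//       Nodes[k]: FileName, Line, Column, Name -> mapped to SARIF locations and the code flow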
log.Entry().Debug("[SARIF] Now handling results.")

for _, r := range *scanResults {
query := getQuery(queries, r.Data.QueryID)
if query == nil {
return sarif, errors.New(fmt.Sprintf("Unknown queryid in results: %d", r.Data.QueryID))
}

_, haskey := cweIdsForTaxonomies[query.CweID]

if !haskey {
cweIdsForTaxonomies[query.CweID] = cweCounter
cweCounter++
}

simidString := fmt.Sprintf("%d", r.SimilarityID)

var apiDescription string
result := *new(format.Results)

//General
result.RuleID = fmt.Sprintf("checkmarxOne-%v/%d", query.Language, query.QueryID)
result.RuleIndex = cweIdsForTaxonomies[query.CweID]
result.Level = "none"
msg := new(format.Message)
if apiDescription != "" {
msg.Text = apiDescription
} else {
msg.Text = query.Name
}
result.Message = msg

//Locations
codeflow := *new(format.CodeFlow)
threadflow := *new(format.ThreadFlow)
locationSaved := false
for k := 0; k < len(r.Data.Nodes); k++ {
loc := *new(format.Location)
loc.PhysicalLocation.ArtifactLocation.URI = r.Data.Nodes[0].FileName
loc.PhysicalLocation.Region.StartLine = r.Data.Nodes[k].Line
loc.PhysicalLocation.Region.EndLine = r.Data.Nodes[k].Line
loc.PhysicalLocation.Region.StartColumn = r.Data.Nodes[k].Column
snip := new(format.SnippetSarif)
snip.Text = r.Data.Nodes[k].Name
loc.PhysicalLocation.Region.Snippet = snip
if !locationSaved { // To avoid overloading log file, we only save the 1st location, or source, as in the webview
result.Locations = append(result.Locations, loc)
locationSaved = true
}

//Related Locations
relatedLocation := *new(format.RelatedLocation)
relatedLocation.ID = k + 1
relatedLocation.PhysicalLocation = *new(format.RelatedPhysicalLocation)
relatedLocation.PhysicalLocation.ArtifactLocation = loc.PhysicalLocation.ArtifactLocation
relatedLocation.PhysicalLocation.Region = *new(format.RelatedRegion)
relatedLocation.PhysicalLocation.Region.StartLine = loc.PhysicalLocation.Region.StartLine
relatedLocation.PhysicalLocation.Region.StartColumn = r.Data.Nodes[k].Column
result.RelatedLocations = append(result.RelatedLocations, relatedLocation)

threadFlowLocation := *new(format.Locations)
tfloc := new(format.Location)
tfloc.PhysicalLocation.ArtifactLocation.URI = r.Data.Nodes[0].FileName
tfloc.PhysicalLocation.Region.StartLine = r.Data.Nodes[k].Line
tfloc.PhysicalLocation.Region.EndLine = r.Data.Nodes[k].Line
tfloc.PhysicalLocation.Region.StartColumn = r.Data.Nodes[k].Column
tfloc.PhysicalLocation.Region.Snippet = snip
threadFlowLocation.Location = tfloc
threadflow.Locations = append(threadflow.Locations, threadFlowLocation)

}
codeflow.ThreadFlows = append(codeflow.ThreadFlows, threadflow)
result.CodeFlows = append(result.CodeFlows, codeflow)

result.PartialFingerprints.CheckmarxSimilarityID = simidString
result.PartialFingerprints.PrimaryLocationLineHash = simidString

//Properties
props := new(format.SarifProperties)
props.Audited = false
props.CheckmarxSimilarityID = simidString
props.InstanceID = r.ResultID // no more PathID in cx1
props.ToolSeverity = r.Severity

// classify into audit groups
switch r.Severity {
case "HIGH":
props.AuditRequirement = format.AUDIT_REQUIREMENT_GROUP_1_DESC
props.AuditRequirementIndex = format.AUDIT_REQUIREMENT_GROUP_1_INDEX
props.ToolSeverityIndex = 3
break
case "MEDIUM":
props.AuditRequirement = format.AUDIT_REQUIREMENT_GROUP_1_DESC
props.AuditRequirementIndex = format.AUDIT_REQUIREMENT_GROUP_1_INDEX
props.ToolSeverityIndex = 2
break
case "LOW":
props.AuditRequirement = format.AUDIT_REQUIREMENT_GROUP_2_DESC
props.AuditRequirementIndex = format.AUDIT_REQUIREMENT_GROUP_2_INDEX
props.ToolSeverityIndex = 1
break
case "INFORMATION":
props.AuditRequirement = format.AUDIT_REQUIREMENT_GROUP_3_DESC
props.AuditRequirementIndex = format.AUDIT_REQUIREMENT_GROUP_3_INDEX
props.ToolSeverityIndex = 0
break
}

switch r.State {
case "NOT_EXPLOITABLE":
props.ToolState = "NOT_EXPLOITABLE"
props.ToolStateIndex = 1
props.Audited = true
break
case "CONFIRMED":
props.ToolState = "CONFIRMED"
props.ToolStateIndex = 2
props.Audited = true
break
case "URGENT", "URGENT ":
props.ToolState = "URGENT"
props.ToolStateIndex = 3
props.Audited = true
break
case "PROPOSED_NOT_EXPLOITABLE":
props.ToolState = "PROPOSED_NOT_EXPLOITABLE"
props.ToolStateIndex = 4
props.Audited = true
break
default:
props.ToolState = "TO_VERIFY" // Includes case 0
props.ToolStateIndex = 0

break
}

props.ToolAuditMessage = ""
// currently disabled due to the extra load (one api call per finding)
/*predicates, err := sys.GetResultsPredicates(r.SimilarityID, scanMeta.ProjectID)
if err == nil {
log.Entry().Infof("Retrieved %d results predicates", len(predicates))
messageCandidates := []string{}
for _, p := range predicates {
messageCandidates = append([]string{strings.Trim(p.Comment, "\r\n")}, messageCandidates...) //Append in reverse order, trim to remove extra \r
}
log.Entry().Info(strings.Join(messageCandidates, "; "))
props.ToolAuditMessage = strings.Join(messageCandidates, " \n ")
} else {
log.Entry().Warningf("Error while retrieving result predicates: %s", err)
}*/

props.RuleGUID = fmt.Sprintf("%d", r.Data.QueryID)
props.UnifiedAuditState = ""
result.Properties = props

//Finalize
sarif.Runs[0].Results = append(sarif.Runs[0].Results, result)

//handle the rules array
rule := *new(format.SarifRule)

rule.ID = fmt.Sprintf("checkmarxOne-%v/%d", query.Language, query.QueryID)
words := strings.Split(query.Name, "_")
for w := 0; w < len(words); w++ {
words[w] = piperutils.Title(strings.ToLower(words[w]))
}
rule.Name = strings.Join(words, "")

rule.HelpURI = fmt.Sprintf("%v/sast/description/%v/%v", baseURL, query.QueryDescriptionID, query.QueryID)
rule.Help = new(format.Help)
rule.Help.Text = rule.HelpURI
rule.ShortDescription = new(format.Message)
rule.ShortDescription.Text = query.Name
rule.Properties = new(format.SarifRuleProperties)

if len(r.VulnerabilityDetails.Compliances) > 0 {
rule.FullDescription = new(format.Message)
rule.FullDescription.Text = strings.Join(r.VulnerabilityDetails.Compliances[:], ";")

for cat := 0; cat < len(r.VulnerabilityDetails.Compliances); cat++ {
rule.Properties.Tags = append(rule.Properties.Tags, r.VulnerabilityDetails.Compliances[cat])
}
}
switch query.Severity {
case "INFORMATION":
rule.Properties.SecuritySeverity = "0.0"
case "LOW":
rule.Properties.SecuritySeverity = "2.0"
case "MEDIUM":
rule.Properties.SecuritySeverity = "5.0"
case "HIGH":
rule.Properties.SecuritySeverity = "7.0"
default:
rule.Properties.SecuritySeverity = "10.0"
}

if query.CweID != 0 {
rule.Properties.Tags = append(rule.Properties.Tags, fmt.Sprintf("external/cwe/cwe-%d", query.CweID))
}
rulesArray = append(rulesArray, rule)
}

// Handle driver object
log.Entry().Debug("[SARIF] Now handling driver object.")
tool := *new(format.Tool)
tool.Driver = *new(format.Driver)
tool.Driver.Name = "CheckmarxOne SCA"

// TODO: a way to fetch/store the version
tool.Driver.Version = "1" //strings.Split(cxxml.CheckmarxVersion, "V ")
tool.Driver.InformationUri = "https://checkmarx.com/resource/documents/en/34965-68571-viewing-results.html"
tool.Driver.Rules = rulesArray
sarif.Runs[0].Tool = tool

//handle automationDetails
sarif.Runs[0].AutomationDetails = &format.AutomationDetails{Id: fmt.Sprintf("%v/sast", baseURL)} // Use deeplink to pass a maximum of information

//handle taxonomies
//Only one exists apparently: CWE. It is fixed
taxonomy := *new(format.Taxonomies)
taxonomy.Name = "CWE"
taxonomy.Organization = "MITRE"
taxonomy.ShortDescription.Text = "The MITRE Common Weakness Enumeration"
for key := range cweIdsForTaxonomies {
taxa := *new(format.Taxa)
taxa.Id = fmt.Sprintf("%d", key)
taxonomy.Taxa = append(taxonomy.Taxa, taxa)
}
sarif.Runs[0].Taxonomies = append(sarif.Runs[0].Taxonomies, taxonomy)

// Add a conversion object to highlight this isn't native SARIF
conversion := new(format.Conversion)
conversion.Tool.Driver.Name = "Piper CheckmarxOne JSON to SARIF converter"
conversion.Tool.Driver.InformationUri = "https://github.com/SAP/jenkins-library"
conversion.Invocation.ExecutionSuccessful = true
conversion.Invocation.StartTimeUtc = fmt.Sprintf("%s", start.Format("2006-01-02T15:04:05.000Z")) // "YYYY-MM-DDThh:mm:ss.sZ" on 2006-01-02 15:04:05
conversion.Invocation.Account = scan.Initiator
sarif.Runs[0].Conversion = conversion

return sarif, nil
}

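// getQuery returns the query definition matching queryID from the list retrieved via
// sys.GetQueries(), or nil when a result references an unknown query.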
func getQuery(queries []Query, queryID uint64) *Query {
for id := range queries {
if queries[id].QueryID == queryID {
return &queries[id]
}
}
return nil
}
278
pkg/checkmarxone/reporting.go
Normal file
@@ -0,0 +1,278 @@
package checkmarxOne

import (
"bytes"
"crypto/sha1"
"encoding/json"
"fmt"
"math"
"path/filepath"
"strings"
"time"

"github.com/SAP/jenkins-library/pkg/format"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/SAP/jenkins-library/pkg/piperutils"
"github.com/SAP/jenkins-library/pkg/reporting"

"github.com/pkg/errors"
)

type CheckmarxOneReportData struct {
ToolName string `json:"toolName"`
ProjectName string `json:"projectName"`
ProjectID string `json:"projectID"`
ScanID string `json:"scanID"`
GroupName string `json:"groupName"`
GroupPath string `json:"groupPath"`
DeepLink string `json:"deepLink"`
Preset string `json:"preset"`
CheckmarxVersion string `json:"checkmarxVersion"`
ScanType string `json:"scanType"`
HighTotal int `json:"highTotal"`
HighAudited int `json:"highAudited"`
MediumTotal int `json:"mediumTotal"`
MediumAudited int `json:"mediumAudited"`
LowTotal int `json:"lowTotal"`
LowAudited int `json:"lowAudited"`
InformationTotal int `json:"informationTotal"`
InformationAudited int `json:"informationAudited"`
IsLowPerQueryAudited bool `json:"isLowPerQueryAudited"`
LowPerQuery *[]LowPerQuery `json:"lowPerQuery"`
}

type LowPerQuery struct {
QueryName string `json:"query"`
Audited int `json:"audited"`
Total int `json:"total"`
}

func CreateCustomReport(data *map[string]interface{}, insecure, neutral []string) reporting.ScanReport {
deepLink := fmt.Sprintf(`<a href="%v" target="_blank">Link to scan in CX1 UI</a>`, (*data)["DeepLink"])

scanReport := reporting.ScanReport{
ReportTitle: "CheckmarxOne SAST Report",
Subheaders: []reporting.Subheader{
{Description: "Project name", Details: fmt.Sprint((*data)["ProjectName"])},
{Description: "Project ID", Details: fmt.Sprint((*data)["ProjectId"])},
{Description: "Owner", Details: fmt.Sprint((*data)["Owner"])},
{Description: "Scan ID", Details: fmt.Sprint((*data)["ScanId"])},
{Description: "Group", Details: fmt.Sprint((*data)["Group"])},
{Description: "Group full path", Details: fmt.Sprint((*data)["GroupFullPathOnReportDate"])},
{Description: "Scan start", Details: fmt.Sprint((*data)["ScanStart"])},
{Description: "Scan duration", Details: fmt.Sprint((*data)["ScanTime"])},
{Description: "Scan type", Details: fmt.Sprint((*data)["ScanType"])},
{Description: "Preset", Details: fmt.Sprint((*data)["Preset"])},
{Description: "Report creation time", Details: fmt.Sprint((*data)["ReportCreationTime"])},
{Description: "Lines of code scanned", Details: fmt.Sprint((*data)["LinesOfCodeScanned)"])},
|
||||
{Description: "Files scanned", Details: fmt.Sprint((*data)["FilesScanned)"])},
{Description: "Checkmarx version", Details: fmt.Sprint((*data)["CheckmarxVersion"])},
{Description: "Deep link", Details: deepLink},
},
Overview: []reporting.OverviewRow{},
ReportTime: time.Now(),
}

for _, issue := range insecure {
row := reporting.OverviewRow{}
row.Description = fmt.Sprint(issue)
row.Style = reporting.Red

scanReport.Overview = append(scanReport.Overview, row)
}
for _, issue := range neutral {
row := reporting.OverviewRow{}
row.Description = fmt.Sprint(issue)

scanReport.Overview = append(scanReport.Overview, row)
}

detailTable := reporting.ScanDetailTable{
Headers: []string{
"KPI",
"Count",
},
WithCounter: false,
}
detailRows := []reporting.OverviewRow{
{Description: "High issues", Details: fmt.Sprint((*data)["High"].(map[string]int)["Issues"])},
{Description: "High not false positive issues", Details: fmt.Sprint((*data)["High"].(map[string]int)["NotFalsePositive"])},
{Description: "High not exploitable issues", Details: fmt.Sprint((*data)["High"].(map[string]int)["NotExploitable"])},
{Description: "High confirmed issues", Details: fmt.Sprint((*data)["High"].(map[string]int)["Confirmed"])},
{Description: "High urgent issues", Details: fmt.Sprint((*data)["High"].(map[string]int)["Urgent"])},
{Description: "High proposed not exploitable issues", Details: fmt.Sprint((*data)["High"].(map[string]int)["ProposedNotExploitable"])},
{Description: "High to verify issues", Details: fmt.Sprint((*data)["High"].(map[string]int)["ToVerify"])},
{Description: "Medium issues", Details: fmt.Sprint((*data)["Medium"].(map[string]int)["Issues"])},
{Description: "Medium not false positive issues", Details: fmt.Sprint((*data)["Medium"].(map[string]int)["NotFalsePositive"])},
{Description: "Medium not exploitable issues", Details: fmt.Sprint((*data)["Medium"].(map[string]int)["NotExploitable"])},
{Description: "Medium confirmed issues", Details: fmt.Sprint((*data)["Medium"].(map[string]int)["Confirmed"])},
{Description: "Medium urgent issues", Details: fmt.Sprint((*data)["Medium"].(map[string]int)["Urgent"])},
{Description: "Medium proposed not exploitable issues", Details: fmt.Sprint((*data)["Medium"].(map[string]int)["ProposedNotExploitable"])},
{Description: "Medium to verify issues", Details: fmt.Sprint((*data)["Medium"].(map[string]int)["ToVerify"])},
{Description: "Low issues", Details: fmt.Sprint((*data)["Low"].(map[string]int)["Issues"])},
{Description: "Low not false positive issues", Details: fmt.Sprint((*data)["Low"].(map[string]int)["NotFalsePositive"])},
{Description: "Low not exploitable issues", Details: fmt.Sprint((*data)["Low"].(map[string]int)["NotExploitable"])},
{Description: "Low confirmed issues", Details: fmt.Sprint((*data)["Low"].(map[string]int)["Confirmed"])},
{Description: "Low urgent issues", Details: fmt.Sprint((*data)["Low"].(map[string]int)["Urgent"])},
{Description: "Low proposed not exploitable issues", Details: fmt.Sprint((*data)["Low"].(map[string]int)["ProposedNotExploitable"])},
{Description: "Low to verify issues", Details: fmt.Sprint((*data)["Low"].(map[string]int)["ToVerify"])},
{Description: "Informational issues", Details: fmt.Sprint((*data)["Information"].(map[string]int)["Issues"])},
{Description: "Informational not false positive issues", Details: fmt.Sprint((*data)["Information"].(map[string]int)["NotFalsePositive"])},
{Description: "Informational not exploitable issues", Details: fmt.Sprint((*data)["Information"].(map[string]int)["NotExploitable"])},
{Description: "Informational confirmed issues", Details: fmt.Sprint((*data)["Information"].(map[string]int)["Confirmed"])},
{Description: "Informational urgent issues", Details: fmt.Sprint((*data)["Information"].(map[string]int)["Urgent"])},
{Description: "Informational proposed not exploitable issues", Details: fmt.Sprint((*data)["Information"].(map[string]int)["ProposedNotExploitable"])},
{Description: "Informational to verify issues", Details: fmt.Sprint((*data)["Information"].(map[string]int)["ToVerify"])},
}
for _, detailRow := range detailRows {
row := reporting.ScanRow{}
row.AddColumn(detailRow.Description, 0)
row.AddColumn(detailRow.Details, 0)

detailTable.Rows = append(detailTable.Rows, row)
}
scanReport.DetailTable = detailTable

return scanReport
}

func CreateJSONHeaderReport(data *map[string]interface{}) CheckmarxOneReportData {
checkmarxReportData := CheckmarxOneReportData{
ToolName: `checkmarxone`,
ProjectName: fmt.Sprint((*data)["ProjectName"]),
GroupName: fmt.Sprint((*data)["Group"]),
GroupPath: fmt.Sprint((*data)["GroupFullPathOnReportDate"]),
DeepLink: fmt.Sprint((*data)["DeepLink"]),
Preset: fmt.Sprint((*data)["Preset"]),
CheckmarxVersion: fmt.Sprint((*data)["CheckmarxVersion"]),
ScanType: fmt.Sprint((*data)["ScanType"]),
ProjectID: fmt.Sprint((*data)["ProjectId"]),
ScanID: fmt.Sprint((*data)["ScanId"]),
}

checkmarxReportData.HighAudited = (*data)["High"].(map[string]int)["Issues"] - (*data)["High"].(map[string]int)["NotFalsePositive"]
checkmarxReportData.HighTotal = (*data)["High"].(map[string]int)["Issues"]

checkmarxReportData.MediumAudited = (*data)["Medium"].(map[string]int)["Issues"] - (*data)["Medium"].(map[string]int)["NotFalsePositive"]
checkmarxReportData.MediumTotal = (*data)["Medium"].(map[string]int)["Issues"]

checkmarxReportData.LowAudited = (*data)["Low"].(map[string]int)["Confirmed"] + (*data)["Low"].(map[string]int)["NotExploitable"]
checkmarxReportData.LowTotal = (*data)["Low"].(map[string]int)["Issues"]

checkmarxReportData.InformationAudited = (*data)["Information"].(map[string]int)["Confirmed"] + (*data)["Information"].(map[string]int)["NotExploitable"]
checkmarxReportData.InformationTotal = (*data)["Information"].(map[string]int)["Issues"]

lowPerQueryList := []LowPerQuery{}
checkmarxReportData.IsLowPerQueryAudited = true
if _, ok := (*data)["LowPerQuery"]; ok {
lowPerQueryMap := (*data)["LowPerQuery"].(map[string]map[string]int)
for queryName, resultsLowQuery := range lowPerQueryMap {
audited := resultsLowQuery["Confirmed"] + resultsLowQuery["NotExploitable"]
total := resultsLowQuery["Issues"]
lowPerQuery := LowPerQuery{}
lowPerQuery.QueryName = queryName
lowPerQuery.Audited = audited
lowPerQuery.Total = total
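// Audit baseline for Low findings (cf. the step documentation): a query counts as sufficiently
// audited when at least 10% of its Low findings (rounded up) or at least 10 of them are
// Confirmed/Not Exploitable; otherwise IsLowPerQueryAudited is set to false below.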
lowAuditedRequiredPerQuery := int(math.Ceil(0.10 * float64(total)))
if audited < lowAuditedRequiredPerQuery && audited < 10 {
checkmarxReportData.IsLowPerQueryAudited = false
}
lowPerQueryList = append(lowPerQueryList, lowPerQuery)
}
}
checkmarxReportData.LowPerQuery = &lowPerQueryList

return checkmarxReportData
}

func WriteJSONHeaderReport(jsonReport CheckmarxOneReportData) ([]piperutils.Path, error) {
utils := piperutils.Files{}
reportPaths := []piperutils.Path{}

// Standard JSON Report
jsonComplianceReportPath := filepath.Join(ReportsDirectory, "piper_checkmarxone_report.json")
// Ensure reporting directory exists
if err := utils.MkdirAll(ReportsDirectory, 0777); err != nil {
return reportPaths, errors.Wrapf(err, "failed to create report directory")
}

file, _ := json.Marshal(jsonReport)
if err := utils.FileWrite(jsonComplianceReportPath, file, 0666); err != nil {
log.SetErrorCategory(log.ErrorConfiguration)
return reportPaths, errors.Wrapf(err, "failed to write CheckmarxOne JSON compliance report")
}
reportPaths = append(reportPaths, piperutils.Path{Name: "CheckmarxOne JSON Compliance Report", Target: jsonComplianceReportPath})

return reportPaths, nil
}

// WriteSarif writes a json file to disk as a .sarif if it respects the specification declared in format.SARIF
func WriteSarif(sarif format.SARIF) ([]piperutils.Path, error) {
utils := piperutils.Files{}
reportPaths := []piperutils.Path{}

sarifReportPath := filepath.Join(ReportsDirectory, "result.sarif")
// Ensure reporting directory exists
if err := utils.MkdirAll(ReportsDirectory, 0777); err != nil {
return reportPaths, errors.Wrapf(err, "failed to create report directory")
}

// HTML characters will most likely be present: we need to use encode: create a buffer to hold JSON data
buffer := new(bytes.Buffer)
// create JSON encoder for buffer
bufEncoder := json.NewEncoder(buffer)
// set options
bufEncoder.SetEscapeHTML(false)
bufEncoder.SetIndent("", " ")
//encode to buffer
bufEncoder.Encode(sarif)
log.Entry().Info("Writing file to disk: ", sarifReportPath)
if err := utils.FileWrite(sarifReportPath, buffer.Bytes(), 0666); err != nil {
log.SetErrorCategory(log.ErrorConfiguration)
return reportPaths, errors.Wrapf(err, "failed to write CheckmarxOne SARIF report")
}
reportPaths = append(reportPaths, piperutils.Path{Name: "CheckmarxOne SARIF Report", Target: sarifReportPath})

return reportPaths, nil
}

func WriteCustomReports(scanReport reporting.ScanReport, projectName, projectID string) ([]piperutils.Path, error) {
utils := piperutils.Files{}
reportPaths := []piperutils.Path{}

// ignore templating errors since template is in our hands and issues will be detected with the automated tests
htmlReport, _ := scanReport.ToHTML()
htmlReportPath := filepath.Join(ReportsDirectory, "piper_checkmarxone_report.html")
// Ensure reporting directory exists
if err := utils.MkdirAll(ReportsDirectory, 0777); err != nil {
return reportPaths, errors.Wrapf(err, "failed to create report directory")
}
if err := utils.FileWrite(htmlReportPath, htmlReport, 0666); err != nil {
log.SetErrorCategory(log.ErrorConfiguration)
return reportPaths, errors.Wrapf(err, "failed to write html report")
}
reportPaths = append(reportPaths, piperutils.Path{Name: "CheckmarxOne Report", Target: htmlReportPath})

// JSON reports are used by step pipelineCreateSummary in order to e.g. prepare an issue creation in GitHub
// ignore JSON errors since structure is in our hands
jsonReport, _ := scanReport.ToJSON()
if exists, _ := utils.DirExists(reporting.StepReportDirectory); !exists {
err := utils.MkdirAll(reporting.StepReportDirectory, 0777)
if err != nil {
return reportPaths, errors.Wrap(err, "failed to create reporting directory")
}
}
if err := utils.FileWrite(filepath.Join(reporting.StepReportDirectory, fmt.Sprintf("checkmarxOneExecuteScan_sast_%v.json", reportShaCheckmarxOne([]string{projectName, projectID}))), jsonReport, 0666); err != nil {
return reportPaths, errors.Wrapf(err, "failed to write json report")
}
// we do not add the json report to the overall list of reports for now,
// since it is just an intermediary report used as input for later
// and there does not seem to be real benefit in archiving it.

return reportPaths, nil
}

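// reportShaCheckmarxOne derives a stable filename suffix for the step report from the given
// parts (here: project name and ID) by hashing their comma-joined values with SHA-1.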
func reportShaCheckmarxOne(parts []string) string {
reportShaData := []byte(strings.Join(parts, ","))
return fmt.Sprintf("%x", sha1.Sum(reportShaData))
}
139
pkg/checkmarxone/reporting_test.go
Normal file
@@ -0,0 +1,139 @@
package checkmarxOne

import (
"testing"

"github.com/stretchr/testify/assert"
)

func TestCreateJSONReport(t *testing.T) {
resultMap := map[string]interface{}{}
resultMap["ToolName"] = `checkmarxone`
resultMap["ProjectName"] = `ssba`
resultMap["Group"] = `test-group`
resultMap["GroupFullPathOnReportDate"] = `test-group-path`
resultMap["DeepLink"] = `https://cx1.sap/projects/f5702f86-b396-417f-82e2-4949a55d5382/scans?branch=master&page=1&id=21e40b36-0dd7-48e5-9768-da1a8f36c907`
resultMap["Preset"] = `Checkmarx Default`
resultMap["CheckmarxVersion"] = `v1`
resultMap["ScanType"] = `Incremental`
resultMap["ProjectId"] = `f5702f86-b396-417f-82e2-4949a55d5382`
resultMap["ScanId"] = `21e40b36-0dd7-48e5-9768-da1a8f36c907`

resultMap["High"] = map[string]int{}
resultMap["Medium"] = map[string]int{}
resultMap["Low"] = map[string]int{}
resultMap["Information"] = map[string]int{}
submap := map[string]int{}
submap["Issues"] = 10
submap["NotFalsePositive"] = 10
resultMap["High"] = submap

submap = map[string]int{}
submap["Issues"] = 4
submap["NotFalsePositive"] = 0
resultMap["Medium"] = submap

submap = map[string]int{}
submap["Issues"] = 2
submap["NotFalsePositive"] = 2
submap["Confirmed"] = 1
submap["NotExploitable"] = 1
resultMap["Low"] = submap

submap = map[string]int{}
submap["Issues"] = 5
submap["NotFalsePositive"] = 5
resultMap["Information"] = submap

lowPerQuery := map[string]map[string]int{}
submap = map[string]int{}
submap["Issues"] = 4
submap["Confirmed"] = 0
submap["NotExploitable"] = 0
lowPerQuery["Low_Query_Name_1"] = submap

submap = map[string]int{}
submap["Issues"] = 5
submap["Confirmed"] = 2
submap["NotExploitable"] = 3
lowPerQuery["Low_Query_Name_2"] = submap

resultMap["LowPerQuery"] = lowPerQuery

reportingData := CreateJSONHeaderReport(&resultMap)
|
||||
assert.Equal(t, "21e40b36-0dd7-48e5-9768-da1a8f36c907", reportingData.ScanID)
|
||||
assert.Equal(t, "ssba", reportingData.ProjectName)
|
||||
assert.Equal(t, "f5702f86-b396-417f-82e2-4949a55d5382", reportingData.ProjectID)
|
||||
assert.Equal(t, "test-group", reportingData.GroupName)
|
||||
assert.Equal(t, "test-group-path", reportingData.GroupPath)
|
||||
assert.Equal(t, "checkmarxone", reportingData.ToolName)
|
||||
assert.Equal(t, "https://cx1.sap/projects/f5702f86-b396-417f-82e2-4949a55d5382/scans?branch=master&page=1&id=21e40b36-0dd7-48e5-9768-da1a8f36c907", reportingData.DeepLink)
|
||||
assert.Equal(t, "Checkmarx Default", reportingData.Preset)
|
||||
assert.Equal(t, "v1", reportingData.CheckmarxVersion)
|
||||
assert.Equal(t, "Incremental", reportingData.ScanType)
|
||||
|
||||
assert.Equal(t, 10, reportingData.HighTotal)
|
||||
assert.Equal(t, 0, reportingData.HighAudited)
|
||||
assert.Equal(t, 4, reportingData.MediumTotal)
|
||||
assert.Equal(t, 4, reportingData.MediumAudited)
|
||||
assert.Equal(t, 2, reportingData.LowTotal)
|
||||
assert.Equal(t, 2, reportingData.LowAudited)
|
||||
assert.Equal(t, 5, reportingData.InformationTotal)
|
||||
assert.Equal(t, 0, reportingData.InformationAudited)
|
||||
assert.Equal(t, false, reportingData.IsLowPerQueryAudited)
|
||||
assert.Equal(t, 2, len(*reportingData.LowPerQuery))
|
||||
if (*reportingData.LowPerQuery)[0].QueryName == "Low_Query_Name_1" {
|
||||
assert.Equal(t, "Low_Query_Name_1", (*reportingData.LowPerQuery)[0].QueryName)
|
||||
assert.Equal(t, 0, (*reportingData.LowPerQuery)[0].Audited)
|
||||
assert.Equal(t, 4, (*reportingData.LowPerQuery)[0].Total)
|
||||
assert.Equal(t, "Low_Query_Name_2", (*reportingData.LowPerQuery)[1].QueryName)
|
||||
assert.Equal(t, 5, (*reportingData.LowPerQuery)[1].Audited)
|
||||
assert.Equal(t, 5, (*reportingData.LowPerQuery)[1].Total)
|
||||
} else {
|
||||
assert.Equal(t, "Low_Query_Name_1", (*reportingData.LowPerQuery)[1].QueryName)
|
||||
assert.Equal(t, 0, (*reportingData.LowPerQuery)[1].Audited)
|
||||
assert.Equal(t, 4, (*reportingData.LowPerQuery)[1].Total)
|
||||
assert.Equal(t, "Low_Query_Name_2", (*reportingData.LowPerQuery)[0].QueryName)
|
||||
assert.Equal(t, 5, (*reportingData.LowPerQuery)[0].Audited)
|
||||
assert.Equal(t, 5, (*reportingData.LowPerQuery)[0].Total)
|
||||
}
|
||||
|
||||
lowPerQuery = map[string]map[string]int{}
|
||||
submap = map[string]int{}
|
||||
submap["Issues"] = 100
|
||||
submap["Confirmed"] = 10
|
||||
submap["NotExploitable"] = 0
|
||||
lowPerQuery["Low_Query_Name_1"] = submap
|
||||
|
||||
submap = map[string]int{}
|
||||
submap["Issues"] = 5
|
||||
submap["Confirmed"] = 2
|
||||
submap["NotExploitable"] = 3
|
||||
lowPerQuery["Low_Query_Name_2"] = submap
|
||||
|
||||
resultMap["LowPerQuery"] = lowPerQuery
|
||||
reportingData = CreateJSONHeaderReport(&resultMap)
|
||||
assert.Equal(t, true, reportingData.IsLowPerQueryAudited)
|
||||
|
||||
lowPerQuery = map[string]map[string]int{}
|
||||
submap = map[string]int{}
|
||||
submap["Issues"] = 200
|
||||
submap["Confirmed"] = 3
|
||||
submap["NotExploitable"] = 2
|
||||
lowPerQuery["Low_Query_Name_1"] = submap
|
||||
|
||||
resultMap["LowPerQuery"] = lowPerQuery
|
||||
reportingData = CreateJSONHeaderReport(&resultMap)
|
||||
assert.Equal(t, false, reportingData.IsLowPerQueryAudited)
|
||||
|
||||
lowPerQuery = map[string]map[string]int{}
|
||||
submap = map[string]int{}
|
||||
submap["Issues"] = 200
|
||||
submap["Confirmed"] = 5
|
||||
submap["NotExploitable"] = 5
|
||||
lowPerQuery["Low_Query_Name_1"] = submap
|
||||
|
||||
resultMap["LowPerQuery"] = lowPerQuery
|
||||
reportingData = CreateJSONHeaderReport(&resultMap)
|
||||
assert.Equal(t, true, reportingData.IsLowPerQueryAudited)
|
||||
}
|
487
resources/metadata/checkmarxOneExecuteScan.yaml
Normal file
@@ -0,0 +1,487 @@
metadata:
  name: checkmarxOneExecuteScan
  description: checkmarxOne is the recommended tool for security scans of JavaScript, iOS, Swift and Ruby code.
  longDescription: |-
    checkmarxOne is a Static Application Security Testing (SAST) platform to analyze e.g. Java, TypeScript, Swift, Golang or Ruby code,
    and many other programming languages for security flaws based on a set of provided rules/queries that can be customized and extended.

    This step by default enforces a specific audit baseline for findings and therefore ensures that:

    * No 'To Verify' High and Medium issues exist in your project
    * The total number of High and Medium 'Confirmed' or 'Urgent' issues is zero
    * 10% of all Low issues are 'Confirmed' or 'Not Exploitable'

    You can adapt the above thresholds using the provided configuration parameters, e.g. check against `absolute`
    thresholds instead of `percentage`, although we strongly recommend you stay with the defaults provided.
|
||||
spec:
  inputs:
    secrets:
      - name: checkmarxOneCredentialsId
        description: Jenkins 'Username with password' credentials ID containing ClientID and ClientSecret to communicate with the checkmarxOne backend.
        type: jenkins
      - name: checkmarxOneAPIKey
        description: Jenkins 'Secret text' credentials ID containing the APIKey to communicate with the checkmarxOne backend.
        type: jenkins
      - name: githubTokenCredentialsId
        description: Jenkins 'Secret text' credentials ID containing the token to authenticate to GitHub.
        type: jenkins
    resources:
      - name: checkmarxOne
        type: stash
    params:
      - name: assignees
        description: Defines the assignees for the GitHub issue created/updated with the results of the scan as a list of login names. [Not yet supported]
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        type: "[]string"
        default: []
      - name: avoidDuplicateProjectScans
        type: bool
        description: Whether duplicate scans of the same project state shall be avoided or not. [Not yet supported]
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: true
      - name: filterPattern
        type: string
        description: The filter pattern used to zip the files relevant for scanning. Patterns can be negated by setting an exclamation mark in front, e.g. `!test/*.js` would avoid adding any JavaScript files located in the test directory.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default:
          "!**/node_modules/**, !**/.xmake/**, !**/*_test.go, !**/vendor/**/*.go,
          **/*.html, **/*.xml, **/*.go, **/*.py, **/*.js, **/*.scala, **/*.ts"
      - name: fullScanCycle
        type: string
        description: Indicates how often a full scan should happen between the incremental scans when activated.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: 5
      - name: fullScansScheduled
        type: bool
        description: Whether full scans are to be scheduled or not. Should be used in combination with `incremental` and `fullScanCycle`.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: true
      - name: generatePdfReport
        type: bool
        description: Whether to generate a PDF report of the analysis results or not.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: true
      - name: githubApiUrl
        description: "Set the GitHub API URL."
        scope:
          - GENERAL
          - PARAMETERS
          - STAGES
          - STEPS
        type: string
        default: "https://api.github.com"
      - name: githubToken
        description: "GitHub personal access token as per
          https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line"
        scope:
          - GENERAL
          - PARAMETERS
          - STAGES
          - STEPS
        type: string
        secret: true
        aliases:
          - name: access_token
        resourceRef:
          - name: githubTokenCredentialsId
            type: secret
          - type: vaultSecret
            default: github
            name: githubVaultSecretName
      - name: incremental
        type: bool
        description: Whether incremental scans are to be applied, which optimizes the scan time but might reduce detection capabilities. Therefore, full scans are still required from time to time and should be scheduled via `fullScansScheduled` and `fullScanCycle`.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: true
      - name: owner
        aliases:
          - name: githubOrg
        description: "Set the GitHub organization."
        resourceRef:
          - name: commonPipelineEnvironment
            param: github/owner
        scope:
          - GENERAL
          - PARAMETERS
          - STAGES
          - STEPS
        type: string
      - name: clientSecret
        type: string
        description: The clientSecret used to authenticate with a service account.
        mandatory: true
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        secret: true
        resourceRef:
          - name: checkmarxOneCredentialsId
            type: secret
            param: clientSecret
          - type: vaultSecret
            name: checkmarxOneVaultSecretName
            default: checkmarxOne
      - name: APIKey
        type: string
        description: The APIKey used to authenticate with the checkmarxOne backend.
        mandatory: true
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        secret: true
        resourceRef:
          - name: checkmarxOneAPIKey
            type: secret
            param: APIKey
          - type: vaultSecret
            name: checkmarxOneVaultSecretName
            default: checkmarxOne
      - name: preset
        type: string
        description: The preset to use for scanning. If not set explicitly, the step will attempt to look up the project's setting based on the availability of `checkmarxOneCredentialsId`.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
      - name: languageMode
        type: string
        description: Specifies whether the scan should be run for a 'single' language or 'multi' language, default 'multi'.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: "multi"
      - name: projectCriticality
        type: string
        description: The criticality of the checkmarxOne project, used during project creation.
        mandatory: true
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: "3"
      - name: projectName
        type: string
        description: The name of the checkmarxOne project to scan into.
        mandatory: true
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
      - name: branch
        type: string
        description: Used to supply the branch scanned in the repository, or a friendly name set by the user.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        mandatory: true
      - name: pullRequestName
        type: string
        description: Used to supply the name for the newly created PR project branch when being used in pull request scenarios. This is supplied by the orchestrator.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
      - name: repository
        aliases:
          - name: githubRepo
        description: "Set the GitHub repository."
        resourceRef:
          - name: commonPipelineEnvironment
            param: github/repository
        scope:
          - GENERAL
          - PARAMETERS
          - STAGES
          - STEPS
        type: string
      - name: serverUrl
        type: string
        description: The URL pointing to the root of the checkmarxOne server to be used.
        mandatory: true
        scope:
          - GENERAL
          - PARAMETERS
          - STAGES
          - STEPS
      - name: iamUrl
        type: string
        description: The URL pointing to the access control root of the checkmarxOne IAM server to be used.
        mandatory: true
        scope:
          - GENERAL
          - PARAMETERS
          - STAGES
          - STEPS
      - name: tenant
        type: string
        description: The name of the checkmarxOne tenant to be used.
        mandatory: true
        scope:
          - GENERAL
          - PARAMETERS
          - STAGES
          - STEPS
      - name: sourceEncoding
        type: string
        description: The source encoding to be used. If not set explicitly, the project's default will be used. [Not yet supported]
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: "1"
      - name: groupName
        type: string
        description: The full name of the group to assign newly created projects to, which is preferred over groupId.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
      - name: applicationName
        type: string
        description: The full name of the Checkmarx One application to which the newly created projects will be assigned.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
      - name: clientId
        type: string
        description: The clientId (username) used to authenticate with a service account.
        mandatory: true
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        secret: true
        resourceRef:
          - name: checkmarxOneCredentialsId
            type: secret
            param: clientId
          - type: vaultSecret
            name: checkmarxOneVaultSecretName
            default: checkmarxOne
      - name: verifyOnly
        type: bool
        description: Whether the step shall only apply verification checks or whether it does a full scan and check cycle.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: false
      - name: vulnerabilityThresholdEnabled
        type: bool
        description: Whether the thresholds are enabled or not. If enabled, the build will be set to `vulnerabilityThresholdResult` in case a specific threshold value is exceeded.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: true
      - name: vulnerabilityThresholdHigh
        type: int
        description: The specific threshold for high severity findings.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: 100
      - name: vulnerabilityThresholdMedium
        type: int
        description: The specific threshold for medium severity findings.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: 100
      - name: vulnerabilityThresholdLow
        type: int
        description: The specific threshold for low severity findings.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: 10
      - name: vulnerabilityThresholdLowPerQuery
        type: bool
        description: Flag to activate/deactivate the threshold of low severity findings per query.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: false
      - name: vulnerabilityThresholdLowPerQueryMax
        type: int
        description: Upper threshold of low severity findings per query (in absolute number).
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: 10
      - name: vulnerabilityThresholdResult
        type: string
        description: The result of the build in case thresholds are enabled and exceeded.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: FAILURE
        possibleValues:
          - FAILURE
      - name: vulnerabilityThresholdUnit
        type: string
        description: The unit for the threshold to apply.
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: percentage
      - name: isOptimizedAndScheduled
        type: bool
        description: Whether the pipeline runs in optimized mode and the current execution is a scheduled one.
        resourceRef:
          - name: commonPipelineEnvironment
            param: custom/isOptimizedAndScheduled
        scope:
          - PARAMETERS
      - name: createResultIssue
        type: bool
        description: Activate creation of a result issue in GitHub.
        longDescription: |
          Whether the step creates a GitHub issue containing the scan results in the originating repo.
          Since optimized pipelines are headless, the creation is implicitly activated for scheduled runs.
        resourceRef:
          - name: commonPipelineEnvironment
            param: custom/isOptimizedAndScheduled
        scope:
          - GENERAL
          - PARAMETERS
          - STAGES
          - STEPS
        default: false
      - name: convertToSarif
        type: bool
        description: "Convert the checkmarxOne XML scan results to the open SARIF standard."
        scope:
          - PARAMETERS
          - STAGES
          - STEPS
        default: true
  outputs:
    resources:
      - name: influx
        type: influx
        params:
          - name: step_data
            fields:
              - name: checkmarxOne
                type: bool
          - name: checkmarxOne_data
            fields:
              - name: high_issues
                type: int
              - name: high_not_false_postive
                type: int
              - name: high_not_exploitable
                type: int
              - name: high_confirmed
                type: int
              - name: high_urgent
                type: int
              - name: high_proposed_not_exploitable
                type: int
              - name: high_to_verify
                type: int
              - name: medium_issues
                type: int
              - name: medium_not_false_postive
                type: int
              - name: medium_not_exploitable
                type: int
              - name: medium_confirmed
                type: int
              - name: medium_urgent
                type: int
              - name: medium_proposed_not_exploitable
                type: int
              - name: medium_to_verify
                type: int
              - name: low_issues
                type: int
              - name: low_not_false_postive
                type: int
              - name: low_not_exploitable
                type: int
              - name: low_confirmed
                type: int
              - name: low_urgent
                type: int
              - name: low_proposed_not_exploitable
                type: int
              - name: low_to_verify
                type: int
              - name: information_issues
                type: int
              - name: information_not_false_postive
                type: int
              - name: information_not_exploitable
                type: int
              - name: information_confirmed
                type: int
              - name: information_urgent
                type: int
              - name: information_proposed_not_exploitable
                type: int
              - name: information_to_verify
                type: int
              - name: lines_of_code_scanned
                type: int
              - name: files_scanned
                type: int
              - name: initiator_name
              - name: owner
              - name: scan_id
              - name: project_id
              - name: projectName
              - name: group
              - name: group_full_path_on_report_date
              - name: scan_start
              - name: scan_time
              - name: checkmarxOne_version
              - name: scan_type
              - name: preset
              - name: deep_link
              - name: report_creation_time
      - name: reports
        type: reports
        params:
          - filePattern: "**/piper_checkmarxone_report.html"
            type: checkmarxone
          - filePattern: "**/Cx1_SASTResults_*.xml"
            type: checkmarxone
          - filePattern: "**/ScanReport.*"
            type: checkmarxone
          - filePattern: "**/toolrun_checkmarxone_*.json"
            type: checkmarxone
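The threshold parameters above are easiest to reason about with a small example. The sketch below is not taken from this commit; it only illustrates one plausible reading of `vulnerabilityThresholdUnit`, assuming that with `percentage` the audited share of findings per severity must reach the threshold, while with `absolute` the number of unaudited findings must stay at or below it. The helper name `checkThreshold` and its parameters are invented for illustration.

package main

import "fmt"

// checkThreshold is an illustrative helper, not the step's actual logic.
// unit "percentage": the audited share (in %) must be >= threshold.
// unit "absolute":   the number of unaudited findings must be <= threshold.
func checkThreshold(total, audited, threshold int, unit string) bool {
	switch unit {
	case "percentage":
		if total == 0 {
			return true
		}
		auditedShare := 100.0 * float64(audited) / float64(total)
		return auditedShare >= float64(threshold)
	case "absolute":
		return total-audited <= threshold
	default:
		return false
	}
}

func main() {
	// With the default vulnerabilityThresholdHigh of 100 and unit "percentage",
	// every single high finding has to be audited for the check to pass.
	fmt.Println(checkThreshold(12, 12, 100, "percentage")) // true
	fmt.Println(checkThreshold(12, 11, 100, "percentage")) // false
}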
@@ -146,6 +146,7 @@ public class CommonStepsTest extends BasePiperTest{
    'buildSetResult',
    'runClosures',
    'checkmarxExecuteScan', //implementing new golang pattern without fields
    'checkmarxOneExecuteScan', //implementing new golang pattern without fields
    'githubCreateIssue', //implementing new golang pattern without fields
    'githubCreatePullRequest', //implementing new golang pattern without fields
    'githubPublishRelease', //implementing new golang pattern without fields
12
vars/checkmarxOneExecuteScan.groovy
Normal file
@@ -0,0 +1,12 @@
import groovy.transform.Field

@Field String STEP_NAME = getClass().getName()
@Field String METADATA_FILE = 'metadata/checkmarxOneExecuteScan.yaml'

//Metadata maintained in file project://resources/metadata/checkmarxOneExecuteScan.yaml

void call(Map parameters = [:]) {
    List credentials = [[type: 'usernamePassword', id: 'checkmarxOneCredentialsId', env: ['PIPER_clientId', 'PIPER_clientSecret']],
                        [type: 'token', id: 'checkmarxOneAPIKey', env: ['PIPER_APIKey']]]
    piperExecuteBin(parameters, STEP_NAME, METADATA_FILE, credentials, true)
}
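The credential wiring above follows the usual piper wrapper pattern: the Jenkins credentials referenced by `checkmarxOneCredentialsId` and `checkmarxOneAPIKey` are exposed to the step binary through the environment variables listed in `env` (`PIPER_clientId`, `PIPER_clientSecret`, `PIPER_APIKey`). The short Go sketch below is not part of this commit; it only shows one way a caller could decide between API-key and client-credential authentication based on those variables. The helper name and the precedence shown are assumptions, not the step's documented behaviour.

package main

import (
	"fmt"
	"os"
)

// resolveAuthMode is an illustrative helper: it prefers an API key if one is
// provided and falls back to clientId/clientSecret otherwise.
func resolveAuthMode() (string, error) {
	if os.Getenv("PIPER_APIKey") != "" {
		return "apikey", nil
	}
	if os.Getenv("PIPER_clientId") != "" && os.Getenv("PIPER_clientSecret") != "" {
		return "client_credentials", nil
	}
	return "", fmt.Errorf("no checkmarxOne credentials found in the environment")
}

func main() {
	mode, err := resolveAuthMode()
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println("authenticating via", mode)
}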