package cmd

import (
	"archive/zip"
	"context"
	"encoding/json"
	"encoding/xml"
	"fmt"
	"io"
	"math"
	"os"
	"path/filepath"
	"regexp"
	"sort"
	"strconv"
	"strings"
	"time"

	"github.com/SAP/jenkins-library/pkg/checkmarx"
	piperGithub "github.com/SAP/jenkins-library/pkg/github"
	piperHttp "github.com/SAP/jenkins-library/pkg/http"
	"github.com/SAP/jenkins-library/pkg/log"
	"github.com/SAP/jenkins-library/pkg/piperutils"
	"github.com/SAP/jenkins-library/pkg/reporting"
	"github.com/SAP/jenkins-library/pkg/telemetry"
	"github.com/SAP/jenkins-library/pkg/toolrecord"
	"github.com/bmatcuk/doublestar"
	"github.com/google/go-github/v45/github"
	"github.com/pkg/errors"
)
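// checkmarxExecuteScanUtils bundles the filesystem, archiving, and GitHub
// helpers used by this step so they can be replaced by mocks in tests.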
type checkmarxExecuteScanUtils interface {
	FileInfoHeader(fi os.FileInfo) (*zip.FileHeader, error)
	Stat(name string) (os.FileInfo, error)
	Open(name string) (*os.File, error)
	WriteFile(filename string, data []byte, perm os.FileMode) error
	MkdirAll(path string, perm os.FileMode) error
	PathMatch(pattern, name string) (bool, error)
	GetWorkspace() string
	GetIssueService() *github.IssuesService
	GetSearchService() *github.SearchService
}

type checkmarxExecuteScanUtilsBundle struct {
	workspace string
	issues    *github.IssuesService
	search    *github.SearchService
}

func (c *checkmarxExecuteScanUtilsBundle) PathMatch(pattern, name string) (bool, error) {
	return doublestar.PathMatch(pattern, name)
}

func (c *checkmarxExecuteScanUtilsBundle) GetWorkspace() string {
	return c.workspace
}

func (c *checkmarxExecuteScanUtilsBundle) WriteFile(filename string, data []byte, perm os.FileMode) error {
	return os.WriteFile(filename, data, perm)
}

func (c *checkmarxExecuteScanUtilsBundle) MkdirAll(path string, perm os.FileMode) error {
	return os.MkdirAll(path, perm)
}

func (c *checkmarxExecuteScanUtilsBundle) FileInfoHeader(fi os.FileInfo) (*zip.FileHeader, error) {
	return zip.FileInfoHeader(fi)
}

func (c *checkmarxExecuteScanUtilsBundle) Stat(name string) (os.FileInfo, error) {
	return os.Stat(name)
}

func (c *checkmarxExecuteScanUtilsBundle) Open(name string) (*os.File, error) {
	return os.Open(name)
}

func (c *checkmarxExecuteScanUtilsBundle) CreateIssue(ghCreateIssueOptions *piperGithub.CreateIssueOptions) error {
	return piperGithub.CreateIssue(ghCreateIssueOptions)
}

func (c *checkmarxExecuteScanUtilsBundle) GetIssueService() *github.IssuesService {
	return c.issues
}

func (c *checkmarxExecuteScanUtilsBundle) GetSearchService() *github.SearchService {
	return c.search
}
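// newCheckmarxExecuteScanUtilsBundle returns the default utils implementation
// rooted at the given workspace; the GitHub services remain nil when no client
// is available.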
func newCheckmarxExecuteScanUtilsBundle(workspace string, client *github.Client) checkmarxExecuteScanUtils {
	utils := checkmarxExecuteScanUtilsBundle{
		workspace: workspace,
	}
	if client != nil {
		utils.issues = client.Issues
		utils.search = client.Search
	}
	return &utils
}
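// checkmarxExecuteScan is the entry point of the step: it sets up the HTTP and
// GitHub clients, instantiates the Checkmarx system, and delegates to runScan.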
func checkmarxExecuteScan(config checkmarxExecuteScanOptions, _ *telemetry.CustomData, influx *checkmarxExecuteScanInflux) {
	client := &piperHttp.Client{}
	options := piperHttp.ClientOptions{MaxRetries: config.MaxRetries}
	client.SetOptions(options)
	// TODO provide parameter for trusted certs
	ctx, ghClient, err := piperGithub.NewClient(config.GithubToken, config.GithubAPIURL, "", []string{})
	if err != nil {
		log.Entry().WithError(err).Warning("Failed to get GitHub client")
	}
	sys, err := checkmarx.NewSystemInstance(client, config.ServerURL, config.Username, config.Password)
	if err != nil {
		log.Entry().WithError(err).Fatalf("Failed to create Checkmarx client talking to URL %v", config.ServerURL)
	}
	influx.step_data.fields.checkmarx = false
	utils := newCheckmarxExecuteScanUtilsBundle("./", ghClient)
	if err := runScan(ctx, config, sys, influx, utils); err != nil {
		log.Entry().WithError(err).Fatal("Failed to execute Checkmarx scan.")
	}
	influx.step_data.fields.checkmarx = true
}
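// runScan resolves the team and target project (creating the project if it
// does not exist yet), applies the configured preset, and then hands over to
// uploadAndScan.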
func runScan(ctx context.Context, config checkmarxExecuteScanOptions, sys checkmarx.System, influx *checkmarxExecuteScanInflux, utils checkmarxExecuteScanUtils) error {
	teamID := config.TeamID
	if len(teamID) == 0 {
		readTeamID, err := loadTeamIDByTeamName(config, sys, teamID)
		if err != nil {
			return err
		}
		teamID = readTeamID
	}
	project, projectName, err := loadExistingProject(sys, config.ProjectName, config.PullRequestName, teamID)
	if err != nil {
		return errors.Wrap(err, "error when trying to load project")
	}
	if project.Name == projectName {
		err = presetExistingProject(config, sys, projectName, project)
		if err != nil {
			return err
		}
	} else {
		if len(teamID) == 0 {
			return errors.New("TeamName or TeamID is required to create a new project")
		}
		project, err = createNewProject(config, sys, projectName, teamID)
		if err != nil {
			return err
		}
	}
	err = uploadAndScan(ctx, config, sys, project, influx, utils)
	if err != nil {
		return errors.Wrap(err, "scan, upload, and result validation returned an error")
	}
	return nil
}
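// loadTeamIDByTeamName resolves the configured TeamName to its ID; the
// Checkmarx API may deliver the ID either as a JSON string or as a number, so
// both encodings are handled.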
func loadTeamIDByTeamName(config checkmarxExecuteScanOptions, sys checkmarx.System, teamID string) (string, error) {
	team, err := loadTeam(sys, config.TeamName)
	if err != nil {
		return "", errors.Wrap(err, "failed to load team")
	}
	teamIDBytes, _ := team.ID.MarshalJSON()
	err = json.Unmarshal(teamIDBytes, &teamID)
	if err != nil {
		var teamIDInt int
		err = json.Unmarshal(teamIDBytes, &teamIDInt)
		if err != nil {
			return "", errors.Wrap(err, "failed to unmarshal team.ID")
		}
		teamID = strconv.Itoa(teamIDInt)
	}
	return teamID, nil
}

func createNewProject(config checkmarxExecuteScanOptions, sys checkmarx.System, projectName string, teamID string) (checkmarx.Project, error) {
	log.Entry().Infof("Project %v does not exist, starting to create it...", projectName)
	presetID, _ := strconv.Atoi(config.Preset)
	project, err := createAndConfigureNewProject(sys, projectName, teamID, presetID, config.Preset, config.SourceEncoding)
	if err != nil {
		return checkmarx.Project{}, errors.Wrapf(err, "failed to create and configure new project %v", projectName)
	}
	return project, nil
}

func presetExistingProject(config checkmarxExecuteScanOptions, sys checkmarx.System, projectName string, project checkmarx.Project) error {
	log.Entry().Infof("Project %v exists...", projectName)
	if len(config.Preset) > 0 {
		presetID, _ := strconv.Atoi(config.Preset)
		err := setPresetForProject(sys, project.ID, presetID, projectName, config.Preset, config.SourceEncoding)
		if err != nil {
			return errors.Wrapf(err, "failed to set preset %v for project %v", config.Preset, projectName)
		}
	}
	return nil
}

func loadTeam(sys checkmarx.System, teamName string) (checkmarx.Team, error) {
	teams := sys.GetTeams()
	team := checkmarx.Team{}
	var err error
	if len(teams) > 0 && len(teamName) > 0 {
		team, err = sys.FilterTeamByName(teams, teamName)
	}
	if err != nil {
		return team, fmt.Errorf("failed to identify team by teamName %v", teamName)
	}
	return team, nil
}
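// loadExistingProject looks up the project for the configured name and team.
// For pull requests the project name is suffixed with the pull request name
// and, if no such project exists yet, a branch project is created from the
// base project. An empty checkmarx.Project signals that nothing was found.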
func loadExistingProject(sys checkmarx.System, initialProjectName, pullRequestName, teamID string) (checkmarx.Project, string, error) {
	var project checkmarx.Project
	projectName := initialProjectName
	if len(pullRequestName) > 0 {
		projectName = fmt.Sprintf("%v_%v", initialProjectName, pullRequestName)
		projects, err := sys.GetProjectsByNameAndTeam(projectName, teamID)
		if err != nil || len(projects) == 0 {
			projects, err = sys.GetProjectsByNameAndTeam(initialProjectName, teamID)
			if err != nil {
				return project, projectName, errors.Wrap(err, "failed getting projects")
			}
			if len(projects) == 0 {
				return checkmarx.Project{}, projectName, nil
			}
			branchProject, err := sys.GetProjectByID(sys.CreateBranch(projects[0].ID, projectName))
			if err != nil {
				return project, projectName, fmt.Errorf("failed to create branch %v for project %v", projectName, initialProjectName)
			}
			project = branchProject
		} else {
			project = projects[0]
			log.Entry().Debugf("Loaded project with name %v", project.Name)
		}
	} else {
		projects, err := sys.GetProjectsByNameAndTeam(projectName, teamID)
		if err != nil {
			return project, projectName, errors.Wrap(err, "failed getting projects")
		}
		if len(projects) == 0 {
			return checkmarx.Project{}, projectName, nil
		}
		if len(projects) == 1 {
			project = projects[0]
		} else {
			for _, current_project := range projects {
				if projectName == current_project.Name {
					project = current_project
					break
				}
			}
			if len(project.Name) == 0 {
				return project, projectName, errors.New("Cannot find project " + projectName + ". You need to provide the teamName parameter if you want a new project to be created.")
			}
		}
		log.Entry().Debugf("Loaded project with name %v", project.Name)
	}
	return project, projectName, nil
}
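// zipWorkspaceFiles compresses the workspace into workspace.zip. filterPattern
// is a comma-separated list of doublestar patterns, where a leading '!' marks
// an exclusion (e.g. "**/*.js, !**/node_modules/**" as an illustrative value).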
func zipWorkspaceFiles(filterPattern string, utils checkmarxExecuteScanUtils) (*os.File, error) {
	zipFileName := filepath.Join(utils.GetWorkspace(), "workspace.zip")
	patterns := piperutils.Trim(strings.Split(filterPattern, ","))
	sort.Strings(patterns)
	zipFile, err := os.Create(zipFileName)
	if err != nil {
		return zipFile, errors.Wrap(err, "failed to create archive of project sources")
	}
	defer zipFile.Close()
	err = zipFolder(utils.GetWorkspace(), zipFile, patterns, utils)
	if err != nil {
		return nil, errors.Wrap(err, "failed to compact folder")
	}
	return zipFile, nil
}
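// uploadAndScan either verifies the latest existing scan (verifyOnly mode) or
// uploads the zipped sources and triggers a new scan. An incremental scan is
// turned into a full scan when the run is an optimized scheduled scan or when
// the configured fullScanCycle is reached.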
func uploadAndScan(ctx context.Context, config checkmarxExecuteScanOptions, sys checkmarx.System, project checkmarx.Project, influx *checkmarxExecuteScanInflux, utils checkmarxExecuteScanUtils) error {
	previousScans, err := sys.GetScans(project.ID)
	if err != nil && config.VerifyOnly {
		log.Entry().Warnf("Cannot load scans for project %v, verification only mode aborted", project.Name)
	}
	if len(previousScans) > 0 && config.VerifyOnly {
		err := verifyCxProjectCompliance(ctx, config, sys, previousScans[0].ID, influx, utils)
		if err != nil {
			log.SetErrorCategory(log.ErrorCompliance)
			return errors.Wrapf(err, "project %v not compliant", project.Name)
		}
	} else {
		zipFile, err := zipWorkspaceFiles(config.FilterPattern, utils)
		if err != nil {
			return errors.Wrap(err, "failed to zip workspace files")
		}
		err = sys.UploadProjectSourceCode(project.ID, zipFile.Name())
		if err != nil {
			return errors.Wrapf(err, "failed to upload source code for project %v", project.Name)
		}

		log.Entry().Debugf("Source code uploaded for project %v", project.Name)
		err = os.Remove(zipFile.Name())
		if err != nil {
			log.Entry().WithError(err).Warnf("Failed to delete zipped source code for project %v", project.Name)
		}

		incremental := config.Incremental
		fullScanCycle, err := strconv.Atoi(config.FullScanCycle)
		if err != nil {
			log.SetErrorCategory(log.ErrorConfiguration)
			return errors.Wrapf(err, "invalid configuration value for fullScanCycle %v, must be a positive int", config.FullScanCycle)
		}

		if config.IsOptimizedAndScheduled {
			incremental = false
		} else if incremental && config.FullScansScheduled && fullScanCycle > 0 && (getNumCoherentIncrementalScans(previousScans)+1)%fullScanCycle == 0 {
			incremental = false
		}

		return triggerScan(ctx, config, sys, project, incremental, influx, utils)
	}
	return nil
}
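// triggerScan starts the scan on the Checkmarx server, polls until it reaches
// a final state, and then verifies the compliance of the results.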
func triggerScan(ctx context.Context, config checkmarxExecuteScanOptions, sys checkmarx.System, project checkmarx.Project, incremental bool, influx *checkmarxExecuteScanInflux, utils checkmarxExecuteScanUtils) error {
	scan, err := sys.ScanProject(project.ID, incremental, true, !config.AvoidDuplicateProjectScans)
	if err != nil {
		return errors.Wrapf(err, "cannot scan project %v", project.Name)
	}

	log.Entry().Debugf("Scanning project %v ", project.Name)
	err = pollScanStatus(sys, scan)
	if err != nil {
		return errors.Wrap(err, "polling scan status failed")
	}

	log.Entry().Debugln("Scan finished")
	return verifyCxProjectCompliance(ctx, config, sys, scan.ID, influx, utils)
}
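// verifyCxProjectCompliance downloads and archives the scan reports (PDF, XML,
// optional SARIF, JSON, toolrecord), reports the metrics to influx, and
// enforces the configured vulnerability thresholds, optionally creating a
// GitHub issue when they are violated.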
func verifyCxProjectCompliance(ctx context.Context, config checkmarxExecuteScanOptions, sys checkmarx.System, scanID int, influx *checkmarxExecuteScanInflux, utils checkmarxExecuteScanUtils) error {
	var reports []piperutils.Path
	if config.GeneratePdfReport {
		pdfReportName := createReportName(utils.GetWorkspace(), "CxSASTReport_%v.pdf")
		err := downloadAndSaveReport(sys, pdfReportName, scanID, utils)
		if err != nil {
			log.Entry().Warning("Report download failed - continue processing ...")
		} else {
			reports = append(reports, piperutils.Path{Target: pdfReportName, Mandatory: true})
		}
	} else {
		log.Entry().Debug("Report generation is disabled via configuration")
	}

	xmlReportName := createReportName(utils.GetWorkspace(), "CxSASTResults_%v.xml")
	results, err := getDetailedResults(config, sys, xmlReportName, scanID, utils)
	if err != nil {
		return errors.Wrap(err, "failed to get detailed results")
	}
	reports = append(reports, piperutils.Path{Target: xmlReportName})

	// generate SARIF report
	if config.ConvertToSarif {
		log.Entry().Info("Calling conversion to SARIF function.")
		sarif, err := checkmarx.ConvertCxxmlToSarif(sys, xmlReportName, scanID)
		if err != nil {
			return fmt.Errorf("failed to generate SARIF")
		}
		paths, err := checkmarx.WriteSarif(sarif)
		if err != nil {
			return fmt.Errorf("failed to write sarif")
		}
		reports = append(reports, paths...)
	}

	// create toolrecord
	toolRecordFileName, err := createToolRecordCx(utils, utils.GetWorkspace(), config, results)
	if err != nil {
		// do not fail until the framework is well established
		log.Entry().Warning("TR_CHECKMARX: Failed to create toolrecord file ...", err)
	} else {
		reports = append(reports, piperutils.Path{Target: toolRecordFileName})
	}

	// create JSON report (regardless of whether vulnerabilityThreshold is enabled or not)
	jsonReport := checkmarx.CreateJSONReport(results)
	paths, err := checkmarx.WriteJSONReport(jsonReport)
	if err != nil {
		log.Entry().Warning("failed to write JSON report...", err)
	} else {
		// add JSON report to archiving list
		reports = append(reports, paths...)
	}

	links := []piperutils.Path{{Target: results["DeepLink"].(string), Name: "Checkmarx Web UI"}}

	insecure := false
	var insecureResults []string
	var neutralResults []string

	if config.VulnerabilityThresholdEnabled {
		insecure, insecureResults, neutralResults = enforceThresholds(config, results)
		scanReport := checkmarx.CreateCustomReport(results, insecureResults, neutralResults)

		if insecure && config.CreateResultIssue && len(config.GithubToken) > 0 && len(config.GithubAPIURL) > 0 && len(config.Owner) > 0 && len(config.Repository) > 0 {
			log.Entry().Debug("Creating/updating GitHub issue with check results")
			gh := reporting.GitHub{
				Owner:         &config.Owner,
				Repository:    &config.Repository,
				Assignees:     &config.Assignees,
				IssueService:  utils.GetIssueService(),
				SearchService: utils.GetSearchService(),
			}
			if err := gh.UploadSingleReport(ctx, scanReport); err != nil {
				return fmt.Errorf("failed to upload scan results into GitHub: %w", err)
			}
		}

		paths, err := checkmarx.WriteCustomReports(scanReport, fmt.Sprint(results["ProjectName"]), fmt.Sprint(results["ProjectId"]))
		if err != nil {
			// do not fail until we have a better idea to handle it
			log.Entry().Warning("failed to write HTML/MarkDown report file ...", err)
		} else {
			reports = append(reports, paths...)
		}
	}

	piperutils.PersistReportsAndLinks("checkmarxExecuteScan", utils.GetWorkspace(), utils, reports, links)
	reportToInflux(results, influx)

	if insecure {
		if config.VulnerabilityThresholdResult == "FAILURE" {
			log.SetErrorCategory(log.ErrorCompliance)
			return fmt.Errorf("the project is not compliant - see report for details")
		}
		log.Entry().Errorf("Checkmarx scan result set to %v, some results are not meeting defined thresholds. For details see the archived report.", config.VulnerabilityThresholdResult)
	} else {
		log.Entry().Infoln("Checkmarx scan finished successfully")
	}
	return nil
}
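// createReportName renders reportFileNameTemplate with a filesystem-safe
// timestamp, e.g. "CxSASTResults_2022_08_09T10_57_02_02_00.xml" for an XML
// report (illustrative value).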
func createReportName(workspace, reportFileNameTemplate string) string {
	regExpFileName := regexp.MustCompile(`[^\w\d]`)
	timeStamp, _ := time.Now().Local().MarshalText()
	return filepath.Join(workspace, fmt.Sprintf(reportFileNameTemplate, regExpFileName.ReplaceAllString(string(timeStamp), "_")))
}
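// pollScanStatus queries the scan status every 10 seconds until it reaches
// "Finished", "Canceled", or "Failed" and maps the latter two to errors.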
func pollScanStatus(sys checkmarx.System, scan checkmarx.Scan) error {
	status := "Scan phase: New"
	pastStatus := status
	log.Entry().Info(status)
	stepDetail := "..."
	stageDetail := "..."
	for {
		var detail checkmarx.ScanStatusDetail
		status, detail = sys.GetScanStatusAndDetail(scan.ID)
		if len(detail.Stage) > 0 {
			stageDetail = detail.Stage
		}
		if len(detail.Step) > 0 {
			stepDetail = detail.Step
		}
		if status == "Finished" || status == "Canceled" || status == "Failed" {
			break
		}
		status = fmt.Sprintf("Scan phase: %v (%v / %v)", status, stageDetail, stepDetail)
		if pastStatus != status {
			log.Entry().Info(status)
			pastStatus = status
		}
		log.Entry().Debug("Polling for status: sleeping...")
		time.Sleep(10 * time.Second)
	}
	if status == "Canceled" {
		log.SetErrorCategory(log.ErrorCustom)
		return fmt.Errorf("scan canceled via web interface")
	}
	if status == "Failed" {
		if strings.Contains(stageDetail, "<ErrorCode>17033</ErrorCode>") { // Translate a cryptic XML error into a human-readable message
			stageDetail = "Failed to start scanning due to one of the following reasons: source folder is empty, all source files are of an unsupported language or file format"
		}
		return fmt.Errorf("Checkmarx scan failed with the following error: %v", stageDetail)
	}
	return nil
}

func reportToInflux(results map[string]interface{}, influx *checkmarxExecuteScanInflux) {
	influx.checkmarx_data.fields.high_issues = results["High"].(map[string]int)["Issues"]
	influx.checkmarx_data.fields.high_not_false_postive = results["High"].(map[string]int)["NotFalsePositive"]
	influx.checkmarx_data.fields.high_not_exploitable = results["High"].(map[string]int)["NotExploitable"]
	influx.checkmarx_data.fields.high_confirmed = results["High"].(map[string]int)["Confirmed"]
	influx.checkmarx_data.fields.high_urgent = results["High"].(map[string]int)["Urgent"]
	influx.checkmarx_data.fields.high_proposed_not_exploitable = results["High"].(map[string]int)["ProposedNotExploitable"]
	influx.checkmarx_data.fields.high_to_verify = results["High"].(map[string]int)["ToVerify"]
	influx.checkmarx_data.fields.medium_issues = results["Medium"].(map[string]int)["Issues"]
	influx.checkmarx_data.fields.medium_not_false_postive = results["Medium"].(map[string]int)["NotFalsePositive"]
	influx.checkmarx_data.fields.medium_not_exploitable = results["Medium"].(map[string]int)["NotExploitable"]
	influx.checkmarx_data.fields.medium_confirmed = results["Medium"].(map[string]int)["Confirmed"]
	influx.checkmarx_data.fields.medium_urgent = results["Medium"].(map[string]int)["Urgent"]
	influx.checkmarx_data.fields.medium_proposed_not_exploitable = results["Medium"].(map[string]int)["ProposedNotExploitable"]
	influx.checkmarx_data.fields.medium_to_verify = results["Medium"].(map[string]int)["ToVerify"]
	influx.checkmarx_data.fields.low_issues = results["Low"].(map[string]int)["Issues"]
	influx.checkmarx_data.fields.low_not_false_postive = results["Low"].(map[string]int)["NotFalsePositive"]
	influx.checkmarx_data.fields.low_not_exploitable = results["Low"].(map[string]int)["NotExploitable"]
	influx.checkmarx_data.fields.low_confirmed = results["Low"].(map[string]int)["Confirmed"]
	influx.checkmarx_data.fields.low_urgent = results["Low"].(map[string]int)["Urgent"]
	influx.checkmarx_data.fields.low_proposed_not_exploitable = results["Low"].(map[string]int)["ProposedNotExploitable"]
	influx.checkmarx_data.fields.low_to_verify = results["Low"].(map[string]int)["ToVerify"]
	influx.checkmarx_data.fields.information_issues = results["Information"].(map[string]int)["Issues"]
	influx.checkmarx_data.fields.information_not_false_postive = results["Information"].(map[string]int)["NotFalsePositive"]
	influx.checkmarx_data.fields.information_not_exploitable = results["Information"].(map[string]int)["NotExploitable"]
	influx.checkmarx_data.fields.information_confirmed = results["Information"].(map[string]int)["Confirmed"]
	influx.checkmarx_data.fields.information_urgent = results["Information"].(map[string]int)["Urgent"]
	influx.checkmarx_data.fields.information_proposed_not_exploitable = results["Information"].(map[string]int)["ProposedNotExploitable"]
	influx.checkmarx_data.fields.information_to_verify = results["Information"].(map[string]int)["ToVerify"]
	influx.checkmarx_data.fields.initiator_name = results["InitiatorName"].(string)
	influx.checkmarx_data.fields.owner = results["Owner"].(string)
	influx.checkmarx_data.fields.scan_id = results["ScanId"].(string)
	influx.checkmarx_data.fields.project_id = results["ProjectId"].(string)
	influx.checkmarx_data.fields.projectName = results["ProjectName"].(string)
	influx.checkmarx_data.fields.team = results["Team"].(string)
	influx.checkmarx_data.fields.team_full_path_on_report_date = results["TeamFullPathOnReportDate"].(string)
	influx.checkmarx_data.fields.scan_start = results["ScanStart"].(string)
	influx.checkmarx_data.fields.scan_time = results["ScanTime"].(string)
	influx.checkmarx_data.fields.lines_of_code_scanned = results["LinesOfCodeScanned"].(int)
	influx.checkmarx_data.fields.files_scanned = results["FilesScanned"].(int)
	influx.checkmarx_data.fields.checkmarx_version = results["CheckmarxVersion"].(string)
	influx.checkmarx_data.fields.scan_type = results["ScanType"].(string)
	influx.checkmarx_data.fields.preset = results["Preset"].(string)
	influx.checkmarx_data.fields.deep_link = results["DeepLink"].(string)
	influx.checkmarx_data.fields.report_creation_time = results["ReportCreationTime"].(string)
}

func downloadAndSaveReport(sys checkmarx.System, reportFileName string, scanID int, utils checkmarxExecuteScanUtils) error {
	report, err := generateAndDownloadReport(sys, scanID, "PDF")
	if err != nil {
		return errors.Wrap(err, "failed to download the report")
	}
	log.Entry().Debugf("Saving report to file %v...", reportFileName)
	return utils.WriteFile(reportFileName, report, 0o700)
}
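// enforceThresholds checks the findings against the configured vulnerability
// thresholds, either as absolute counts or as audited percentages (optionally
// per Low query), and returns the insecure flag together with the violating
// and neutral result lines used for reporting.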
func enforceThresholds(config checkmarxExecuteScanOptions, results map[string]interface{}) (bool, []string, []string) {
	neutralResults := []string{}
	insecureResults := []string{}
	insecure := false
	cxHighThreshold := config.VulnerabilityThresholdHigh
	cxMediumThreshold := config.VulnerabilityThresholdMedium
	cxLowThreshold := config.VulnerabilityThresholdLow
	cxLowThresholdPerQuery := config.VulnerabilityThresholdLowPerQuery
	cxLowThresholdPerQueryMax := config.VulnerabilityThresholdLowPerQueryMax
	highValue := results["High"].(map[string]int)["NotFalsePositive"]
	mediumValue := results["Medium"].(map[string]int)["NotFalsePositive"]
	lowValue := results["Low"].(map[string]int)["NotFalsePositive"]
	var unit string
	highViolation := ""
	mediumViolation := ""
	lowViolation := ""
	if config.VulnerabilityThresholdUnit == "percentage" {
		unit = "%"
		highAudited := results["High"].(map[string]int)["Issues"] - results["High"].(map[string]int)["NotFalsePositive"]
		highOverall := results["High"].(map[string]int)["Issues"]
		if highOverall == 0 {
			highAudited = 1
			highOverall = 1
		}
		mediumAudited := results["Medium"].(map[string]int)["Issues"] - results["Medium"].(map[string]int)["NotFalsePositive"]
		mediumOverall := results["Medium"].(map[string]int)["Issues"]
		if mediumOverall == 0 {
			mediumAudited = 1
			mediumOverall = 1
		}
		lowAudited := results["Low"].(map[string]int)["Confirmed"] + results["Low"].(map[string]int)["NotExploitable"]
		lowOverall := results["Low"].(map[string]int)["Issues"]
		if lowOverall == 0 {
			lowAudited = 1
			lowOverall = 1
		}
		highValue = int(float32(highAudited) / float32(highOverall) * 100.0)
		mediumValue = int(float32(mediumAudited) / float32(mediumOverall) * 100.0)
		lowValue = int(float32(lowAudited) / float32(lowOverall) * 100.0)
		if highValue < cxHighThreshold {
			insecure = true
			highViolation = fmt.Sprintf("<-- %v %v deviation", cxHighThreshold-highValue, unit)
		}
		if mediumValue < cxMediumThreshold {
			insecure = true
			mediumViolation = fmt.Sprintf("<-- %v %v deviation", cxMediumThreshold-mediumValue, unit)
		}
		// if the flag is switched on, calculate the Low findings threshold per query
		if cxLowThresholdPerQuery {
			lowPerQueryMap := results["LowPerQuery"].(map[string]map[string]int)
			if lowPerQueryMap != nil {
				for lowQuery, resultsLowQuery := range lowPerQueryMap {
					lowAuditedPerQuery := resultsLowQuery["Confirmed"] + resultsLowQuery["NotExploitable"]
					lowOverallPerQuery := resultsLowQuery["Issues"]
					lowAuditedRequiredPerQuery := int(math.Ceil(float64(lowOverallPerQuery) * float64(cxLowThreshold) / 100.0))
					if lowAuditedPerQuery < lowAuditedRequiredPerQuery && lowAuditedPerQuery < cxLowThresholdPerQueryMax {
						insecure = true
						lowViolation = fmt.Sprintf("<-- query: %v - audited: %v - required: %v ", lowQuery, lowAuditedPerQuery, lowAuditedRequiredPerQuery)
					}
				}
			}
		} else { // calculate the Low findings threshold in total
			if lowValue < cxLowThreshold {
				insecure = true
				lowViolation = fmt.Sprintf("<-- %v %v deviation", cxLowThreshold-lowValue, unit)
			}
		}
	}
	if config.VulnerabilityThresholdUnit == "absolute" {
		unit = " findings"
		if highValue > cxHighThreshold {
			insecure = true
			highViolation = fmt.Sprintf("<-- %v%v deviation", highValue-cxHighThreshold, unit)
		}
		if mediumValue > cxMediumThreshold {
			insecure = true
			mediumViolation = fmt.Sprintf("<-- %v%v deviation", mediumValue-cxMediumThreshold, unit)
		}
		if lowValue > cxLowThreshold {
			insecure = true
			lowViolation = fmt.Sprintf("<-- %v%v deviation", lowValue-cxLowThreshold, unit)
		}
	}

	highText := fmt.Sprintf("High %v%v %v", highValue, unit, highViolation)
	mediumText := fmt.Sprintf("Medium %v%v %v", mediumValue, unit, mediumViolation)
	lowText := fmt.Sprintf("Low %v%v %v", lowValue, unit, lowViolation)
	if len(highViolation) > 0 {
		insecureResults = append(insecureResults, highText)
	} else {
		neutralResults = append(neutralResults, highText)
	}
	if len(mediumViolation) > 0 {
		insecureResults = append(insecureResults, mediumText)
	} else {
		neutralResults = append(neutralResults, mediumText)
	}
	if len(lowViolation) > 0 {
		insecureResults = append(insecureResults, lowText)
	} else {
		neutralResults = append(neutralResults, lowText)
	}

	log.Entry().Infoln("")
	log.Entry().Info(highText)
	log.Entry().Info(mediumText)
	log.Entry().Info(lowText)
	log.Entry().Infoln("")

	return insecure, insecureResults, neutralResults
}

func createAndConfigureNewProject(sys checkmarx.System, projectName, teamID string, presetIDValue int, presetValue, engineConfiguration string) (checkmarx.Project, error) {
	if len(presetValue) == 0 {
		log.SetErrorCategory(log.ErrorConfiguration)
		return checkmarx.Project{}, fmt.Errorf("preset not specified, creation of project %v failed", projectName)
	}
	projectCreateResult, err := sys.CreateProject(projectName, teamID)
	if err != nil {
		return checkmarx.Project{}, errors.Wrapf(err, "cannot create project %v", projectName)
	}
	if err := setPresetForProject(sys, projectCreateResult.ID, presetIDValue, projectName, presetValue, engineConfiguration); err != nil {
		return checkmarx.Project{}, errors.Wrapf(err, "failed to set preset %v for project", presetValue)
	}

	projects, err := sys.GetProjectsByNameAndTeam(projectName, teamID)
	if err != nil || len(projects) == 0 {
		return checkmarx.Project{}, errors.Wrapf(err, "failed to load newly created project %v", projectName)
	}
	log.Entry().Debugf("New Project %v created", projectName)
	log.Entry().Debugf("Projects: %v", projects)
	return projects[0], nil
}

// loadPreset finds a checkmarx.Preset that has either the ID or Name given by presetValue.
// presetValue is not expected to be empty.
func loadPreset(sys checkmarx.System, presetValue string) (checkmarx.Preset, error) {
	presets := sys.GetPresets()
	var preset checkmarx.Preset
	var configuredPresetName string
	preset = sys.FilterPresetByName(presets, presetValue)
	configuredPresetName = presetValue
	if len(configuredPresetName) > 0 && preset.Name == configuredPresetName {
		log.Entry().Infof("Loaded preset %v", preset.Name)
		return preset, nil
	}

	log.Entry().Infof("Preset '%s' not found. Available presets are:", presetValue)
	for _, prs := range presets {
		log.Entry().Infof("preset id: %v, name: '%v'", prs.ID, prs.Name)
	}
	return checkmarx.Preset{}, fmt.Errorf("preset %v not found", preset.Name)
}

// setPresetForProject is only called when it has already been established that the preset needs to be set.
// It returns an error in case the preset could not be found or the project could not be updated.
func setPresetForProject(sys checkmarx.System, projectID, presetIDValue int, projectName, presetValue, engineConfiguration string) error {
	presetID := presetIDValue
	if presetID <= 0 {
		preset, err := loadPreset(sys, presetValue)
		if err != nil {
			return errors.Wrapf(err, "preset %v not found, configuration of project %v failed", presetValue, projectName)
		}
		presetID = preset.ID
	}
	err := sys.UpdateProjectConfiguration(projectID, presetID, engineConfiguration)
	if err != nil {
		return errors.Wrapf(err, "updating configuration of project %v failed", projectName)
	}
	return nil
}
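// generateAndDownloadReport requests a report of the given type (e.g. "PDF" or
// "XML"), polls its generation status every 10 seconds, and downloads it once
// it is ready.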
func generateAndDownloadReport(sys checkmarx.System, scanID int, reportType string) ([]byte, error) {
	report, err := sys.RequestNewReport(scanID, reportType)
	if err != nil {
		return []byte{}, errors.Wrap(err, "failed to request new report")
	}
	finalStatus := 1
	for {
		reportStatus, err := sys.GetReportStatus(report.ReportID)
		if err != nil {
			return []byte{}, errors.Wrap(err, "failed to get report status")
		}
		finalStatus = reportStatus.Status.ID
		if finalStatus != 1 {
			break
		}
		time.Sleep(10 * time.Second)
	}
	if finalStatus == 2 {
		return sys.DownloadReport(report.ReportID)
	}
	return []byte{}, fmt.Errorf("unexpected status %v received", finalStatus)
}
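// getNumCoherentIncrementalScans counts the uninterrupted run of incremental
// scans at the head of the scan history; it is used to decide whether the next
// scan has to be a full scan.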
func getNumCoherentIncrementalScans(scans []checkmarx.ScanStatus) int {
	count := 0
	for _, scan := range scans {
		if !scan.IsIncremental {
			break
		}
		count++
	}
	return count
}
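// getDetailedResults downloads the XML report of the scan, stores it under
// reportFileName, and aggregates the findings into a result map keyed by
// severity, optionally including per-query Low counts.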
func getDetailedResults(config checkmarxExecuteScanOptions, sys checkmarx.System, reportFileName string, scanID int, utils checkmarxExecuteScanUtils) (map[string]interface{}, error) {
	resultMap := map[string]interface{}{}
	data, err := generateAndDownloadReport(sys, scanID, "XML")
	if err != nil {
		return resultMap, errors.Wrap(err, "failed to download xml report")
	}
	if len(data) > 0 {
		err = utils.WriteFile(reportFileName, data, 0o700)
		if err != nil {
			return resultMap, errors.Wrap(err, "failed to write file")
		}
		var xmlResult checkmarx.DetailedResult
		err := xml.Unmarshal(data, &xmlResult)
		if err != nil {
			return resultMap, errors.Wrapf(err, "failed to unmarshal XML report for scan %v", scanID)
		}
		resultMap["InitiatorName"] = xmlResult.InitiatorName
		resultMap["Owner"] = xmlResult.Owner
		resultMap["ScanId"] = xmlResult.ScanID
		resultMap["ProjectId"] = xmlResult.ProjectID
		resultMap["ProjectName"] = xmlResult.ProjectName
		resultMap["Team"] = xmlResult.Team
		resultMap["TeamFullPathOnReportDate"] = xmlResult.TeamFullPathOnReportDate
		resultMap["ScanStart"] = xmlResult.ScanStart
		resultMap["ScanTime"] = xmlResult.ScanTime
		resultMap["LinesOfCodeScanned"] = xmlResult.LinesOfCodeScanned
		resultMap["FilesScanned"] = xmlResult.FilesScanned
		resultMap["CheckmarxVersion"] = xmlResult.CheckmarxVersion
		resultMap["ScanType"] = xmlResult.ScanType
		resultMap["Preset"] = xmlResult.Preset
		resultMap["DeepLink"] = xmlResult.DeepLink
		resultMap["ReportCreationTime"] = xmlResult.ReportCreationTime
		resultMap["High"] = map[string]int{}
		resultMap["Medium"] = map[string]int{}
		resultMap["Low"] = map[string]int{}
		resultMap["Information"] = map[string]int{}
		for _, query := range xmlResult.Queries {
			for _, result := range query.Results {
				key := result.Severity
				var submap map[string]int
				if resultMap[key] == nil {
					submap = map[string]int{}
					resultMap[key] = submap
				} else {
					submap = resultMap[key].(map[string]int)
				}
				submap["Issues"]++
				auditState := "ToVerify"
				switch result.State {
				case "1":
					auditState = "NotExploitable"
				case "2":
					auditState = "Confirmed"
				case "3":
					auditState = "Urgent"
				case "4":
					auditState = "ProposedNotExploitable"
				case "0":
				default:
					auditState = "ToVerify"
				}
				submap[auditState]++
				if result.FalsePositive != "True" {
					submap["NotFalsePositive"]++
				}
			}
		}

		// if the flag is switched on, build the list of Low findings per query
		if config.VulnerabilityThresholdLowPerQuery {
			var lowPerQuery = map[string]map[string]int{}
			for _, query := range xmlResult.Queries {
				for _, result := range query.Results {
					if result.Severity != "Low" {
						continue
					}
					key := query.Name
					var submap map[string]int
					if lowPerQuery[key] == nil {
						submap = map[string]int{}
						lowPerQuery[key] = submap
					} else {
						submap = lowPerQuery[key]
					}
					submap["Issues"]++
					auditState := "ToVerify"
					switch result.State {
					case "1":
						auditState = "NotExploitable"
					case "2":
						auditState = "Confirmed"
					case "3":
						auditState = "Urgent"
					case "4":
						auditState = "ProposedNotExploitable"
					case "0":
					default:
						auditState = "ToVerify"
					}
					submap[auditState]++
					if result.FalsePositive != "True" {
						submap["NotFalsePositive"]++
					}
				}
			}
			resultMap["LowPerQuery"] = lowPerQuery
		}
	}
	return resultMap, nil
}
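// zipFolder walks the source directory and adds every file that passes the
// filter patterns to the archive; it returns an error when no file was zipped
// at all.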
func zipFolder(source string, zipFile io.Writer, patterns []string, utils checkmarxExecuteScanUtils) error {
	archive := zip.NewWriter(zipFile)
	defer archive.Close()

	info, err := utils.Stat(source)
	if err != nil {
		return nil
	}

	var baseDir string
	if info.IsDir() {
		baseDir = filepath.Base(source)
	}

	fileCount := 0
	err = filepath.Walk(source, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		noMatch, err := isFileNotMatchingPattern(patterns, path, info, utils)
		if err != nil || noMatch {
			return err
		}

		header, err := utils.FileInfoHeader(info)
		if err != nil {
			return err
		}
		if baseDir != "" {
			header.Name = filepath.Join(baseDir, strings.TrimPrefix(path, source))
		}

		adaptHeader(info, header)

		writer, err := archive.CreateHeader(header)
		if err != nil || info.IsDir() {
			return err
		}

		file, err := utils.Open(path)
		if err != nil {
			return err
		}
		defer file.Close()
		_, err = io.Copy(writer, file)
		fileCount++
		return err
	})
	log.Entry().Infof("Zipped %d files", fileCount)
	err = handleZeroFilesZipped(source, err, fileCount)
	return err
}

func adaptHeader(info os.FileInfo, header *zip.FileHeader) {
	if info.IsDir() {
		header.Name += "/"
	} else {
		header.Method = zip.Deflate
	}
}

func handleZeroFilesZipped(source string, err error, fileCount int) error {
	if err == nil && fileCount == 0 {
		log.SetErrorCategory(log.ErrorConfiguration)
		err = fmt.Errorf("filterPattern matched no files or workspace directory '%s' was empty", source)
	}
	return err
}

// isFileNotMatchingPattern checks whether the file path does not match any of the patterns.
// If it matches a negative pattern (starting with '!') then true is returned.
//
// If it is a directory, false is returned.
// If no patterns are provided, false is returned.
func isFileNotMatchingPattern(patterns []string, path string, info os.FileInfo, utils checkmarxExecuteScanUtils) (bool, error) {
	if len(patterns) == 0 || info.IsDir() {
		return false, nil
	}

	for _, pattern := range patterns {
		negative := false
		if strings.HasPrefix(pattern, "!") {
			pattern = strings.TrimLeft(pattern, "!")
			negative = true
		}
		match, err := utils.PathMatch(pattern, path)
		if err != nil {
			return false, errors.Wrapf(err, "Pattern %v could not be executed", pattern)
		}
		if match {
			return negative, nil
		}
	}
	return true, nil
}
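// createToolRecordCx writes a toolrecord file pointing at the Checkmarx
// project and scan on the server and returns its file name.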
func createToolRecordCx(utils checkmarxExecuteScanUtils, workspace string, config checkmarxExecuteScanOptions, results map[string]interface{}) (string, error) {
	record := toolrecord.New(utils, workspace, "checkmarx", config.ServerURL)
	// TODO TeamID - see runScan()
	// record.AddKeyData("team", XXX, resultMap["Team"], "")
	// Project
	err := record.AddKeyData("project",
		results["ProjectId"].(string),
		results["ProjectName"].(string),
		"")
	if err != nil {
		return "", err
	}
	// Scan
	err = record.AddKeyData("scanid",
		results["ScanId"].(string),
		results["ScanId"].(string),
		results["DeepLink"].(string))
	if err != nil {
		return "", err
	}
	err = record.Persist()
	if err != nil {
		return "", err
	}
	return record.GetFileName(), nil
}