
feat(fortifyExecuteScan): full FPR to SARIF implementation (#3604)

* feat(FPRtoSARIF): boilerplate & comments

* Feat(Ingest): Build done, Vulnerabilities partway

* feat(Vulnerabilities): now entirely parsed

* feat(FprToSarif): integration in Piper step, full xml structure

* feat(fpr_to_sarif): base program. Need to replace names in messages

* feat(fpr_to_sarif): message substitution and custom definition integration

* fix(fpr_to_sarif): missing replacement in tools object

* fix(fpr_to_sarif): failing unit test

* Fix fortify folder creation for generating sarif

* deletion of unzip folder

* feat(fpr_to_sarif): better unit test

* fix(fpr_to_sarif): pr tests failing

* feat(fortifyExecuteScan): complete SARIF file generation

* fix(fpr_to_sarif): add extra check and test to prevent panics

* rebase onto master, fix ALL conflicts, adapt code and format

* fix missing added properties

* fix(SARIF): structure

* fix(whitesource): wrong sarif structures

* Update pkg/fortify/fpr_to_sarif.go

* Update pkg/format/sarif.go

* Update pkg/format/sarif.go

Co-authored-by: Sumeet PATIL <sumeet.patil@sap.com>
Co-authored-by: Sven Merk <33895725+nevskrem@users.noreply.github.com>
xgoffin 2022-03-14 11:26:05 +01:00 committed by GitHub
parent b224f2294c
commit dfd2278639
4 changed files with 611 additions and 49 deletions

pkg/format/sarif.go

@@ -9,28 +9,28 @@ type SARIF struct {
// Runs of a Tool and related Results
type Runs struct {
Results []Results `json:"results"`
Tool Tool `json:"tool"`
Invocations []Invocations `json:"invocations"`
OriginalUriBaseIds OriginalUriBaseIds `json:"originalUriBaseIds"`
Artifacts []Artifact `json:"artifacts"`
AutomationDetails AutomationDetails `json:"automationDetails"`
ColumnKind string `json:"columnKind" default:"utf16CodeUnits"`
ThreadFlowLocations []Locations `json:"threadFlowLocations"`
Taxonomies []Taxonomies `json:"taxonomies"`
}
// Results these structs are relevant to the Results object
type Results struct {
RuleID string `json:"ruleId"`
RuleIndex int `json:"ruleIndex"`
Level string `json:"level,omitempty"`
Message Message `json:"message"`
AnalysisTarget ArtifactLocation `json:"analysisTarget,omitempty"`
Locations []Location `json:"locations"`
CodeFlows []CodeFlow `json:"codeFlows"`
RelatedLocations []RelatedLocation `json:"relatedLocations"`
Properties SarifProperties `json:"properties"`
}
// Message to detail the finding
@@ -40,8 +40,15 @@ type Message struct {
// Location of the finding
type Location struct {
PhysicalLocation PhysicalLocation `json:"physicalLocation"`
Message *Message `json:"message,omitempty"`
}
// PhysicalLocation
type PhysicalLocation struct {
ArtifactLocation ArtifactLocation `json:"artifactLocation"`
Region Region `json:"region"`
ContextRegion ContextRegion `json:"contextRegion"`
LogicalLocations []LogicalLocation `json:"logicalLocations,omitempty"`
}
@@ -53,12 +60,13 @@ type ArtifactLocation struct {
// Region where the finding was detected
type Region struct {
StartLine int `json:"startLine,omitempty"`
StartColumn int `json:"startColumn,omitempty"`
EndLine int `json:"EndLine,omitempty"`
EndColumn int `json:"EndColumn,omitempty"`
ByteOffset int `json:"ByteOffset,omitempty"`
ByteLength int `json:"ByteLength,omitempty"`
Snippet SnippetSarif `json:"snippet"`
}
// LogicalLocation of the finding
@@ -87,11 +95,11 @@ type Tool struct {
// Driver meta information for the scan and tool context
type Driver struct {
Name string `json:"name"`
Version string `json:"version"`
InformationUri string `json:"informationUri,omitempty"`
Rules []SarifRule `json:"rules"`
SupportedTaxonomies []SupportedTaxonomies `json:"supportedTaxonomies"`
}
// SarifRule related rule used to identify the finding
@@ -114,11 +122,58 @@ type Help struct {
Markdown string `json:"markdown,omitempty"`
}
// SnippetSarif holds the code snippet where the finding appears
type SnippetSarif struct {
Text string `json:"text"`
}
// ContextRegion provides the context for the finding
type ContextRegion struct {
StartLine int `json:"startLine"`
EndLine int `json:"endLine"`
Snippet SnippetSarif `json:"snippet"`
}
// CodeFlow
type CodeFlow struct {
ThreadFlows []ThreadFlow `json:"threadFlows"`
}
// ThreadFlow
type ThreadFlow struct {
Locations []Locations `json:"locations"`
}
// Locations
type Locations struct {
Location *Location `json:"location,omitempty"`
Kinds []string `json:"kinds,omitempty"`
Index int `json:"index,omitempty"`
}
// RelatedLocation
type RelatedLocation struct {
ID int `json:"id"`
PhysicalLocation RelatedPhysicalLocation `json:"physicalLocation"`
}
// RelatedPhysicalLocation
type RelatedPhysicalLocation struct {
ArtifactLocation ArtifactLocation `json:"artifactLocation"`
Region RelatedRegion `json:"region"`
}
// RelatedRegion
type RelatedRegion struct {
StartLine int `json:"startLine"`
StartColumn int `json:"startColumn,omitempty"`
}
// SupportedTaxonomies
type SupportedTaxonomies struct {
Name string `json:"name"`
Index int `json:"index"`
Guid string `json:"guid"`
}
// DefaultConfiguration
@@ -127,7 +182,7 @@ type DefaultConfiguration struct {
Level string `json:"level,omitempty"` //This exists in the template, but not sure how it is populated. TODO.
}
// DefaultProperties
type DefaultProperties struct {
DefaultSeverity string `json:"DefaultSeverity"`
}
@@ -140,17 +195,17 @@ type Relationships struct {
// Target
type Target struct {
Id string `json:"id"`
ToolComponent ToolComponent `json:"toolComponent"`
}
// ToolComponent
type ToolComponent struct {
Name string `json:"name"`
Guid string `json:"guid"`
}
// SarifRuleProperties
type SarifRuleProperties struct {
Accuracy string `json:"Accuracy,omitempty"`
Impact string `json:"Impact,omitempty"`
@@ -158,3 +213,75 @@ type SarifRuleProperties struct {
Tags []string `json:"tags,omitempty"`
Precision string `json:"precision,omitempty"`
}
// Invocations These structs are relevant to the Invocations object
type Invocations struct {
CommandLine string `json:"commandLine"`
StartTimeUtc string `json:"startTimeUtc"`
ToolExecutionNotifications []ToolExecutionNotifications `json:"toolExecutionNotifications"`
ExecutionSuccessful bool `json:"executionSuccessful"`
Machine string `json:"machine"`
Account string `json:"account"`
Properties InvocationProperties `json:"properties"`
}
// ToolExecutionNotifications
type ToolExecutionNotifications struct {
Message Message `json:"message"`
Descriptor Descriptor `json:"descriptor"`
}
// Descriptor
type Descriptor struct {
Id string `json:"id"`
}
// InvocationProperties
type InvocationProperties struct {
Platform string `json:"Platform"`
}
// OriginalUriBaseIds These structs are relevant to the originalUriBaseIds object
type OriginalUriBaseIds struct {
SrcRoot SrcRoot `json:"%SRCROOT%"`
}
// SrcRoot
type SrcRoot struct {
Uri string `json:"uri"`
}
// Artifact These structs are relevant to the artifacts object
type Artifact struct {
Location SarifLocation `json:"location"`
Length int `json:"length"`
MimeType string `json:"mimeType"`
Encoding string `json:"encoding"`
}
// SarifLocation
type SarifLocation struct {
Uri string `json:"uri"`
UriBaseId string `json:"uriBaseId"`
}
// AutomationDetails These structs are relevant to the automationDetails object
type AutomationDetails struct {
Id string `json:"id"`
}
// The threadFlowLocations object reuses the Locations type defined above
// Taxonomies These structs are relevant to the taxonomies object
type Taxonomies struct {
Guid string `json:"guid"`
Name string `json:"name"`
Organization string `json:"organization"`
ShortDescription Message `json:"shortDescription"`
Taxa []Taxa `json:"taxa"`
}
// Taxa
type Taxa struct {
Id string `json:"id"`
}
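To illustrate how the structs above fit together, here is a minimal sketch (not part of the commit) that builds a one-result report and marshals it with the standard library; the driver name and rule id are placeholder values:

package main

import (
    "encoding/json"
    "fmt"
    "log"

    "github.com/SAP/jenkins-library/pkg/format"
)

func main() {
    var report format.SARIF
    report.Schema = "https://docs.oasis-open.org/sarif/sarif/v2.1.0/cos01/schemas/sarif-schema-2.1.0.json"
    report.Version = "2.1.0"

    var run format.Runs
    run.ColumnKind = "utf16CodeUnits"
    run.Tool.Driver.Name = "ExampleScanner" // placeholder driver name
    run.Results = append(run.Results, format.Results{
        RuleID:  "EXAMPLE-RULE-1", // placeholder rule id
        Level:   "warning",
        Message: format.Message{Text: "example finding"},
    })
    report.Runs = append(report.Runs, run)

    // Pretty-print the resulting SARIF document to stdout.
    out, err := json.MarshalIndent(report, "", "  ")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(string(out))
}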

pkg/fortify/fpr_to_sarif.go

@@ -13,6 +13,7 @@ import (
"github.com/piper-validation/fortify-client-go/models"
"github.com/SAP/jenkins-library/pkg/format"
"github.com/SAP/jenkins-library/pkg/log"
FileUtils "github.com/SAP/jenkins-library/pkg/piperutils"
)
@@ -43,27 +44,29 @@ type CreatedTS struct {
Time string `xml:"time,attr"`
}
// UUID
type UUID struct {
XMLName xml.Name `xml:"UUID"`
Uuid string `xml:",innerxml"`
}
// LOC
type LOC struct {
XMLName xml.Name `xml:"LOC"`
LocType string `xml:"type,attr"`
LocValue string `xml:",innerxml"`
}
// These structures are relevant to the Build object
// The Build object transports all build and scan related information
type Build struct {
XMLName xml.Name `xml:"Build"`
Project string `xml:"Project"`
Version string `xml:"Version"`
Label string `xml:"Label"`
BuildID string `xml:"BuildID"`
NumberFiles int `xml:"NumberFiles"`
Locs []LOC `xml:"LOC"`
JavaClassPath string `xml:"JavaClasspath"`
SourceBasePath string `xml:"SourceBasePath"`
SourceFiles []File `xml:"SourceFiles>File"`
@@ -94,7 +97,6 @@ type Vulnerabilities struct {
Vulnerability []Vulnerability `xml:"Vulnerability"`
}
type Vulnerability struct {
XMLName xml.Name `xml:"Vulnerability"`
ClassInfo ClassInfo `xml:"ClassInfo"`
@@ -491,11 +493,20 @@ type Attribute struct {
Value string `xml:"value"`
}
// Utils
func (n Node) isEmpty() bool {
return n.IsDefault == ""
}
func (a Action) isEmpty() bool {
return a.ActionData == ""
}
// ConvertFprToSarif converts the FPR file contents into SARIF format
func ConvertFprToSarif(sys System, project *models.Project, projectVersion *models.ProjectVersion, resultFilePath string) (format.SARIF, error) {
log.Entry().Debug("Extracting FPR.")
var sarif format.SARIF
tmpFolder, err := ioutil.TempDir(".", "temp-")
defer os.RemoveAll(tmpFolder)
if err != nil {
@@ -512,7 +523,13 @@ func ConvertFprToSarif(sys System, project *models.Project, projectVersion *mode
if err != nil {
return sarif, err
}
if len(data) == 0 {
log.Entry().Error("Error reading audit file at " + filepath.Join(tmpFolder, "audit.fvdl") + ". This might be that the file is missing, corrupted, or too large. Aborting procedure.")
err := errors.New("cannot read audit file")
return sarif, err
}
log.Entry().Debug("Calling Parse.")
return Parse(sys, project, projectVersion, data)
}
@@ -523,13 +540,18 @@ func Parse(sys System, project *models.Project, projectVersion *models.ProjectVe
decoder := xml.NewDecoder(reader)
var fvdl FVDL
err := decoder.Decode(&fvdl)
if err != nil {
return format.SARIF{}, err
}
//Now, we handle the sarif
var sarif format.SARIF
sarif.Schema = "https://docs.oasis-open.org/sarif/sarif/v2.1.0/cos01/schemas/sarif-schema-2.1.0.json"
sarif.Version = "2.1.0"
var fortifyRun format.Runs
fortifyRun.ColumnKind = "utf16CodeUnits"
cweIdsForTaxonomies := make(map[string]string) //Defining this here and filling it as the program runs makes it easy to build the Taxonomies object later; a map makes checking for existing keys easy
sarif.Runs = append(sarif.Runs, fortifyRun)
// Handle results/vulnerabilities
@@ -551,6 +573,121 @@ func Parse(sys System, project *models.Project, projectVersion *models.ProjectVe
}
}
// Handle all locations items
location := *new(format.Location)
var startingColumn int
//get location
for k := 0; k < len(fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace); k++ { // k iterates on traces
//In each trace/primary, there can be one or more entries
//Each trace represents a codeflow, each entry represents a location in threadflow
codeFlow := *new(format.CodeFlow)
threadFlow := *new(format.ThreadFlow)
//We now iterate on Entries in the trace/primary
for l := 0; l < len(fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry); l++ { // l iterates on entries
threadFlowLocation := *new(format.Locations) //One is created regardless
//the default node dictates the interesting threadflow (location, and so on)
//this will populate both threadFlowLocation AND the parent location object (result.Locations[0])
if !fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry[l].Node.isEmpty() && fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry[l].Node.IsDefault == "true" {
//initialize threadFlowLocation.Location
threadFlowLocation.Location = new(format.Location)
//get artifact location
for j := 0; j < len(fvdl.Build.SourceFiles); j++ { // j iterates on source files
if fvdl.Build.SourceFiles[j].Name == fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry[l].Node.SourceLocation.Path {
threadFlowLocation.Location.PhysicalLocation.ArtifactLocation.Index = j
break
}
}
//get region & context region
threadFlowLocation.Location.PhysicalLocation.Region.StartLine = fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry[l].Node.SourceLocation.Line
//Snippet is handled last
//threadFlowLocation.Location.PhysicalLocation.Region.Snippet.Text = "foobar"
targetSnippetId := fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry[l].Node.SourceLocation.Snippet
for j := 0; j < len(fvdl.Snippets); j++ {
if fvdl.Snippets[j].SnippetId == targetSnippetId {
threadFlowLocation.Location.PhysicalLocation.ContextRegion.StartLine = fvdl.Snippets[j].StartLine
threadFlowLocation.Location.PhysicalLocation.ContextRegion.EndLine = fvdl.Snippets[j].EndLine
threadFlowLocation.Location.PhysicalLocation.ContextRegion.Snippet.Text = fvdl.Snippets[j].Text
break
}
}
//parse SourceLocation object for the startColumn value, store it appropriately
startingColumn = fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry[l].Node.SourceLocation.ColStart
//check for existence of an action object and, if present, save its message
if !fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry[l].Node.Action.isEmpty() {
threadFlowLocation.Location.Message = new(format.Message)
threadFlowLocation.Location.Message.Text = fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry[l].Node.Action.ActionData
// Handle snippet
snippetTarget := ""
switch fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry[l].Node.Action.Type {
case "Assign":
snippetWords := strings.Split(fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry[l].Node.Action.ActionData, " ")
if snippetWords[0] == "Assignment" {
snippetTarget = snippetWords[2]
} else {
snippetTarget = fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry[l].Node.Action.ActionData
}
case "InCall":
snippetTarget = strings.Split(fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry[l].Node.Action.ActionData, "(")[0]
case "OutCall":
snippetTarget = strings.Split(fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry[l].Node.Action.ActionData, "(")[0]
case "InOutCall":
snippetTarget = strings.Split(fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry[l].Node.Action.ActionData, "(")[0]
case "Return":
snippetTarget = fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry[l].Node.Action.ActionData
case "Read":
snippetWords := strings.Split(fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry[l].Node.Action.ActionData, " ")
if len(snippetWords) > 1 {
snippetTarget = " " + snippetWords[1]
} else {
snippetTarget = snippetWords[0]
}
default:
snippetTarget = fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry[l].Node.Action.ActionData
}
physLocationSnippetLines := strings.Split(threadFlowLocation.Location.PhysicalLocation.ContextRegion.Snippet.Text, "\n")
snippetText := ""
for j := 0; j < len(physLocationSnippetLines); j++ {
if strings.Contains(physLocationSnippetLines[j], snippetTarget) {
snippetText = physLocationSnippetLines[j]
break
}
}
if snippetText != "" {
threadFlowLocation.Location.PhysicalLocation.Region.Snippet.Text = snippetText
} else {
threadFlowLocation.Location.PhysicalLocation.Region.Snippet.Text = threadFlowLocation.Location.PhysicalLocation.ContextRegion.Snippet.Text
}
} else {
threadFlowLocation.Location.PhysicalLocation.Region.Snippet.Text = threadFlowLocation.Location.PhysicalLocation.ContextRegion.Snippet.Text
}
location = *threadFlowLocation.Location
//set Kinds
threadFlowLocation.Kinds = append(threadFlowLocation.Kinds, "unknown") //TODO
} else { //is not a main threadflow: just register NodeRef index in threadFlowLocation
threadFlowLocation.Index = fvdl.Vulnerabilities.Vulnerability[i].AnalysisInfo.Trace[k].Primary.Entry[l].NodeRef.RefId
}
//add the threadflowlocation to the list of locations
threadFlow.Locations = append(threadFlow.Locations, threadFlowLocation)
}
codeFlow.ThreadFlows = append(codeFlow.ThreadFlows, threadFlow)
result.CodeFlows = append(result.CodeFlows, codeFlow)
}
//For some reason, the principal object only has 1 location: here we keep the last one
//Void message
location.Message = nil
result.Locations = append(result.Locations, location)
//handle relatedLocation
relatedLocation := *new(format.RelatedLocation)
relatedLocation.ID = 1
relatedLocation.PhysicalLocation = *new(format.RelatedPhysicalLocation)
relatedLocation.PhysicalLocation.ArtifactLocation = location.PhysicalLocation.ArtifactLocation
relatedLocation.PhysicalLocation.Region = *new(format.RelatedRegion)
relatedLocation.PhysicalLocation.Region.StartLine = location.PhysicalLocation.Region.StartLine
relatedLocation.PhysicalLocation.Region.StartColumn = startingColumn
result.RelatedLocations = append(result.RelatedLocations, relatedLocation)
//handle properties
prop := *new(format.SarifProperties)
prop.InstanceSeverity = fvdl.Vulnerabilities.Vulnerability[i].InstanceInfo.InstanceSeverity
@@ -647,11 +784,29 @@ func Parse(sys System, project *models.Project, projectVersion *models.ProjectVe
}
//properties
//Prepare a CWE id slice in case it is needed
cweIds := []string{}
//scan for the properties we want:
var propArray [][]string
for j := 0; j < len(fvdl.EngineData.RuleInfo[i].MetaInfoGroup); j++ {
if (fvdl.EngineData.RuleInfo[i].MetaInfoGroup[j].Name == "Accuracy") || (fvdl.EngineData.RuleInfo[i].MetaInfoGroup[j].Name == "Impact") || (fvdl.EngineData.RuleInfo[i].MetaInfoGroup[j].Name == "Probability") {
propArray = append(propArray, []string{fvdl.EngineData.RuleInfo[i].MetaInfoGroup[j].Name, fvdl.EngineData.RuleInfo[i].MetaInfoGroup[j].Data})
} else if fvdl.EngineData.RuleInfo[i].MetaInfoGroup[j].Name == "altcategoryCWE" {
//Get all CWE IDs. First, split on ", "
rawCweIds := strings.Split(fvdl.EngineData.RuleInfo[i].MetaInfoGroup[j].Data, ", ")
//If not "None", split each string on " " and add its 2nd index
if rawCweIds[0] != "None" {
for k := 0; k < len(rawCweIds); k++ {
cweId := strings.Split(rawCweIds[k], " ")[2]
//Fill the cweIdsForTaxonomies map if not already in
if _, isIn := cweIdsForTaxonomies[cweId]; !isIn {
cweIdsForTaxonomies[cweId] = cweId
}
cweIds = append(cweIds, cweId)
}
} else {
cweIds = append(cweIds, rawCweIds[0])
}
}
}
var ruleProp *format.SarifRuleProperties
@@ -669,21 +824,199 @@ func Parse(sys System, project *models.Project, projectVersion *models.ProjectVe
}
sarifRule.Properties = ruleProp
//relationships: will most likely require some expansion
//One relationship per CWE id
for j := 0; j < len(cweIds); j++ {
rls := *new(format.Relationships)
rls.Target.Id = cweIds[j]
rls.Target.ToolComponent.Name = "CWE"
rls.Target.ToolComponent.Guid = "25F72D7E-8A92-459D-AD67-64853F788765"
rls.Kinds = append(rls.Kinds, "relevant")
sarifRule.Relationships = append(sarifRule.Relationships, rls)
}
//Finalize: append the rule
tool.Driver.Rules = append(tool.Driver.Rules, sarifRule)
}
//supportedTaxonomies
sTax := *new(format.SupportedTaxonomies) //This object seems fixed, but it will have to be checked
sTax.Name = "CWE"
sTax.Index = 0
sTax.Guid = "25F72D7E-8A92-459D-AD67-64853F788765"
tool.Driver.SupportedTaxonomies = append(tool.Driver.SupportedTaxonomies, sTax)
//Finalize: tool
sarif.Runs[0].Tool = tool
//handle invocations object
invocation := *new(format.Invocations)
for i := 0; i < len(fvdl.EngineData.Properties); i++ { //i selects the properties type
if fvdl.EngineData.Properties[i].PropertiesType == "Fortify" { // This is the correct type, now iterate on props
for j := 0; j < len(fvdl.EngineData.Properties[i].Property); j++ {
if fvdl.EngineData.Properties[i].Property[j].Name == "com.fortify.SCAExecutablePath" {
splitPath := strings.Split(fvdl.EngineData.Properties[i].Property[j].Value, "/")
invocation.CommandLine = splitPath[len(splitPath)-1]
break
}
}
break
}
}
invocation.CommandLine = strings.Join(append([]string{invocation.CommandLine}, fvdl.EngineData.CLArguments...), " ")
invocation.StartTimeUtc = strings.Join([]string{fvdl.Created.Date, fvdl.Created.Time}, "T") + ".000Z"
for i := 0; i < len(fvdl.EngineData.Errors); i++ {
ten := *new(format.ToolExecutionNotifications)
ten.Message.Text = fvdl.EngineData.Errors[i].ErrorMessage
ten.Descriptor.Id = fvdl.EngineData.Errors[i].ErrorCode
invocation.ToolExecutionNotifications = append(invocation.ToolExecutionNotifications, ten)
}
invocation.ExecutionSuccessful = true //fvdl doesn't seem to plan for this setting
invocation.Machine = fvdl.EngineData.MachineInfo.Hostname
invocation.Account = fvdl.EngineData.MachineInfo.Username
invocation.Properties.Platform = fvdl.EngineData.MachineInfo.Platform
sarif.Runs[0].Invocations = append(sarif.Runs[0].Invocations, invocation)
//handle originalUriBaseIds
sarif.Runs[0].OriginalUriBaseIds.SrcRoot.Uri = "file:///" + fvdl.Build.SourceBasePath + "/"
//handle artifacts
for i := 0; i < len(fvdl.Build.SourceFiles); i++ { //i iterates on source files
artifact := *new(format.Artifact)
artifact.Location.Uri = fvdl.Build.SourceFiles[i].Name
artifact.Location.UriBaseId = "%SRCROOT%"
artifact.Length = fvdl.Build.SourceFiles[i].FileSize
switch fvdl.Build.SourceFiles[i].FileType {
case "java":
artifact.MimeType = "text/x-java-source"
case "xml":
artifact.MimeType = "text/xml"
default:
artifact.MimeType = "text"
}
artifact.Encoding = fvdl.Build.SourceFiles[i].Encoding
sarif.Runs[0].Artifacts = append(sarif.Runs[0].Artifacts, artifact)
}
//handle automationDetails
sarif.Runs[0].AutomationDetails.Id = fvdl.Build.BuildID
//handle threadFlowLocations
threadFlowLocationsObject := []format.Locations{}
//iterate over the unified node pool, skipping duplicate nodes
for i := 0; i < len(fvdl.UnifiedNodePool.Node); i++ {
unique := true
//Uniqueness Check
for check := 0; check < i; check++ {
if fvdl.UnifiedNodePool.Node[i].SourceLocation.Snippet == fvdl.UnifiedNodePool.Node[check].SourceLocation.Snippet &&
fvdl.UnifiedNodePool.Node[i].Action.ActionData == fvdl.UnifiedNodePool.Node[check].Action.ActionData {
unique = false
}
}
if !unique {
continue
}
locations := *new(format.Locations)
loc := new(format.Location)
//get artifact location
for j := 0; j < len(fvdl.Build.SourceFiles); j++ { // j iterates on source files
if fvdl.Build.SourceFiles[j].Name == fvdl.UnifiedNodePool.Node[i].SourceLocation.Path {
loc.PhysicalLocation.ArtifactLocation.Index = j
break
}
}
//get region & context region
loc.PhysicalLocation.Region.StartLine = fvdl.UnifiedNodePool.Node[i].SourceLocation.Line
//loc.PhysicalLocation.Region.Snippet.Text = "foobar" //TODO
targetSnippetId := fvdl.UnifiedNodePool.Node[i].SourceLocation.Snippet
for j := 0; j < len(fvdl.Snippets); j++ {
if fvdl.Snippets[j].SnippetId == targetSnippetId {
loc.PhysicalLocation.ContextRegion.StartLine = fvdl.Snippets[j].StartLine
loc.PhysicalLocation.ContextRegion.EndLine = fvdl.Snippets[j].EndLine
loc.PhysicalLocation.ContextRegion.Snippet.Text = fvdl.Snippets[j].Text
break
}
}
loc.Message = new(format.Message)
loc.Message.Text = fvdl.UnifiedNodePool.Node[i].Action.ActionData
// Handle snippet
snippetTarget := ""
switch fvdl.UnifiedNodePool.Node[i].Action.Type {
case "Assign":
snippetWords := strings.Split(fvdl.UnifiedNodePool.Node[i].Action.ActionData, " ")
if snippetWords[0] == "Assignment" {
snippetTarget = snippetWords[2]
} else {
snippetTarget = fvdl.UnifiedNodePool.Node[i].Action.ActionData
}
case "InCall":
snippetTarget = strings.Split(fvdl.UnifiedNodePool.Node[i].Action.ActionData, "(")[0]
case "OutCall":
snippetTarget = strings.Split(fvdl.UnifiedNodePool.Node[i].Action.ActionData, "(")[0]
case "InOutCall":
snippetTarget = strings.Split(fvdl.UnifiedNodePool.Node[i].Action.ActionData, "(")[0]
case "Return":
snippetTarget = fvdl.UnifiedNodePool.Node[i].Action.ActionData
case "Read":
snippetWords := strings.Split(fvdl.UnifiedNodePool.Node[i].Action.ActionData, " ")
if len(snippetWords) > 1 {
snippetTarget = " " + snippetWords[1]
} else {
snippetTarget = snippetWords[0]
}
default:
snippetTarget = fvdl.UnifiedNodePool.Node[i].Action.ActionData
}
physLocationSnippetLines := strings.Split(loc.PhysicalLocation.ContextRegion.Snippet.Text, "\n")
snippetText := ""
for j := 0; j < len(physLocationSnippetLines); j++ {
if strings.Contains(physLocationSnippetLines[j], snippetTarget) {
snippetText = physLocationSnippetLines[j]
break
}
}
if snippetText != "" {
loc.PhysicalLocation.Region.Snippet.Text = snippetText
} else {
loc.PhysicalLocation.Region.Snippet.Text = loc.PhysicalLocation.ContextRegion.Snippet.Text
}
locations.Location = loc
locations.Kinds = append(locations.Kinds, "unknown")
threadFlowLocationsObject = append(threadFlowLocationsObject, locations)
}
sarif.Runs[0].ThreadFlowLocations = threadFlowLocationsObject
//handle taxonomies
//Only one exists apparently: CWE. It is fixed
taxonomy := *new(format.Taxonomies)
taxonomy.Guid = "25F72D7E-8A92-459D-AD67-64853F788765"
taxonomy.Name = "CWE"
taxonomy.Organization = "MITRE"
taxonomy.ShortDescription.Text = "The MITRE Common Weakness Enumeration"
for key := range cweIdsForTaxonomies {
taxa := *new(format.Taxa)
taxa.Id = key
taxonomy.Taxa = append(taxonomy.Taxa, taxa)
}
sarif.Runs[0].Taxonomies = append(sarif.Runs[0].Taxonomies, taxonomy)
return sarif, nil
}
func integrateAuditData(ruleProp *format.SarifProperties, issueInstanceID string, sys System, project *models.Project, projectVersion *models.ProjectVersion) error {
if sys == nil {
err := errors.New("no system instance, lookup impossible for " + issueInstanceID)
return err
}
if project == nil || projectVersion == nil {
err := errors.New("project or projectVersion is undefined: lookup aborted for " + issueInstanceID)
return err
}
data, err := sys.GetIssueDetails(projectVersion.ID, issueInstanceID)
log.Entry().Debug("Looking up audit state of " + issueInstanceID)
if err != nil {
return err
}
if len(data) != 1 { //issueInstanceID is supposedly unique so len(data) = 1
log.Entry().Error("not exactly 1 issue found, found " + fmt.Sprint(len(data)))
return errors.New("not exactly 1 issue found, found " + fmt.Sprint(len(data)))

pkg/fortify/fpr_to_sarif_test.go

@@ -5,6 +5,7 @@ import (
"strings"
"testing"
"github.com/SAP/jenkins-library/pkg/format"
"github.com/piper-validation/fortify-client-go/models"
"github.com/stretchr/testify/assert"
)
@@ -348,12 +349,113 @@ If you are concerned about leaking system data via NFC on an Android device, you
})
// Close the server when test finishes
defer server.Close()
t.Run("Valid config", func(t *testing.T) {
project := models.Project{}
projectVersion := models.ProjectVersion{ID: 11037}
sarif, err := Parse(sys, &project, &projectVersion, []byte(testFvdl))
assert.NoError(t, err, "error")
assert.Equal(t, len(sarif.Runs[0].Results), 2)
assert.Equal(t, len(sarif.Runs[0].Tool.Driver.Rules), 1)
assert.Equal(t, sarif.Runs[0].Results[0].Properties.ToolState, "Exploitable")
assert.Equal(t, sarif.Runs[0].Results[0].Properties.ToolAuditMessage, "Dummy comment.")
})
t.Run("Missing data", func(t *testing.T) {
project := models.Project{}
projectVersion := models.ProjectVersion{ID: 11037}
_, err := Parse(sys, &project, &projectVersion, []byte{})
assert.Error(t, err, "EOF")
})
t.Run("No system instance", func(t *testing.T) {
project := models.Project{}
projectVersion := models.ProjectVersion{ID: 11037}
sarif, err := Parse(nil, &project, &projectVersion, []byte(testFvdl))
assert.NoError(t, err, "error")
assert.Equal(t, len(sarif.Runs[0].Results), 2)
assert.Equal(t, len(sarif.Runs[0].Tool.Driver.Rules), 1)
assert.Equal(t, sarif.Runs[0].Results[0].Properties.ToolState, "Unknown")
assert.Equal(t, sarif.Runs[0].Results[0].Properties.ToolAuditMessage, "Cannot fetch audit state")
})
}
func TestIntegrateAuditData(t *testing.T) {
sys, server := spinUpServer(func(rw http.ResponseWriter, req *http.Request) {
if strings.Split(req.URL.Path, "/")[1] == "projectVersions" {
header := rw.Header()
header.Add("Content-type", "application/json")
rw.Write([]byte(
`{
"data": [
{
"projectVersionId": 11037,
"issueInstanceId": "DUMMYDUMMYDUMMY",
"issueName": "Dummy issue",
"primaryTag": "Exploitable",
"audited": true,
"issueStatus": "Reviewed",
"hasComments": true,
"friority": "High",
"_href": "https://fortify-stage.tools.sap/ssc/api/v1/projectVersions/11037"
}
],
"count": 1,
"responseCode": 200}`))
return
}
if strings.Split(req.URL.Path, "/")[1] == "issues" {
header := rw.Header()
header.Add("Content-type", "application/json")
rw.Write([]byte(
`{
"data": [
{
"issueId": 47009919,
"comment": "Dummy comment."
}
],
"count": 1,
"responseCode": 200}`))
return
}
})
// Close the server when test finishes
defer server.Close()
t.Run("Successful lookup", func(t *testing.T) {
ruleProp := *new(format.SarifProperties)
project := models.Project{}
projectVersion := models.ProjectVersion{ID: 11037}
err := integrateAuditData(&ruleProp, "11037", sys, &project, &projectVersion)
assert.NoError(t, err, "error")
assert.Equal(t, ruleProp.Audited, true)
assert.Equal(t, ruleProp.ToolState, "Exploitable")
assert.Equal(t, ruleProp.ToolStateIndex, 5)
assert.Equal(t, ruleProp.ToolSeverity, "High")
assert.Equal(t, ruleProp.ToolSeverityIndex, 3)
assert.Equal(t, ruleProp.ToolAuditMessage, "Dummy comment.")
})
t.Run("Missing project", func(t *testing.T) {
ruleProp := *new(format.SarifProperties)
projectVersion := models.ProjectVersion{ID: 11037}
err := integrateAuditData(&ruleProp, "11037", sys, nil, &projectVersion)
assert.Error(t, err, "project or projectVersion is undefined: lookup aborted for 11037")
})
t.Run("Missing project version", func(t *testing.T) {
ruleProp := *new(format.SarifProperties)
project := models.Project{}
err := integrateAuditData(&ruleProp, "11037", sys, &project, nil)
assert.Error(t, err, "project or projectVersion is undefined: lookup aborted for 11037")
})
t.Run("Missing sys", func(t *testing.T) {
ruleProp := *new(format.SarifProperties)
project := models.Project{}
projectVersion := models.ProjectVersion{ID: 11037}
err := integrateAuditData(&ruleProp, "11037", nil, &project, &projectVersion)
assert.Error(t, err, "no system instance, lookup impossible for 11037")
})
}

pkg/whitesource (CreateSarifResultFile)

@@ -200,7 +200,7 @@ func CreateSarifResultFile(scan *Scan, alerts *[]Alert) *format.SARIF {
result.Message = format.Message{Text: alert.Vulnerability.Description}
result.Level = alert.Level
result.AnalysisTarget = format.ArtifactLocation{URI: alert.Library.Filename, Index: 0}
location := format.Location{PhysicalLocation: format.PhysicalLocation{ArtifactLocation: format.ArtifactLocation{URI: alert.Library.Filename}, Region: format.Region{}, LogicalLocations: []format.LogicalLocation{{FullyQualifiedName: ""}}}, Message: nil}
result.Locations = append(result.Locations, location)
sarifRule := *new(format.SarifRule)