package fortify

import (
	"bytes"
	"encoding/xml"
	"errors"
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
	"strconv"
	"strings"
	"time"

	"github.com/piper-validation/fortify-client-go/models"

	"github.com/SAP/jenkins-library/pkg/format"
	"github.com/SAP/jenkins-library/pkg/log"
	"github.com/SAP/jenkins-library/pkg/piperutils"
)

// FVDL This struct encapsulates everything in the FVDL document
type FVDL struct {
	XMLName xml.Name `xml:"FVDL"`
	Xmlns string `xml:"xmlns,attr"`
	XmlnsXsi string `xml:"xsi,attr"`
	Version string `xml:"version,attr"`
	XsiType string `xml:"type,attr"`
	Created CreatedTS `xml:"CreatedTS"`
	Uuid UUID `xml:"UUID"`
	Build Build `xml:"Build"`
	Vulnerabilities Vulnerabilities `xml:"Vulnerabilities"`
	ContextPool ContextPool `xml:"ContextPool"`
	UnifiedNodePool UnifiedNodePool `xml:"UnifiedNodePool"`
	Description []Description `xml:"Description"`
	Snippets []Snippet `xml:"Snippets>Snippet"`
	ProgramData ProgramData `xml:"ProgramData"`
	EngineData EngineData `xml:"EngineData"`
}
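
// FVDL mirrors the audit.fvdl document contained in an FPR archive: ConvertFprToSarif
// below unzips the FPR and Parse decodes audit.fvdl into this tree of structs.
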
// CreatedTS
type CreatedTS struct {
	XMLName xml.Name `xml:"CreatedTS"`
	Date string `xml:"date,attr"`
	Time string `xml:"time,attr"`
}

// UUID
type UUID struct {
	XMLName xml.Name `xml:"UUID"`
	Uuid string `xml:",innerxml"`
}

// LOC
type LOC struct {
	XMLName xml.Name `xml:"LOC"`
	LocType string `xml:"type,attr"`
	LocValue string `xml:",innerxml"`
}

// These structures are relevant to the Build object
// The Build object transports all build and scan related information
type Build struct {
	XMLName xml.Name `xml:"Build"`
	Project string `xml:"Project"`
	Version string `xml:"Version"`
	Label string `xml:"Label"`
	BuildID string `xml:"BuildID"`
	NumberFiles int `xml:"NumberFiles"`
	Locs []LOC `xml:"LOC"`
	JavaClassPath string `xml:"JavaClasspath"`
	SourceBasePath string `xml:"SourceBasePath"`
	SourceFiles []File `xml:"SourceFiles>File"`
	Scantime ScanTime `xml:"ScanTime"`
}

// File
type File struct {
	XMLName xml.Name `xml:"File"`
	FileSize int `xml:"size,attr"`
	FileTimestamp string `xml:"timestamp,attr"`
	FileLoc int `xml:"loc,attr,omitempty"`
	FileType string `xml:"type,attr"`
	Encoding string `xml:"encoding,attr"`
	Name string `xml:"Name"`
	Locs []LOC `xml:",any,omitempty"`
}

// ScanTime
type ScanTime struct {
	XMLName xml.Name `xml:"ScanTime"`
	Value int `xml:"value,attr"`
}

// Vulnerabilities These structures are relevant to the Vulnerabilities object
type Vulnerabilities struct {
	XMLName xml.Name `xml:"Vulnerabilities"`
	Vulnerability []Vulnerability `xml:"Vulnerability"`
}

// Vulnerability
type Vulnerability struct {
	XMLName xml.Name `xml:"Vulnerability"`
	ClassInfo ClassInfo `xml:"ClassInfo"`
	InstanceInfo InstanceInfo `xml:"InstanceInfo"`
	AnalysisInfo AnalysisInfo `xml:"AnalysisInfo>Unified"`
}

// ClassInfo
type ClassInfo struct {
	XMLName xml.Name `xml:"ClassInfo"`
	ClassID string `xml:"ClassID"`
	Kingdom string `xml:"Kingdom,omitempty"`
	Type string `xml:"Type"`
	Subtype string `xml:"Subtype,omitempty"`
	AnalyzerName string `xml:"AnalyzerName"`
	DefaultSeverity float64 `xml:"DefaultSeverity"`
}

// InstanceInfo
type InstanceInfo struct {
	XMLName xml.Name `xml:"InstanceInfo"`
	InstanceID string `xml:"InstanceID"`
	InstanceSeverity float64 `xml:"InstanceSeverity"`
	Confidence string `xml:"Confidence"`
}

// AnalysisInfo
type AnalysisInfo struct { //Note that this is directly the "Unified" object
	Context Context
	ReplacementDefinitions ReplacementDefinitions `xml:"ReplacementDefinitions"`
	Trace []Trace `xml:"Trace"`
}

// Context
type Context struct {
	XMLName xml.Name `xml:"Context"`
	ContextId string `xml:"id,attr,omitempty"`
	Function Function `xml:"Function"`
	FDSL FunctionDeclarationSourceLocation `xml:"FunctionDeclarationSourceLocation"`
}

// Function
type Function struct {
	XMLName xml.Name `xml:"Function"`
	FunctionName string `xml:"name,attr"`
	FunctionNamespace string `xml:"namespace,attr"`
	FunctionEnclosingClass string `xml:"enclosingClass,attr"`
}

// FunctionDeclarationSourceLocation
type FunctionDeclarationSourceLocation struct {
	XMLName xml.Name `xml:"FunctionDeclarationSourceLocation"`
	FDSLPath string `xml:"path,attr"`
	FDSLLine string `xml:"line,attr"`
	FDSLLineEnd string `xml:"lineEnd,attr"`
	FDSLColStart string `xml:"colStart,attr"`
	FDSLColEnd string `xml:"colEnd,attr"`
}

// ReplacementDefinitions
type ReplacementDefinitions struct {
	XMLName xml.Name `xml:"ReplacementDefinitions"`
	Def []Def `xml:"Def"`
	LocationDef []LocationDef `xml:"LocationDef"`
}

// Def
type Def struct {
	XMLName xml.Name `xml:"Def"`
	DefKey string `xml:"key,attr"`
	DefValue string `xml:"value,attr"`
}

// LocationDef
type LocationDef struct {
	XMLName xml.Name `xml:"LocationDef"`
	Path string `xml:"path,attr"`
	Line int `xml:"line,attr"`
	LineEnd int `xml:"lineEnd,attr"`
	ColStart int `xml:"colStart,attr"`
	ColEnd int `xml:"colEnd,attr"`
	Key string `xml:"key,attr"`
}

// Trace
type Trace struct {
	XMLName xml.Name `xml:"Trace"`
	Primary Primary `xml:"Primary"`
}

// Primary
type Primary struct {
	XMLName xml.Name `xml:"Primary"`
	Entry []Entry `xml:"Entry"`
}

// Entry
type Entry struct {
	XMLName xml.Name `xml:"Entry"`
	NodeRef NodeRef `xml:"NodeRef,omitempty"`
	Node Node `xml:"Node,omitempty"`
}

// NodeRef
type NodeRef struct {
	XMLName xml.Name `xml:"NodeRef"`
	RefId int `xml:"id,attr"`
}

// Node
type Node struct {
	XMLName xml.Name `xml:"Node"`
	IsDefault string `xml:"isDefault,attr,omitempty"`
	NodeLabel string `xml:"label,attr,omitempty"`
	ID int `xml:"id,attr,omitempty"`
	SourceLocation SourceLocation `xml:"SourceLocation"`
	Action Action `xml:"Action,omitempty"`
	Reason Reason `xml:"Reason,omitempty"`
	Knowledge Knowledge `xml:"Knowledge,omitempty"`
}

// SourceLocation
type SourceLocation struct {
	XMLName xml.Name `xml:"SourceLocation"`
	Path string `xml:"path,attr"`
	Line int `xml:"line,attr"`
	LineEnd int `xml:"lineEnd,attr"`
	ColStart int `xml:"colStart,attr"`
	ColEnd int `xml:"colEnd,attr"`
	ContextId string `xml:"contextId,attr"`
	Snippet string `xml:"snippet,attr"`
}

// Action
type Action struct {
	XMLName xml.Name `xml:"Action"`
	Type string `xml:"type,attr"`
	ActionData string `xml:",innerxml"`
}

// Reason
type Reason struct {
	XMLName xml.Name `xml:"Reason"`
	Rule Rule `xml:"Rule,omitempty"`
	Trace Trace `xml:"Trace,omitempty"`
}

// Rule
type Rule struct {
	XMLName xml.Name `xml:"Rule"`
	RuleID string `xml:"ruleID,attr"`
}

// Group
type Group struct {
	XMLName xml.Name `xml:"Group"`
	Name string `xml:"name,attr"`
	Data string `xml:",innerxml"`
}

// Knowledge
type Knowledge struct {
	XMLName xml.Name `xml:"Knowledge"`
	Facts []Fact `xml:"Fact"`
}

// Fact
type Fact struct {
	XMLName xml.Name `xml:"Fact"`
	Primary string `xml:"primary,attr"`
	Type string `xml:"type,attr,omitempty"`
	FactData string `xml:",innerxml"`
}

// ContextPool These structures are relevant to the ContextPool object
type ContextPool struct {
	XMLName xml.Name `xml:"ContextPool"`
	Context []Context `xml:"Context"`
}

// UnifiedNodePool These structures are relevant to the UnifiedNodePool object
type UnifiedNodePool struct {
	XMLName xml.Name `xml:"UnifiedNodePool"`
	Node []Node `xml:"Node"`
}

// Description These structures are relevant to the Description object
type Description struct {
	XMLName xml.Name `xml:"Description"`
	ContentType string `xml:"contentType,attr"`
	ClassID string `xml:"classID,attr"`
	Abstract Abstract `xml:"Abstract"`
	Explanation Explanation `xml:"Explanation"`
	Recommendations Recommendations `xml:"Recommendations"`
	Tips []Tip `xml:"Tips>Tip,omitempty"`
	References []Reference `xml:"References>Reference"`
	CustomDescription CustomDescription `xml:"CustomDescription,omitempty"`
}

// Abstract
type Abstract struct {
	XMLName xml.Name `xml:"Abstract"`
	Text string `xml:",innerxml"`
}

// Explanation
type Explanation struct {
	XMLName xml.Name `xml:"Explanation"`
	Text string `xml:",innerxml"`
}

// Recommendations
type Recommendations struct {
	XMLName xml.Name `xml:"Recommendations"`
	Text string `xml:",innerxml"`
}

// Reference
type Reference struct {
	XMLName xml.Name `xml:"Reference"`
	Title string `xml:"Title"`
	Author string `xml:"Author"`
}

// Tip
type Tip struct {
	XMLName xml.Name `xml:"Tip"`
	Tip string `xml:",innerxml"`
}

// CustomDescription
type CustomDescription struct {
	XMLName xml.Name `xml:"CustomDescription"`
	ContentType string `xml:"contentType,attr"`
	RuleID string `xml:"ruleID,attr"`
	Explanation Explanation `xml:"Explanation"`
	Recommendations Recommendations `xml:"Recommendations"`
	References []Reference `xml:"References>Reference"`
}

// Snippet These structures are relevant to the Snippets object
type Snippet struct {
	XMLName xml.Name `xml:"Snippet"`
	SnippetId string `xml:"id,attr"`
	File string `xml:"File"`
	StartLine int `xml:"StartLine"`
	EndLine int `xml:"EndLine"`
	Text string `xml:"Text"`
}

// ProgramData These structures are relevant to the ProgramData object
type ProgramData struct {
	XMLName xml.Name `xml:"ProgramData"`
	Sources []SourceInstance `xml:"Sources>SourceInstance"`
	Sinks []SinkInstance `xml:"Sinks>SinkInstance"`
	CalledWithNoDef []Function `xml:"CalledWithNoDef>Function"`
}

// SourceInstance
type SourceInstance struct {
	XMLName xml.Name `xml:"SourceInstance"`
	RuleID string `xml:"ruleID,attr"`
	FunctionCall FunctionCall `xml:"FunctionCall,omitempty"`
	FunctionEntry FunctionEntry `xml:"FunctionEntry,omitempty"`
	SourceLocation SourceLocation `xml:"SourceLocation,omitempty"`
	TaintFlags TaintFlags `xml:"TaintFlags"`
}

// FunctionCall
type FunctionCall struct {
	XMLName xml.Name `xml:"FunctionCall"`
	SourceLocation SourceLocation `xml:"SourceLocation"`
	Function Function `xml:"Function"`
}

// FunctionEntry
type FunctionEntry struct {
	XMLName xml.Name `xml:"FunctionEntry"`
	SourceLocation SourceLocation `xml:"SourceLocation"`
	Function Function `xml:"Function"`
}

// TaintFlags
type TaintFlags struct {
	XMLName xml.Name `xml:"TaintFlags"`
	TaintFlag []TaintFlag `xml:"TaintFlag"`
}

// TaintFlag
type TaintFlag struct {
	XMLName xml.Name `xml:"TaintFlag"`
	TaintFlagName string `xml:"name,attr"`
}

// SinkInstance
type SinkInstance struct {
	XMLName xml.Name `xml:"SinkInstance"`
	RuleID string `xml:"ruleID,attr"`
	FunctionCall FunctionCall `xml:"FunctionCall,omitempty"`
	SourceLocation SourceLocation `xml:"SourceLocation,omitempty"`
}

// EngineData These structures are relevant to the EngineData object
type EngineData struct {
	XMLName xml.Name `xml:"EngineData"`
	EngineVersion string `xml:"EngineVersion"`
	RulePacks []RulePack `xml:"RulePacks>RulePack"`
	Properties []Properties `xml:"Properties"`
	CLArguments []string `xml:"CommandLine>Argument"`
	Errors []Error `xml:"Errors>Error"`
	MachineInfo MachineInfo `xml:"MachineInfo"`
	FilterResult FilterResult `xml:"FilterResult"`
	RuleInfo []RuleInfo `xml:"RuleInfo>Rule"`
	LicenseInfo LicenseInfo `xml:"LicenseInfo"`
}

// RulePack
type RulePack struct {
	XMLName xml.Name `xml:"RulePack"`
	RulePackID string `xml:"RulePackID"`
	SKU string `xml:"SKU"`
	Name string `xml:"Name"`
	Version string `xml:"Version"`
	MAC string `xml:"MAC"`
}

// Properties
type Properties struct {
	XMLName xml.Name `xml:"Properties"`
	PropertiesType string `xml:"type,attr"`
	Property []Property `xml:"Property"`
}

// Property
type Property struct {
	XMLName xml.Name `xml:"Property"`
	Name string `xml:"name"`
	Value string `xml:"value"`
}

// Error
type Error struct {
	XMLName xml.Name `xml:"Error"`
	ErrorCode string `xml:"code,attr"`
	ErrorMessage string `xml:",innerxml"`
}

// MachineInfo
type MachineInfo struct {
	XMLName xml.Name `xml:"MachineInfo"`
	Hostname string `xml:"Hostname"`
	Username string `xml:"Username"`
	Platform string `xml:"Platform"`
}

// FilterResult
type FilterResult struct {
	XMLName xml.Name `xml:"FilterResult"`
	//Todo? No data in sample audit file
}

// RuleInfo
type RuleInfo struct {
	XMLName xml.Name `xml:"Rule"`
	RuleID string `xml:"id,attr"`
	MetaInfoGroup []Group `xml:"MetaInfo>Group,omitempty"`
}

// LicenseInfo
type LicenseInfo struct {
	XMLName xml.Name `xml:"LicenseInfo"`
	Metadata []Metadata `xml:"Metadata"`
	Capability []Capability `xml:"Capability"`
}

// Metadata
type Metadata struct {
	XMLName xml.Name `xml:"Metadata"`
	Name string `xml:"name"`
	Value string `xml:"value"`
}

// Capability
type Capability struct {
	XMLName xml.Name `xml:"Capability"`
	Name string `xml:"Name"`
	Expiration string `xml:"Expiration"`
	Attribute Attribute `xml:"Attribute"`
}

// Attribute
type Attribute struct {
	XMLName xml.Name `xml:"Attribute"`
	Name string `xml:"name"`
	Value string `xml:"value"`
}

// ConvertFprToSarif converts the FPR file contents into SARIF format
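//
// A minimal usage sketch from a caller's point of view (sys, project, projectVersion,
// filterSet and the FPR path are illustrative placeholders, not values defined here):
//
//	sarifReport, err := fortify.ConvertFprToSarif(sys, project, projectVersion, "target/result.fpr", filterSet)
//	if err != nil {
//		log.Entry().WithError(err).Error("failed to convert FPR to SARIF")
//	}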
func ConvertFprToSarif ( sys System , project * models . Project , projectVersion * models . ProjectVersion , resultFilePath string , filterSet * models . FilterSet ) ( format . SARIF , error ) {
log . Entry ( ) . Debug ( "Extracting FPR." )
var sarif format . SARIF
tmpFolder , err := ioutil . TempDir ( "." , "temp-" )
defer os . RemoveAll ( tmpFolder )
if err != nil {
log . Entry ( ) . WithError ( err ) . WithField ( "path" , tmpFolder ) . Debug ( "Creating temp directory failed" )
return sarif , err
}
_ , err = piperutils . Unzip ( resultFilePath , tmpFolder )
if err != nil {
return sarif , err
}
log . Entry ( ) . Debug ( "Reading audit file." )
data , err := ioutil . ReadFile ( filepath . Join ( tmpFolder , "audit.fvdl" ) )
if err != nil {
return sarif , err
}
if len ( data ) == 0 {
log . Entry ( ) . Error ( "Error reading audit file at " + filepath . Join ( tmpFolder , "audit.fvdl" ) + ". The file might be missing, corrupted, or too large. Aborting procedure." )
err := errors . New ( "cannot read audit file" )
return sarif , err
}
log . Entry ( ) . Debug ( "Calling Parse." )
return Parse ( sys , project , projectVersion , data , filterSet )
}
// Parse parses the FPR file
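// It decodes the FVDL document, optionally enriches each issue with audit data fetched
// from the SSC backend, and then assembles the SARIF run: results, rules, invocation,
// originalUriBaseIds, artifacts, threadFlowLocations, conversion and taxonomies.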
func Parse ( sys System , project * models . Project , projectVersion * models . ProjectVersion , data [ ] byte , filterSet * models . FilterSet ) ( format . SARIF , error ) {
//To read XML data, Unmarshal or Decode can be used, here we use Decode to work on the stream
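//xml.Unmarshal would need the whole document as a single byte slice, while the Decoder pulls tokens from the io.Reader as it goes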
reader := bytes . NewReader ( data )
decoder := xml . NewDecoder ( reader )
start := time . Now ( ) // For the conversion start time
var fvdl FVDL
err := decoder . Decode ( & fvdl )
if err != nil {
return format . SARIF { } , err
}
//Create an object containing all audit data
log . Entry ( ) . Debug ( "Querying Fortify SSC for batch audit data" )
oneRequestPerIssueMode := false
var auditData [ ] * models . ProjectVersionIssue
maxretries := 5 // Maximum number of requests allowed to fail before stopping them
if sys != nil && projectVersion != nil {
auditData , err = sys . GetAllIssueDetails ( projectVersion . ID )
if err != nil || len ( auditData ) == 0 { // With a length of 0, it's reasonable to assume something went wrong
log . Entry ( ) . WithError ( err ) . Error ( "failed to get all audit data, defaulting to one-request-per-issue basis" )
oneRequestPerIssueMode = true
// We do not lower maxretries here in case a "real" bug happened
} else {
log . Entry ( ) . Debug ( "Request successful, data frame size: " , len ( auditData ) , " audits" )
}
} else {
log . Entry ( ) . Error ( "no system instance or project version found, lookup impossible" )
oneRequestPerIssueMode = true
maxretries = 1 // Set to 1 if the sys instance isn't defined: chances are it couldn't be created, but we leave one retry in case there was an unknown bug
log . Entry ( ) . Debug ( "request failed: remaining retries " , maxretries )
}
//Now, we handle the sarif
var sarif format . SARIF
sarif . Schema = "https://docs.oasis-open.org/sarif/sarif/v2.1.0/cos02/schemas/sarif-schema-2.1.0.json"
sarif . Version = "2.1.0"
var fortifyRun format . Runs
fortifyRun . ColumnKind = "utf16CodeUnits"
cweIdsForTaxonomies := make ( map [ string ] string ) //Defining this here and filling it in the course of the program helps filling the Taxonomies object easily. Map because easy to check for keys
sarif . Runs = append ( sarif . Runs , fortifyRun )
// Handle results/vulnerabilities
log . Entry ( ) . Debug ( "[SARIF] Now handling results." )
for i := 0 ; i < len ( fvdl . Vulnerabilities . Vulnerability ) ; i ++ {
result := * new ( format . Results )
//result.RuleID = fvdl.Vulnerabilities.Vulnerability[i].ClassInfo.ClassID
// Handle ruleID the same way as in Rule
idArray := [ ] string { }
/* if fvdl . Vulnerabilities . Vulnerability [ i ] . ClassInfo . Kingdom != "" {
idArray = append ( idArray , fvdl . Vulnerabilities . Vulnerability [ i ] . ClassInfo . Kingdom )
} */
if fvdl . Vulnerabilities . Vulnerability [ i ] . ClassInfo . Type != "" {
idArray = append ( idArray , fvdl . Vulnerabilities . Vulnerability [ i ] . ClassInfo . Type )
}
if fvdl . Vulnerabilities . Vulnerability [ i ] . ClassInfo . Subtype != "" {
idArray = append ( idArray , fvdl . Vulnerabilities . Vulnerability [ i ] . ClassInfo . Subtype )
}
result . RuleID = "fortify-" + strings . Join ( idArray , "/" )
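// e.g. a finding with Type "SQL Injection" and Subtype "Persistent" (illustrative values) yields the RuleID "fortify-SQL Injection/Persistent"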
result . Kind = "fail" // Default value, Level must not be set if kind is not fail
// This is an "easy" treatment of result.Level. It does not follow the spec exactly, but the idea is there
// An exact processing algorithm can be found here https://docs.oasis-open.org/sarif/sarif/v2.1.0/os/sarif-v2.1.0-os.html#_Toc34317648
if fvdl . Vulnerabilities . Vulnerability [ i ] . InstanceInfo . InstanceSeverity >= 3.0 {
result . Level = "error"
} else if fvdl . Vulnerabilities . Vulnerability [ i ] . InstanceInfo . InstanceSeverity >= 1.5 {
result . Level = "warning"
} else if fvdl . Vulnerabilities . Vulnerability [ i ] . InstanceInfo . InstanceSeverity < 1.5 {
result . Level = "note"
} else {
result . Level = "none"
}
//get message
for j := 0 ; j < len ( fvdl . Description ) ; j ++ {
if fvdl . Description [ j ] . ClassID == fvdl . Vulnerabilities . Vulnerability [ i ] . ClassInfo . ClassID {
result . RuleIndex = j
rawMessage := unescapeXML ( fvdl . Description [ j ] . Abstract . Text )
// Replacement definitions in message
for l := 0 ; l < len ( fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . ReplacementDefinitions . Def ) ; l ++ {
rawMessage = strings . ReplaceAll ( rawMessage , "<Replace key=\"" + fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . ReplacementDefinitions . Def [ l ] . DefKey + "\"/>" , fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . ReplacementDefinitions . Def [ l ] . DefValue )
}
msg := new ( format . Message )
msg . Text = rawMessage
result . Message = msg
break
}
}
// Handle all locations items
location := * new ( format . Location )
//get location
for k := 0 ; k < len ( fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace ) ; k ++ { // k iterates on traces
//In each trace/primary, there can be one or more entries
//Each trace represents a codeflow, each entry represents a location in threadflow
codeFlow := * new ( format . CodeFlow )
threadFlow := * new ( format . ThreadFlow )
//We now iterate on Entries in the trace/primary
for l := 0 ; l < len ( fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry ) ; l ++ { // l iterates on entries
tfla := * new ( [ ] format . Locations ) //threadflowlocationarray. Useful for the node-in-node edge case
threadFlowLocation := * new ( format . Locations ) //One is created regardless of the path taken afterwards
//this will populate both threadFlowLocation AND the parent location object (result.Locations[0])
// We check if a noderef is present: if not (the ref index is the default 0), this is a "real" node. As a measure of safety (in case a node refers to nodeid 0), we add another check: the node must have a label or an isDefault value
if fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . NodeRef . RefId == 0 && ( fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . NodeLabel != "" || fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . IsDefault != "" ) {
//initialize the current location object, it will be added to threadFlowLocation.Location
tfloc := new ( format . Location )
//get artifact location
for j := 0 ; j < len ( fvdl . Build . SourceFiles ) ; j ++ { // j iterates on source files
if fvdl . Build . SourceFiles [ j ] . Name == fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . SourceLocation . Path {
tfloc . PhysicalLocation . ArtifactLocation . Index = j + 1
tfloc . PhysicalLocation . ArtifactLocation . URI = fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . SourceLocation . Path
tfloc . PhysicalLocation . ArtifactLocation . URIBaseId = "%SRCROOT%"
break
}
}
//get region & context region
tfloc . PhysicalLocation . Region . StartLine = fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . SourceLocation . Line
tfloc . PhysicalLocation . Region . EndLine = fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . SourceLocation . LineEnd
tfloc . PhysicalLocation . Region . StartColumn = fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . SourceLocation . ColStart
tfloc . PhysicalLocation . Region . EndColumn = fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . SourceLocation . ColEnd
//Snippet is handled last
targetSnippetId := fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . SourceLocation . Snippet
for j := 0 ; j < len ( fvdl . Snippets ) ; j ++ {
if fvdl . Snippets [ j ] . SnippetId == targetSnippetId {
tfloc . PhysicalLocation . ContextRegion = new ( format . ContextRegion )
tfloc . PhysicalLocation . ContextRegion . StartLine = fvdl . Snippets [ j ] . StartLine
tfloc . PhysicalLocation . ContextRegion . EndLine = fvdl . Snippets [ j ] . EndLine
snippetSarif := new ( format . SnippetSarif )
snippetSarif . Text = fvdl . Snippets [ j ] . Text
tfloc . PhysicalLocation . ContextRegion . Snippet = snippetSarif
break
}
}
// if a label is passed, put it as message
if fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . NodeLabel != "" {
tfloc . Message = new ( format . Message )
tfloc . Message . Text = fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . NodeLabel
} else {
// otherwise check for existence of the action object, and if yes, save its message
if ! ( fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . Action . ActionData == "" ) {
tfloc . Message = new ( format . Message )
tfloc . Message . Text = fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . Action . ActionData
// Handle snippet
snippetTarget := handleSnippet ( fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . Action . Type , fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . Action . ActionData )
if tfloc . PhysicalLocation . ContextRegion != nil && tfloc . PhysicalLocation . ContextRegion . Snippet != nil {
physLocationSnippetLines := strings . Split ( tfloc . PhysicalLocation . ContextRegion . Snippet . Text , "\n" )
snippetText := ""
for j := 0 ; j < len ( physLocationSnippetLines ) ; j ++ {
if strings . Contains ( physLocationSnippetLines [ j ] , snippetTarget ) {
snippetText = physLocationSnippetLines [ j ]
break
}
}
snippetSarif := new ( format . SnippetSarif )
if snippetText != "" {
snippetSarif . Text = snippetText
} else {
snippetSarif . Text = tfloc . PhysicalLocation . ContextRegion . Snippet . Text
}
tfloc . PhysicalLocation . Region . Snippet = snippetSarif
}
} else {
if tfloc . PhysicalLocation . ContextRegion != nil && tfloc . PhysicalLocation . ContextRegion . Snippet != nil {
snippetSarif := new ( format . SnippetSarif )
snippetSarif . Text = tfloc . PhysicalLocation . ContextRegion . Snippet . Text
tfloc . PhysicalLocation . Region . Snippet = snippetSarif
}
}
}
location = * tfloc
//set Kinds
threadFlowLocation . Location = tfloc
//threadFlowLocation.Kinds = append(threadFlowLocation.Kinds, "review") //TODO
threadFlowLocation . Index = 0 // to be safe?
tfla = append ( tfla , threadFlowLocation )
// "Node-in-node" edge case! in some cases the "Reason" object will contain a "Trace>Primary>Entry>Node" object
// Check for it at depth 1 only, as a precaution
if len ( fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . Reason . Trace . Primary . Entry ) > 0 {
ninThreadFlowLocation := * new ( format . Locations )
if fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . Reason . Trace . Primary . Entry [ 0 ] . NodeRef . RefId != 0 {
// As usual, only the index for a ref
ninThreadFlowLocation . Index = fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . Reason . Trace . Primary . Entry [ 0 ] . NodeRef . RefId + 1
} else {
// Build a new "node-in-node" tfloc, it will be appended to tfla
nintfloc := new ( format . Location )
// artifactlocation
for j := 0 ; j < len ( fvdl . Build . SourceFiles ) ; j ++ {
if fvdl . Build . SourceFiles [ j ] . Name == fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . Reason . Trace . Primary . Entry [ 0 ] . Node . SourceLocation . Path {
nintfloc . PhysicalLocation . ArtifactLocation . Index = j + 1
nintfloc . PhysicalLocation . ArtifactLocation . URI = fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . Reason . Trace . Primary . Entry [ 0 ] . Node . SourceLocation . Path
nintfloc . PhysicalLocation . ArtifactLocation . URIBaseId = "%SRCROOT%"
break
}
2022-03-14 12:26:05 +02:00
}
// region & context region
nintfloc . PhysicalLocation . Region . StartLine = fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . Reason . Trace . Primary . Entry [ 0 ] . Node . SourceLocation . Line
nintfloc . PhysicalLocation . Region . EndLine = fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . Reason . Trace . Primary . Entry [ 0 ] . Node . SourceLocation . LineEnd
nintfloc . PhysicalLocation . Region . StartColumn = fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . Reason . Trace . Primary . Entry [ 0 ] . Node . SourceLocation . ColStart
nintfloc . PhysicalLocation . Region . EndColumn = fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . Reason . Trace . Primary . Entry [ 0 ] . Node . SourceLocation . ColEnd
// snippet
targetSnippetId := fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . Reason . Trace . Primary . Entry [ 0 ] . Node . SourceLocation . Snippet
for j := 0 ; j < len ( fvdl . Snippets ) ; j ++ {
if fvdl . Snippets [ j ] . SnippetId == targetSnippetId {
nintfloc . PhysicalLocation . ContextRegion = new ( format . ContextRegion )
nintfloc . PhysicalLocation . ContextRegion . StartLine = fvdl . Snippets [ j ] . StartLine
nintfloc . PhysicalLocation . ContextRegion . EndLine = fvdl . Snippets [ j ] . EndLine
snippetSarif := new ( format . SnippetSarif )
snippetSarif . Text = fvdl . Snippets [ j ] . Text
nintfloc . PhysicalLocation . ContextRegion . Snippet = snippetSarif
break
}
}
// label as message
if fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . Reason . Trace . Primary . Entry [ 0 ] . Node . NodeLabel != "" {
nintfloc . Message = new ( format . Message )
nintfloc . Message . Text = fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . Node . Reason . Trace . Primary . Entry [ 0 ] . Node . NodeLabel
}
ninThreadFlowLocation . Location = nintfloc
ninThreadFlowLocation . Index = 0 // Safety
}
tfla = append ( tfla , ninThreadFlowLocation )
}
// END edge case
} else { //is not a main threadflow: just register NodeRef index in threadFlowLocation
// Sarif does not provision 0 as a valid array index, so we increment the node ref id
// Each index i serves to reference the i-th object in run.threadFlowLocations
threadFlowLocation . Index = fvdl . Vulnerabilities . Vulnerability [ i ] . AnalysisInfo . Trace [ k ] . Primary . Entry [ l ] . NodeRef . RefId + 1
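// e.g. a NodeRef with RefId 4 is stored as Index 5 here and resolved back to the matching run.threadFlowLocations entry via threadFlowIndexMap further below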
tfla = append ( tfla , threadFlowLocation )
}
threadFlow . Locations = append ( threadFlow . Locations , tfla ... )
}
codeFlow . ThreadFlows = append ( codeFlow . ThreadFlows , threadFlow )
result . CodeFlows = append ( result . CodeFlows , codeFlow )
}
//For some reason, the principal object only has 1 location: here we keep the last one
//Void message
location . Message = nil
result . Locations = append ( result . Locations , location )
//handle relatedLocation
relatedLocation := * new ( format . RelatedLocation )
relatedLocation . ID = 1
relatedLocation . PhysicalLocation = * new ( format . RelatedPhysicalLocation )
relatedLocation . PhysicalLocation . ArtifactLocation = location . PhysicalLocation . ArtifactLocation
relatedLocation . PhysicalLocation . Region = * new ( format . RelatedRegion )
relatedLocation . PhysicalLocation . Region . StartLine = location . PhysicalLocation . Region . StartLine
relatedLocation . PhysicalLocation . Region . StartColumn = location . PhysicalLocation . Region . StartColumn
result . RelatedLocations = append ( result . RelatedLocations , relatedLocation )
//handle partialFingerprints
result . PartialFingerprints . FortifyInstanceID = fvdl . Vulnerabilities . Vulnerability [ i ] . InstanceInfo . InstanceID
result . PartialFingerprints . PrimaryLocationLineHash = fvdl . Vulnerabilities . Vulnerability [ i ] . InstanceInfo . InstanceID //Fixit
//handle properties
prop := new ( format . SarifProperties )
prop . InstanceSeverity = strconv . FormatFloat ( fvdl . Vulnerabilities . Vulnerability [ i ] . InstanceInfo . InstanceSeverity , 'f' , 1 , 64 )
prop . Confidence = fvdl . Vulnerabilities . Vulnerability [ i ] . InstanceInfo . Confidence
prop . InstanceID = fvdl . Vulnerabilities . Vulnerability [ i ] . InstanceInfo . InstanceID
prop . RuleGUID = fvdl . Vulnerabilities . Vulnerability [ i ] . ClassInfo . ClassID
//Get the audit data
if err := integrateAuditData ( prop , fvdl . Vulnerabilities . Vulnerability [ i ] . InstanceInfo . InstanceID , sys , project , projectVersion , auditData , filterSet , oneRequestPerIssueMode , maxretries ) ; err != nil {
log . Entry ( ) . Debug ( err )
maxretries = maxretries - 1
if maxretries >= 0 {
log . Entry ( ) . Debug ( "request failed: remaining retries " , maxretries )
}
}
result . Properties = prop
sarif . Runs [ 0 ] . Results = append ( sarif . Runs [ 0 ] . Results , result )
}
//handle the tool object
log . Entry ( ) . Debug ( "[SARIF] Now handling driver object." )
tool := * new ( format . Tool )
tool . Driver = * new ( format . Driver )
tool . Driver . Name = "MicroFocus Fortify SCA"
tool . Driver . Version = fvdl . EngineData . EngineVersion
tool . Driver . InformationUri = "https://www.microfocus.com/documentation/fortify-static-code-analyzer-and-tools/2020/SCA_Guide_20.2.0.pdf"
//handles rules
for i := 0 ; i < len ( fvdl . EngineData . RuleInfo ) ; i ++ { //i iterates on rules
sarifRule := * new ( format . SarifRule )
sarifRule . ID = fvdl . EngineData . RuleInfo [ i ] . RuleID
sarifRule . GUID = fvdl . EngineData . RuleInfo [ i ] . RuleID
for j := 0 ; j < len ( fvdl . Vulnerabilities . Vulnerability ) ; j ++ { //j iterates on vulns to find the name
if fvdl . Vulnerabilities . Vulnerability [ j ] . ClassInfo . ClassID == fvdl . EngineData . RuleInfo [ i ] . RuleID {
var nameArray [ ] string
var idArray [ ] string
if fvdl . Vulnerabilities . Vulnerability [ j ] . ClassInfo . Kingdom != "" {
idArray = append ( idArray , fvdl . Vulnerabilities . Vulnerability [ j ] . ClassInfo . Kingdom )
words := strings . Split ( fvdl . Vulnerabilities . Vulnerability [ j ] . ClassInfo . Kingdom , " " )
for index , element := range words { // These are required to ensure that titlecase is respected in titles, part of sarif "friendly name" rules
words [ index ] = piperutils . Title ( strings . ToLower ( element ) )
}
nameArray = append ( nameArray , words ... )
}
if fvdl . Vulnerabilities . Vulnerability [ j ] . ClassInfo . Type != "" {
idArray = append ( idArray , fvdl . Vulnerabilities . Vulnerability [ j ] . ClassInfo . Type )
words := strings . Split ( fvdl . Vulnerabilities . Vulnerability [ j ] . ClassInfo . Type , " " )
for index , element := range words {
words [ index ] = piperutils . Title ( strings . ToLower ( element ) )
}
nameArray = append ( nameArray , words ... )
}
if fvdl . Vulnerabilities . Vulnerability [ j ] . ClassInfo . Subtype != "" {
idArray = append ( idArray , fvdl . Vulnerabilities . Vulnerability [ j ] . ClassInfo . Subtype )
words := strings . Split ( fvdl . Vulnerabilities . Vulnerability [ j ] . ClassInfo . Subtype , " " )
for index , element := range words {
words [ index ] = piperutils . Title ( strings . ToLower ( element ) )
}
nameArray = append ( nameArray , words ... )
}
sarifRule . ID = "fortify-" + strings . Join ( idArray , "/" )
sarifRule . Name = strings . Join ( nameArray , "" )
defaultConfig := new ( format . DefaultConfiguration )
defaultConfig . Level = "warning" // Default value
defaultConfig . Enabled = true // Default value
defaultConfig . Rank = - 1.0 // Default value
defaultConfig . Properties . DefaultSeverity = strconv . FormatFloat ( fvdl . Vulnerabilities . Vulnerability [ j ] . ClassInfo . DefaultSeverity , 'f' , 1 , 64 )
sarifRule . DefaultConfiguration = defaultConfig
//Descriptions
for j := 0 ; j < len ( fvdl . Description ) ; j ++ {
if fvdl . Description [ j ] . ClassID == sarifRule . GUID {
//rawAbstract := strings.Join(idArray, "/")
rawAbstract := unescapeXML ( fvdl . Description [ j ] . Abstract . Text )
rawExplanation := unescapeXML ( fvdl . Description [ j ] . Explanation . Text )
// Replacement definitions in abstract/explanation
for k := 0 ; k < len ( fvdl . Vulnerabilities . Vulnerability ) ; k ++ { // Iterate on vulns to find the correct one (where ReplacementDefinitions are)
if fvdl . Vulnerabilities . Vulnerability [ k ] . ClassInfo . ClassID == fvdl . Description [ j ] . ClassID {
for l := 0 ; l < len ( fvdl . Vulnerabilities . Vulnerability [ k ] . AnalysisInfo . ReplacementDefinitions . Def ) ; l ++ {
rawAbstract = strings . ReplaceAll ( rawAbstract , "<Replace key=\"" + fvdl . Vulnerabilities . Vulnerability [ k ] . AnalysisInfo . ReplacementDefinitions . Def [ l ] . DefKey + "\"/>" , fvdl . Vulnerabilities . Vulnerability [ k ] . AnalysisInfo . ReplacementDefinitions . Def [ l ] . DefValue )
rawExplanation = strings . ReplaceAll ( rawExplanation , "<Replace key=\"" + fvdl . Vulnerabilities . Vulnerability [ k ] . AnalysisInfo . ReplacementDefinitions . Def [ l ] . DefKey + "\"/>" , fvdl . Vulnerabilities . Vulnerability [ k ] . AnalysisInfo . ReplacementDefinitions . Def [ l ] . DefValue )
}
// Replacement locationdef in explanation
for l := 0 ; l < len ( fvdl . Vulnerabilities . Vulnerability [ k ] . AnalysisInfo . ReplacementDefinitions . LocationDef ) ; l ++ {
rawExplanation = strings . ReplaceAll ( rawExplanation , fvdl . Vulnerabilities . Vulnerability [ k ] . AnalysisInfo . ReplacementDefinitions . LocationDef [ l ] . Key , fvdl . Vulnerabilities . Vulnerability [ k ] . AnalysisInfo . ReplacementDefinitions . LocationDef [ l ] . Path )
}
// If Description has a CustomDescription, add it for good measure
if fvdl . Description [ j ] . CustomDescription . RuleID != "" {
rawExplanation = rawExplanation + " \n; " + unescapeXML ( fvdl . Description [ j ] . CustomDescription . Explanation . Text )
}
sd := new ( format . Message )
sd . Text = rawAbstract
sarifRule . ShortDescription = sd
fd := new ( format . Message )
fd . Text = rawExplanation
sarifRule . FullDescription = fd
break
}
}
break
}
}
//properties
//Prepare a CWE id object in case it is needed
cweIds := [ ] string { }
//scan for the properties we want:
var propArray [ ] [ ] string
for j := 0 ; j < len ( fvdl . EngineData . RuleInfo [ i ] . MetaInfoGroup ) ; j ++ {
if ( fvdl . EngineData . RuleInfo [ i ] . MetaInfoGroup [ j ] . Name == "Accuracy" ) || ( fvdl . EngineData . RuleInfo [ i ] . MetaInfoGroup [ j ] . Name == "Impact" ) || ( fvdl . EngineData . RuleInfo [ i ] . MetaInfoGroup [ j ] . Name == "Probability" ) {
propArray = append ( propArray , [ ] string { fvdl . EngineData . RuleInfo [ i ] . MetaInfoGroup [ j ] . Name , fvdl . EngineData . RuleInfo [ i ] . MetaInfoGroup [ j ] . Data } )
} else if fvdl . EngineData . RuleInfo [ i ] . MetaInfoGroup [ j ] . Name == "altcategoryCWE" {
//Get all CWE IDs. First, split on ", "
rawCweIds := strings . Split ( fvdl . EngineData . RuleInfo [ i ] . MetaInfoGroup [ j ] . Data , ", " )
//If not "None", split each string on " " and add its 2nd index
if rawCweIds [ 0 ] != "None" {
for k := 0 ; k < len ( rawCweIds ) ; k ++ {
cweId := strings . Split ( rawCweIds [ k ] , " " ) [ 2 ]
//Fill the cweIdsForTaxonomies map if not already in
if _ , isIn := cweIdsForTaxonomies [ cweId ] ; ! isIn {
cweIdsForTaxonomies [ cweId ] = cweId
}
cweIds = append ( cweIds , cweId )
}
} else {
cweIds = append ( cweIds , rawCweIds [ 0 ] )
}
}
}
var ruleProp * format . SarifRuleProperties
ruleProp = new ( format . SarifRuleProperties )
if len ( propArray ) != 0 {
for j := 0 ; j < len ( propArray ) ; j ++ {
if propArray [ j ] [ 0 ] == "Accuracy" {
ruleProp . Accuracy = propArray [ j ] [ 1 ]
} else if propArray [ j ] [ 0 ] == "Impact" {
ruleProp . Impact = propArray [ j ] [ 1 ]
} else if propArray [ j ] [ 0 ] == "Probability" {
ruleProp . Probability = propArray [ j ] [ 1 ]
}
}
}
// Add each part of the "name" in the tags
if fvdl . Vulnerabilities . Vulnerability [ j ] . ClassInfo . Kingdom != "" {
ruleProp . Tags = append ( ruleProp . Tags , fvdl . Vulnerabilities . Vulnerability [ j ] . ClassInfo . Kingdom )
}
if fvdl . Vulnerabilities . Vulnerability [ j ] . ClassInfo . Type != "" {
ruleProp . Tags = append ( ruleProp . Tags , fvdl . Vulnerabilities . Vulnerability [ j ] . ClassInfo . Type )
}
if fvdl . Vulnerabilities . Vulnerability [ j ] . ClassInfo . Subtype != "" {
ruleProp . Tags = append ( ruleProp . Tags , fvdl . Vulnerabilities . Vulnerability [ j ] . ClassInfo . Subtype )
}
//Add the SecuritySeverity parameter for GHAS tagging
ruleProp . SecuritySeverity = strconv . FormatFloat ( 2 * fvdl . Vulnerabilities . Vulnerability [ j ] . InstanceInfo . InstanceSeverity , 'f' , 1 , 64 )
sarifRule . Properties = ruleProp
//relationships: will most likely require some expansion
//One relationship per CWE id
for j := 0 ; j < len ( cweIds ) ; j ++ {
if cweIds [ j ] == "None" {
continue
}
sarifRule . Properties . Tags = append ( sarifRule . Properties . Tags , "external/cwe/cwe-" + cweIds [ j ] )
rls := * new ( format . Relationships )
rls . Target . Id = cweIds [ j ]
rls . Target . ToolComponent . Name = "CWE"
rls . Target . ToolComponent . Guid = "25F72D7E-8A92-459D-AD67-64853F788765"
rls . Kinds = append ( rls . Kinds , "relevant" )
sarifRule . Relationships = append ( sarifRule . Relationships , rls )
}
// Add a helpURI as some processors require it
sarifRule . HelpURI = "https://vulncat.fortify.com/en/weakness"
//Finalize: append the rule
tool . Driver . Rules = append ( tool . Driver . Rules , sarifRule )
// A rule vuln has been found for this rule, no need to keep iterating
break
}
}
}
//supportedTaxonomies
sTax := * new ( format . SupportedTaxonomies ) //This object seems fixed, but it will have to be checked
sTax . Name = "CWE"
sTax . Index = 1
sTax . Guid = "25F72D7E-8A92-459D-AD67-64853F788765"
tool . Driver . SupportedTaxonomies = append ( tool . Driver . SupportedTaxonomies , sTax )
//Add additional rulepacks
for pack := 0 ; pack < len ( fvdl . EngineData . RulePacks ) ; pack ++ {
extension := * new ( format . Driver )
extension . Name = fvdl . EngineData . RulePacks [ pack ] . Name
extension . Version = fvdl . EngineData . RulePacks [ pack ] . Version
extension . GUID = fvdl . EngineData . RulePacks [ pack ] . RulePackID
tool . Extensions = append ( tool . Extensions , extension )
}
//Finalize: tool
sarif . Runs [ 0 ] . Tool = tool
//handle invocations object
log . Entry ( ) . Debug ( "[SARIF] Now handling invocation." )
invocation := * new ( format . Invocation )
for i := 0 ; i < len ( fvdl . EngineData . Properties ) ; i ++ { //i selects the properties type
if fvdl . EngineData . Properties [ i ] . PropertiesType == "Fortify" { // This is the correct type, now iterate on props
for j := 0 ; j < len ( fvdl . EngineData . Properties [ i ] . Property ) ; j ++ {
if fvdl . EngineData . Properties [ i ] . Property [ j ] . Name == "com.fortify.SCAExecutablePath" {
splitPath := strings . Split ( fvdl . EngineData . Properties [ i ] . Property [ j ] . Value , "/" )
invocation . CommandLine = splitPath [ len ( splitPath ) - 1 ]
break
}
}
break
}
}
invocation . CommandLine = strings . Join ( append ( [ ] string { invocation . CommandLine } , fvdl . EngineData . CLArguments ... ) , " " )
invocation . StartTimeUtc = strings . Join ( [ ] string { fvdl . Created . Date , fvdl . Created . Time } , "T" ) + ".000Z"
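// e.g. a CreatedTS with date "2022-06-20" and time "10:31:00" (illustrative values) becomes "2022-06-20T10:31:00.000Z"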
for i := 0 ; i < len ( fvdl . EngineData . Errors ) ; i ++ {
ten := * new ( format . ToolExecutionNotifications )
ten . Message . Text = fvdl . EngineData . Errors [ i ] . ErrorMessage
ten . Descriptor . Id = fvdl . EngineData . Errors [ i ] . ErrorCode
invocation . ToolExecutionNotifications = append ( invocation . ToolExecutionNotifications , ten )
}
invocation . ExecutionSuccessful = true //fvdl doesn't seem to plan for this setting
invocation . Machine = fvdl . EngineData . MachineInfo . Hostname
invocation . Account = fvdl . EngineData . MachineInfo . Username
invocProp := new ( format . InvocationProperties )
invocProp . Platform = fvdl . EngineData . MachineInfo . Platform
invocation . Properties = invocProp
sarif . Runs [ 0 ] . Invocations = append ( sarif . Runs [ 0 ] . Invocations , invocation )
//handle originalUriBaseIds
oubi := new ( format . OriginalUriBaseIds )
prefix := "file://"
if fvdl . Build . SourceBasePath [ 0 ] == '/' {
oubi . SrcRoot . Uri = prefix + fvdl . Build . SourceBasePath + "/"
} else {
oubi . SrcRoot . Uri = prefix + "/" + fvdl . Build . SourceBasePath + "/"
}
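// e.g. a SourceBasePath of /workspace/src becomes file:///workspace/src/, while C:/workspace/src becomes file:///C:/workspace/src/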
sarif . Runs [ 0 ] . OriginalUriBaseIds = oubi
//handle artifacts
log . Entry ( ) . Debug ( "[SARIF] Now handling artifacts." )
for i := 0 ; i < len ( fvdl . Build . SourceFiles ) ; i ++ { //i iterates on source files
artifact := * new ( format . Artifact )
artifact . Location . Uri = fvdl . Build . SourceFiles [ i ] . Name
artifact . Location . UriBaseId = "%SRCROOT%"
artifact . Length = fvdl . Build . SourceFiles [ i ] . FileSize
switch fvdl . Build . SourceFiles [ i ] . FileType {
case "java" :
artifact . MimeType = "text/x-java-source"
case "xml" :
artifact . MimeType = "text/xml"
default :
artifact . MimeType = "text"
}
artifact . Encoding = fvdl . Build . SourceFiles [ i ] . Encoding
sarif . Runs [ 0 ] . Artifacts = append ( sarif . Runs [ 0 ] . Artifacts , artifact )
}
//handle automationDetails
sarif . Runs [ 0 ] . AutomationDetails . Id = fvdl . Build . BuildID
//handle threadFlowLocations
log . Entry ( ) . Debug ( "[SARIF] Now handling threadFlowLocations." )
threadFlowLocationsObject := [ ] format . Locations { }
//to ensure an exact replacement in case a threadFlowLocation object refers to another, we prepare a map
threadFlowIndexMap := make ( map [ int ] ( [ ] int ) ) // This will store indexes, we will work with it only to reduce item copies to a minimum
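// Each key is the 1-based index of a UnifiedNodePool node; its value lists the 1-based threadFlowLocations indexes that node expands to (sub-nodes are added below via computeLocationPath)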
for i := 0 ; i < len ( fvdl . UnifiedNodePool . Node ) ; i ++ {
threadFlowIndexMap [ i + 1 ] = append ( threadFlowIndexMap [ i + 1 ] , i + 1 )
loc := new ( format . Location )
//get artifact location
for j := 0 ; j < len ( fvdl . Build . SourceFiles ) ; j ++ { // j iterates on source files
if fvdl . Build . SourceFiles [ j ] . Name == fvdl . UnifiedNodePool . Node [ i ] . SourceLocation . Path {
loc . PhysicalLocation . ArtifactLocation . Index = j + 1
loc . PhysicalLocation . ArtifactLocation . URI = fvdl . UnifiedNodePool . Node [ i ] . SourceLocation . Path
loc . PhysicalLocation . ArtifactLocation . URIBaseId = "%SRCROOT%"
break
}
}
//get region & context region
loc . PhysicalLocation . Region . StartLine = fvdl . UnifiedNodePool . Node [ i ] . SourceLocation . Line
loc . PhysicalLocation . Region . EndLine = fvdl . UnifiedNodePool . Node [ i ] . SourceLocation . LineEnd
loc . PhysicalLocation . Region . StartColumn = fvdl . UnifiedNodePool . Node [ i ] . SourceLocation . ColStart
loc . PhysicalLocation . Region . EndColumn = fvdl . UnifiedNodePool . Node [ i ] . SourceLocation . ColEnd
targetSnippetId := fvdl . UnifiedNodePool . Node [ i ] . SourceLocation . Snippet
for j := 0 ; j < len ( fvdl . Snippets ) ; j ++ {
if fvdl . Snippets [ j ] . SnippetId == targetSnippetId {
loc . PhysicalLocation . ContextRegion = new ( format . ContextRegion )
loc . PhysicalLocation . ContextRegion . StartLine = fvdl . Snippets [ j ] . StartLine
loc . PhysicalLocation . ContextRegion . EndLine = fvdl . Snippets [ j ] . EndLine
snippetSarif := new ( format . SnippetSarif )
snippetSarif . Text = fvdl . Snippets [ j ] . Text
loc . PhysicalLocation . ContextRegion . Snippet = snippetSarif
break
}
}
loc . Message = new ( format . Message )
loc . Message . Text = fvdl . UnifiedNodePool . Node [ i ] . Action . ActionData
// Handle snippet
snippetTarget := handleSnippet ( fvdl . UnifiedNodePool . Node [ i ] . Action . Type , fvdl . UnifiedNodePool . Node [ i ] . Action . ActionData )
if loc . PhysicalLocation . ContextRegion != nil && loc . PhysicalLocation . ContextRegion . Snippet != nil {
physLocationSnippetLines := strings . Split ( loc . PhysicalLocation . ContextRegion . Snippet . Text , "\n" )
snippetText := ""
for j := 0 ; j < len ( physLocationSnippetLines ) ; j ++ {
if strings . Contains ( physLocationSnippetLines [ j ] , snippetTarget ) {
snippetText = physLocationSnippetLines [ j ]
break
}
}
snippetSarif := new ( format . SnippetSarif )
if snippetText != "" {
snippetSarif . Text = snippetText
} else {
snippetSarif . Text = loc . PhysicalLocation . ContextRegion . Snippet . Text
}
loc . PhysicalLocation . Region . Snippet = snippetSarif
}
log . Entry ( ) . Debug ( "Compute eventual sub-nodes" )
threadFlowIndexMap [ i + 1 ] = computeLocationPath ( fvdl , i + 1 ) // Recursively traverse array
locs := format . Locations { Location : loc }
threadFlowLocationsObject = append ( threadFlowLocationsObject , locs )
}
sarif . Runs [ 0 ] . ThreadFlowLocations = threadFlowLocationsObject
// Now, iterate on threadflows in each result, and replace any index references...
for i := 0 ; i < len ( sarif . Runs [ 0 ] . Results ) ; i ++ {
for cf := 0 ; cf < len ( sarif . Runs [ 0 ] . Results [ i ] . CodeFlows ) ; cf ++ {
for tf := 0 ; tf < len ( sarif . Runs [ 0 ] . Results [ i ] . CodeFlows [ cf ] . ThreadFlows ) ; tf ++ {
log . Entry ( ) . Debug ( "Handling tf: " , tf , "from instance " , sarif . Runs [ 0 ] . Results [ i ] . PartialFingerprints . FortifyInstanceID )
newLocations := * new ( [ ] format . Locations )
for j := 0 ; j < len ( sarif . Runs [ 0 ] . Results [ i ] . CodeFlows [ cf ] . ThreadFlows [ tf ] . Locations ) ; j ++ {
if sarif . Runs [ 0 ] . Results [ i ] . CodeFlows [ cf ] . ThreadFlows [ tf ] . Locations [ j ] . Index != 0 {
indexes := threadFlowIndexMap [ sarif . Runs [ 0 ] . Results [ i ] . CodeFlows [ cf ] . ThreadFlows [ tf ] . Locations [ j ] . Index ]
log . Entry ( ) . Debug ( "Indexes found: " , indexes )
for rep := 0 ; rep < len ( indexes ) ; rep ++ {
newLocations = append ( newLocations , sarif . Runs [ 0 ] . ThreadFlowLocations [ indexes [ rep ] - 1 ] )
newLocations [ rep ] . Index = 0 // void index
}
} else {
newLocations = append ( newLocations , sarif . Runs [ 0 ] . Results [ i ] . CodeFlows [ cf ] . ThreadFlows [ tf ] . Locations [ j ] )
}
}
sarif . Runs [ 0 ] . Results [ i ] . CodeFlows [ cf ] . ThreadFlows [ tf ] . Locations = newLocations
}
}
}
	// ThreadFlowLocations is no longer useful: voiding it makes for smaller reports
	sarif.Runs[0].ThreadFlowLocations = []format.Locations{}
	// Add a conversion object to highlight this isn't native SARIF
	conversion := new(format.Conversion)
	conversion.Tool.Driver.Name = "Piper FPR to SARIF converter"
	conversion.Tool.Driver.InformationUri = "https://github.com/SAP/jenkins-library"
	conversion.Invocation.ExecutionSuccessful = true
	conversion.Invocation.StartTimeUtc = start.Format("2006-01-02T15:04:05.000Z") // "YYYY-MM-DDThh:mm:ss.sssZ"
	conversion.Invocation.Machine = fvdl.EngineData.MachineInfo.Hostname
	conversion.Invocation.Account = fvdl.EngineData.MachineInfo.Username
	convInvocProp := new(format.InvocationProperties)
	convInvocProp.Platform = fvdl.EngineData.MachineInfo.Platform
	conversion.Invocation.Properties = convInvocProp
	sarif.Runs[0].Conversion = conversion
	// Handle taxonomies
	// Only one is used: CWE. It is hard-coded here
	taxonomy := *new(format.Taxonomies)
	taxonomy.GUID = "25F72D7E-8A92-459D-AD67-64853F788765"
	taxonomy.Name = "CWE"
	taxonomy.Organization = "MITRE"
	taxonomy.ShortDescription.Text = "The MITRE Common Weakness Enumeration"
	for key := range cweIdsForTaxonomies {
		taxa := *new(format.Taxa)
		taxa.Id = key
		taxonomy.Taxa = append(taxonomy.Taxa, taxa)
	}
	sarif.Runs[0].Taxonomies = append(sarif.Runs[0].Taxonomies, taxonomy)
	return sarif, nil
}
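
// integrateAuditData enriches the SARIF rule properties of a single issue with
// audit information (category, severity, analysis state and latest comment)
// fetched from the Fortify backend.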
func integrateAuditData(ruleProp *format.SarifProperties, issueInstanceID string, sys System, project *models.Project, projectVersion *models.ProjectVersion, auditData []*models.ProjectVersionIssue, filterSet *models.FilterSet, oneRequestPerIssue bool, maxretries int) error {
	// Set default values
	ruleProp.Audited = false
	ruleProp.FortifyCategory = "Unknown"
	ruleProp.ToolSeverity = "Unknown"
	ruleProp.ToolState = "Unknown"
	ruleProp.ToolAuditMessage = "Error fetching audit state" // We set this as default for the error phase, then reset it to nothing
	ruleProp.ToolSeverityIndex = 0
	ruleProp.ToolStateIndex = 0
	// These default values allow the property bag to be filled even if an error happens later. They should all be overwritten in the normal course of the program.
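	// maxretries == 0 means the retry budget has just been exhausted: report it once.
	// A negative value means the exhaustion was already reported, so stay quiet.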
	if maxretries == 0 {
		// Max retries reached, we stop there to avoid a longer execution time
		err := errors.New("request failed: maximum number of retries reached, placeholder values will be set from now on for audit data")
		return err
	} else if maxretries < 0 {
		return nil // Avoid spamming logfile
	}
	if sys == nil {
		ruleProp.ToolAuditMessage = "Cannot fetch audit state: no sys instance"
		err := errors.New("no system instance, lookup impossible for " + issueInstanceID)
		return err
	}
	if project == nil || projectVersion == nil {
		err := errors.New("project or projectVersion is undefined: lookup aborted for " + issueInstanceID)
		return err
	}
	// Reset the audit message
	ruleProp.ToolAuditMessage = ""
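	// Two lookup modes: either query the backend once per issue, or search the
	// pre-fetched auditData slice for the matching issue instance ID.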
	var data []*models.ProjectVersionIssue
	var err error
	if oneRequestPerIssue {
		log.Entry().Debug("operating in one-request-per-issue mode: looking up audit state of " + issueInstanceID)
		data, err = sys.GetIssueDetails(projectVersion.ID, issueInstanceID)
		if err != nil {
			return err
		}
	} else {
		for i := 0; i < len(auditData); i++ {
			if issueInstanceID == *auditData[i].IssueInstanceID {
				data = append(data, auditData[i])
				break
			}
		}
	}
	if len(data) != 1 { // issueInstanceID is supposedly unique, so len(data) must be 1
		return errors.New("not exactly 1 issue found, found " + fmt.Sprint(len(data)))
	}
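	// Derive the Fortify category from the folder the issue is filed under in the
	// provided filter set.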
	if filterSet != nil {
		for i := 0; i < len(filterSet.Folders); i++ {
			if filterSet.Folders[i].GUID == *data[0].FolderGUID {
				ruleProp.FortifyCategory = filterSet.Folders[i].Name
				break
			}
		}
	} else {
		err := errors.New("no filter set defined, category will be missing from " + issueInstanceID)
		return err
	}
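	// Map the Fortify priority (Friority) onto a numeric severity index, higher meaning more severe.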
	ruleProp.Audited = data[0].Audited
	ruleProp.ToolSeverity = *data[0].Friority
	switch ruleProp.ToolSeverity {
	case "Critical":
		ruleProp.ToolSeverityIndex = 5
	case "Urgent":
		ruleProp.ToolSeverityIndex = 4
	case "High":
		ruleProp.ToolSeverityIndex = 3
	case "Medium":
		ruleProp.ToolSeverityIndex = 2
	case "Low":
		ruleProp.ToolSeverityIndex = 1
	}
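	// For audited issues the primary tag carries the analysis state, which is likewise
	// mapped onto a numeric index. Unaudited issues are reported as "Unreviewed".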
	if ruleProp.Audited {
		ruleProp.ToolState = *data[0].PrimaryTag
		switch ruleProp.ToolState { // The state index is not part of the response, so map it manually
		case "Exploitable":
			ruleProp.ToolStateIndex = 5
		case "Suspicious":
			ruleProp.ToolStateIndex = 4
		case "Bad Practice":
			ruleProp.ToolStateIndex = 3
		case "Reliability Issue":
			ruleProp.ToolStateIndex = 2
		case "Not an Issue":
			ruleProp.ToolStateIndex = 1
		}
	} else {
		ruleProp.ToolState = "Unreviewed"
	}
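	// If the issue carries comments, the latest one becomes the audit message.
	// Comments are returned XML-escaped, hence the call to unescapeXML.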
	if *data[0].HasComments { //fetch latest message if comments exist
		//Fetch the ID
		parentID := data[0].ID
		commentData, err := sys.GetIssueComments(parentID)
		if err != nil {
			return err
		}
		ruleProp.ToolAuditMessage = unescapeXML(*commentData[0].Comment)
	}
	return nil
}

// handleSnippet extracts the relevant token from a snippet, based on the action type reported by Fortify
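// For example, call-type snippets such as "foo(bar)" reduce to "foo"; "Assign" snippets
// starting with "Assignment" keep their third word; "Read" snippets keep their second word.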
func handleSnippet(snippetType string, snippet string) string {
	snippetTarget := ""
	switch snippetType {
	case "Assign":
		snippetWords := strings.Split(snippet, " ")
		if snippetWords[0] == "Assignment" {
			snippetTarget = snippetWords[2]
		} else {
			snippetTarget = snippet
		}
	case "InCall", "OutCall", "InOutCall":
		snippetTarget = strings.Split(snippet, "(")[0]
	case "Return":
		snippetTarget = snippet
	case "Read":
		snippetWords := strings.Split(snippet, " ")
		if len(snippetWords) > 1 {
			snippetTarget = " " + snippetWords[1]
		} else {
			snippetTarget = snippetWords[0]
		}
	default:
		snippetTarget = snippet
	}
	return snippetTarget
}
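
// unescapeXML reverses the XML entity escaping generated by Unmarshal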
func unescapeXML(input string) string {
	raw := input
	// Post-treat string to change the XML escaping generated by Unmarshal
	raw = strings.ReplaceAll(raw, "&amp;", "&")
	raw = strings.ReplaceAll(raw, "&lt;", "<")
	raw = strings.ReplaceAll(raw, "&gt;", ">")
	raw = strings.ReplaceAll(raw, "&apos;", "'")
	raw = strings.ReplaceAll(raw, "&quot;", "\"")
	return raw
}

// computeLocationPath builds a reference array of indexes for the successors of each node in the UnifiedNodePool
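// The input is a 1-based index into fvdl.UnifiedNodePool.Node; the result contains the
// node itself followed, depth-first, by the paths of all of its successors.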
func computeLocationPath(fvdl FVDL, input int) []int {
	log.Entry().Debug("Computing for ID ", input)
	// Find the successors of input
	var subnodes []int
	var result []int
	for j := 0; j < len(fvdl.UnifiedNodePool.Node[input-1].Reason.Trace.Primary.Entry); j++ {
		if fvdl.UnifiedNodePool.Node[input-1].Reason.Trace.Primary.Entry[j].NodeRef.RefId != 0 && fvdl.UnifiedNodePool.Node[input-1].Reason.Trace.Primary.Entry[j].NodeRef.RefId != (input-1) {
			subnodes = append(subnodes, fvdl.UnifiedNodePool.Node[input-1].Reason.Trace.Primary.Entry[j].NodeRef.RefId+1)
		}
	}
	result = append(result, input)
	log.Entry().Debug("Successors: ", subnodes)
	for j := 0; j < len(subnodes); j++ {
		result = append(result, computeLocationPath(fvdl, subnodes[j])...)
	}
	log.Entry().Debug("Finishing computing for ID ", input)
	return result
}