
feat(gcs): allow upload to gcs from steps (#3034)

* Upload reports to Google Cloud Storage bucket

* Added tests. Made fixes

* Update step generation. GCS client was moved to GeneralConfig

* Code was refactored

* Fixed issues

* Fixed issues

* Code correction due to PR comments

* Improved gcs client and integration tests

* Integrated gcp config. Updated step metadata

* Fixed issues. Added tests

* Added cpe, vault, aliases resolving for reporting parameters

* Added tests

* Uncommented DeferExitHandler. Removed useless comments

* fixed cloning of config

* Added comments for exported functions. Removed unused mock

* minor fix

* Implemented setting of report name via paramRef

* some refactoring. Writing tests

* Update pkg/config/reporting.go

* Update cmd/sonarExecuteScan_generated.go

* Apply suggestions from code review

* Update pkg/config/reporting.go

* Update pkg/config/reporting.go

* fixed removing vault secret files

* Update pkg/config/reporting.go

* restore order

* restore order

* Apply suggestions from code review

* go generate

* fixed tests

* Update resources/metadata/sonarExecuteScan.yaml

* Update resources.go

* Fixed tests. Code was regenerated

* changed somewhere gcp to gcs. Fixed one test

* move gcsSubFolder to input parameters

* fixed removing vault secret files

* minor fix in integration tests

* fix integration tests

Co-authored-by: Oliver Nocon <33484802+OliverNocon@users.noreply.github.com>
Co-authored-by: Christopher Fenner <26137398+CCFenner@users.noreply.github.com>
Co-authored-by: Sven Merk <33895725+nevskrem@users.noreply.github.com>
Siarhei Pazdniakou 2021-12-15 17:07:47 +03:00 committed by GitHub
parent b7e1d28675
commit cd243ee542
65 changed files with 1645 additions and 79 deletions
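
For orientation: the heart of this change is that generated step code now uploads declared report files to a Google Cloud Storage bucket from its exit handler, driven by the new gcpJsonKeyFilePath, gcsFolderPath, gcsBucketId and gcsSubFolder parameters. A minimal, hedged sketch of that flow outside the generated code, assuming only the pkg/gcs API introduced in this commit (NewClient, ReportOutputParam, PersistReportsToGCS) and placeholder key/bucket/path values, could look like this:

package main

import (
	"os"

	"github.com/SAP/jenkins-library/pkg/gcs"
	"github.com/SAP/jenkins-library/pkg/log"
	"github.com/bmatcuk/doublestar"
)

func main() {
	// Report files to upload; pattern and result type mirror the sonarExecuteScan metadata below.
	content := []gcs.ReportOutputParam{
		{FilePattern: "sonarscan.json", ParamRef: "", StepResultType: "sonarqube"},
	}
	// The client authenticates via GOOGLE_APPLICATION_CREDENTIALS pointing at a GCP JSON key file
	// (normally taken from --gcpJsonKeyFilePath; the path here is a placeholder).
	envVars := []gcs.EnvVar{
		{Name: "GOOGLE_APPLICATION_CREDENTIALS", Value: "/path/to/key.json", Modified: false},
	}
	gcsClient, err := gcs.NewClient(gcs.WithEnvVars(envVars))
	if err != nil {
		log.Entry().Fatalf("creation of GCS client failed: %v", err)
	}
	defer gcsClient.Close()

	// Step parameters referenced via ParamRef (none in this sketch).
	inputParameters := map[string]string{}
	// Folder path, bucket id and sub folder would normally come from --gcsFolderPath,
	// --gcsBucketId and --gcsSubFolder; the values below are placeholders.
	if err := gcs.PersistReportsToGCS(gcsClient, content, inputParameters,
		"my/folder/path", "my-bucket", "sonarqube", doublestar.Glob, os.Stat); err != nil {
		log.Entry().Errorf("failed to persist reports: %v", err)
	}
}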


@ -117,8 +117,8 @@ For Terminology refer to the [Scenario Description](https://www.project-piper.io
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -117,8 +117,8 @@ For Terminology refer to the [Scenario Description](https://www.project-piper.io
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -117,8 +117,8 @@ For Terminology refer to the [Scenario Description](https://www.project-piper.io
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -118,8 +118,8 @@ For Terminology refer to the [Scenario Description](https://www.project-piper.io
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -116,8 +116,8 @@ For Terminology refer to the [Scenario Description](https://www.project-piper.io
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -122,8 +122,8 @@ For Terminology refer to the [Scenario Description](https://www.project-piper.io
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -119,8 +119,8 @@ func AbapEnvironmentAssembleConfirmCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -121,8 +121,8 @@ Platform ABAP Environment system and saves the corresponding [SAR archive](https
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -130,8 +130,8 @@ func AbapEnvironmentBuildCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -204,8 +204,8 @@ Define ` + "`" + `buildTool: custom` + "`" + `, ` + "`" + `filePath: <path to yo
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -117,8 +117,8 @@ func BatsExecuteTestsCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
influx.persist(GeneralConfig.EnvRootPath, "influx")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -163,7 +163,6 @@ func initializeConfig(pConfig *config.Config) (*config.Config, error) {
}
}
var flags map[string]interface{}
stepAliase := []config.Alias{}
filter := config.StepFilters{
All: []string{},
General: []string{},
@ -172,8 +171,7 @@ func initializeConfig(pConfig *config.Config) (*config.Config, error) {
Env: []string{},
}
_, err = pConfig.GetStepConfig(flags, "", customConfig, defaultConfig, GeneralConfig.IgnoreCustomDefaults, filter, nil, nil, nil, "", "",
stepAliase)
_, err = pConfig.GetStepConfig(flags, "", customConfig, defaultConfig, GeneralConfig.IgnoreCustomDefaults, filter, config.StepData{}, nil, "", "")
if err != nil {
return nil, errors.Wrap(err, "getting step config failed")
}


@ -242,8 +242,8 @@ thresholds instead of ` + "`" + `percentage` + "`" + ` whereas we strongly recom
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
influx.persist(GeneralConfig.EnvRootPath, "influx")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -162,8 +162,8 @@ func CloudFoundryDeployCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
influx.persist(GeneralConfig.EnvRootPath, "influx")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -122,8 +122,8 @@ func CnbBuildCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -155,8 +155,8 @@ Please configure your BlackDuck server Url using the serverUrl parameter and the
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
influx.persist(GeneralConfig.EnvRootPath, "influx")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -213,8 +213,8 @@ Besides triggering a scan the step verifies the results after they have been upl
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
influx.persist(GeneralConfig.EnvRootPath, "influx")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -126,8 +126,8 @@ You can use the [sample projects](https://github.com/getgauge/gauge-mvn-archetyp
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
influx.persist(GeneralConfig.EnvRootPath, "influx")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -143,7 +143,9 @@ func getConfig() (config.StepConfig, error) {
prepareOutputEnvironment(metadata.Spec.Outputs.Resources, GeneralConfig.EnvRootPath)
resourceParams := metadata.GetResourceParameters(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
envParams := metadata.GetResourceParameters(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
reportingEnvParams := config.ReportingParameters.GetResourceParameters(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
resourceParams := mergeResourceParameters(envParams, reportingEnvParams)
projectConfigFile := getProjectConfigFile(GeneralConfig.CustomConfig)
@ -173,12 +175,11 @@ func getConfig() (config.StepConfig, error) {
var flags map[string]interface{}
params := []config.StepParameters{}
if !configOptions.contextConfig {
params = metadata.Spec.Inputs.Parameters
if configOptions.contextConfig {
metadata.Spec.Inputs.Parameters = []config.StepParameters{}
}
stepConfig, err = myConfig.GetStepConfig(flags, GeneralConfig.ParametersJSON, customConfig, defaultConfig, GeneralConfig.IgnoreCustomDefaults, paramFilter, params, metadata.Spec.Inputs.Secrets, resourceParams, GeneralConfig.StageName, metadata.Metadata.Name, metadata.Metadata.Aliases)
stepConfig, err = myConfig.GetStepConfig(flags, GeneralConfig.ParametersJSON, customConfig, defaultConfig, GeneralConfig.IgnoreCustomDefaults, paramFilter, metadata, resourceParams, GeneralConfig.StageName, metadata.Metadata.Name)
if err != nil {
return stepConfig, errors.Wrap(err, "getting step config failed")
}


@ -112,8 +112,8 @@ func IntegrationArtifactGetMplStatusCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -110,8 +110,8 @@ func IntegrationArtifactGetServiceEndpointCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -115,8 +115,8 @@ func IsChangeInDevelopmentCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit
@ -256,7 +256,7 @@ func isChangeInDevelopmentMetadata() config.StepData {
Name: "commonPipelineEnvironment",
Type: "piperEnvironment",
Parameters: []map[string]interface{}{
{"name": "custom/isChangeInDevelopment"},
{"name": "custom/isChangeInDevelopment", "type": "bool"},
},
},
},


@ -124,8 +124,8 @@ func KanikoExecuteCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -127,8 +127,8 @@ supports ci friendly versioning by flattening the pom before installing.`,
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -134,8 +134,8 @@ func MtaBuildCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -120,8 +120,8 @@ func NewmanExecuteCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
influx.persist(GeneralConfig.EnvRootPath, "influx")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -122,8 +122,8 @@ func NpmExecuteScriptsCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -43,6 +43,10 @@ type GeneralConfigOptions struct {
VaultPath string
HookConfig HookConfiguration
MetaDataResolver func() map[string]config.StepData
GCPJsonKeyFilePath string
GCSFolderPath string
GCSBucketId string
GCSSubFolder string
}
// HookConfiguration contains the configuration for supported hooks, so far Sentry and Splunk are supported.
@ -205,6 +209,10 @@ func addRootFlags(rootCmd *cobra.Command) {
rootCmd.PersistentFlags().StringVar(&GeneralConfig.VaultServerURL, "vaultServerUrl", "", "The vault server which should be used to fetch credentials")
rootCmd.PersistentFlags().StringVar(&GeneralConfig.VaultNamespace, "vaultNamespace", "", "The vault namespace which should be used to fetch credentials")
rootCmd.PersistentFlags().StringVar(&GeneralConfig.VaultPath, "vaultPath", "", "The path which should be used to fetch credentials")
rootCmd.PersistentFlags().StringVar(&GeneralConfig.GCPJsonKeyFilePath, "gcpJsonKeyFilePath", "", "File path to Google Cloud Platform JSON key file")
rootCmd.PersistentFlags().StringVar(&GeneralConfig.GCSFolderPath, "gcsFolderPath", "", "GCS folder path. One of the components of GCS target folder")
rootCmd.PersistentFlags().StringVar(&GeneralConfig.GCSBucketId, "gcsBucketId", "", "Bucket name for Google Cloud Storage")
rootCmd.PersistentFlags().StringVar(&GeneralConfig.GCSSubFolder, "gcsSubFolder", "", "Used to logically separate results of the same step result type")
}
@ -301,7 +309,10 @@ func PrepareConfig(cmd *cobra.Command, metadata *config.StepData, stepName strin
filters.General = append(filters.General, "collectTelemetryData")
filters.Parameters = append(filters.Parameters, "collectTelemetryData")
resourceParams := metadata.GetResourceParameters(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
envParams := metadata.GetResourceParameters(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
reportingEnvParams := config.ReportingParameters.GetResourceParameters(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
resourceParams := mergeResourceParameters(envParams, reportingEnvParams)
flagValues := config.AvailableFlagValues(cmd, &filters)
var myConfig config.Config
@ -358,7 +369,7 @@ func PrepareConfig(cmd *cobra.Command, metadata *config.StepData, stepName strin
defaultConfig = append(defaultConfig, fc)
}
}
stepConfig, err = myConfig.GetStepConfig(flagValues, GeneralConfig.ParametersJSON, customConfig, defaultConfig, GeneralConfig.IgnoreCustomDefaults, filters, metadata.Spec.Inputs.Parameters, metadata.Spec.Inputs.Secrets, resourceParams, GeneralConfig.StageName, stepName, metadata.Metadata.Aliases)
stepConfig, err = myConfig.GetStepConfig(flagValues, GeneralConfig.ParametersJSON, customConfig, defaultConfig, GeneralConfig.IgnoreCustomDefaults, filters, *metadata, resourceParams, GeneralConfig.StageName, stepName)
if verbose, ok := stepConfig.Config["verbose"].(bool); ok && verbose {
log.SetVerbose(verbose)
GeneralConfig.Verbose = verbose
@ -382,6 +393,18 @@ func PrepareConfig(cmd *cobra.Command, metadata *config.StepData, stepName strin
retrieveHookConfig(stepConfig.HookConfig, &GeneralConfig.HookConfig)
if GeneralConfig.GCPJsonKeyFilePath == "" {
GeneralConfig.GCPJsonKeyFilePath, _ = stepConfig.Config["gcpJsonKeyFilePath"].(string)
}
if GeneralConfig.GCSFolderPath == "" {
GeneralConfig.GCSFolderPath, _ = stepConfig.Config["gcsFolderPath"].(string)
}
if GeneralConfig.GCSBucketId == "" {
GeneralConfig.GCSBucketId, _ = stepConfig.Config["gcsBucketId"].(string)
}
if GeneralConfig.GCSSubFolder == "" {
GeneralConfig.GCSSubFolder, _ = stepConfig.Config["gcsSubFolder"].(string)
}
return nil
}
@ -559,3 +582,13 @@ func getProjectConfigFile(name string) string {
}
return name
}
func mergeResourceParameters(resParams ...map[string]interface{}) map[string]interface{} {
result := make(map[string]interface{})
for _, m := range resParams {
for k, v := range m {
result[k] = v
}
}
return result
}


@ -155,8 +155,8 @@ func ProtecodeExecuteScanCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
influx.persist(GeneralConfig.EnvRootPath, "influx")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -9,12 +9,16 @@ import (
"time"
"github.com/SAP/jenkins-library/pkg/config"
"github.com/SAP/jenkins-library/pkg/gcs"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/SAP/jenkins-library/pkg/piperenv"
"github.com/SAP/jenkins-library/pkg/splunk"
"github.com/SAP/jenkins-library/pkg/telemetry"
"github.com/SAP/jenkins-library/pkg/validation"
"github.com/bmatcuk/doublestar"
"github.com/spf13/cobra"
"reflect"
"strings"
)
type sonarExecuteScanOptions struct {
@ -47,6 +51,37 @@ type sonarExecuteScanOptions struct {
M2Path string `json:"m2Path,omitempty"`
}
type sonarExecuteScanReports struct {
}
func (p *sonarExecuteScanReports) persist(stepConfig sonarExecuteScanOptions) {
content := []gcs.ReportOutputParam{
{FilePattern: "sonarscan.json", ParamRef: "", StepResultType: "sonarqube"},
{FilePattern: "sonarExecuteScan_*.json", ParamRef: "", StepResultType: "sonarqube"},
}
envVars := []gcs.EnvVar{
{Name: "GOOGLE_APPLICATION_CREDENTIALS", Value: GeneralConfig.GCPJsonKeyFilePath, Modified: false},
}
gcsClient, err := gcs.NewClient(gcs.WithEnvVars(envVars))
if err != nil {
log.Entry().Errorf("creation of GCS client failed: %v", err)
}
defer gcsClient.Close()
structVal := reflect.ValueOf(&stepConfig).Elem()
inputParameters := map[string]string{}
for i := 0; i < structVal.NumField(); i++ {
field := structVal.Type().Field(i)
if field.Type.String() == "string" {
paramName := strings.Split(field.Tag.Get("json"), ",")
paramValue, _ := structVal.Field(i).Interface().(string)
inputParameters[paramName[0]] = paramValue
}
}
if err := gcs.PersistReportsToGCS(gcsClient, content, inputParameters, GeneralConfig.GCSFolderPath, GeneralConfig.GCSBucketId, GeneralConfig.GCSSubFolder, doublestar.Glob, os.Stat); err != nil {
log.Entry().Errorf("failed to persist reports: %v", err)
}
}
type sonarExecuteScanInflux struct {
step_data struct {
fields struct {
@ -103,6 +138,7 @@ func SonarExecuteScanCommand() *cobra.Command {
metadata := sonarExecuteScanMetadata()
var stepConfig sonarExecuteScanOptions
var startTime time.Time
var reports sonarExecuteScanReports
var influx sonarExecuteScanInflux
var logCollector *log.CollectorHook
var splunkClient *splunk.Splunk
@ -157,8 +193,9 @@ func SonarExecuteScanCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
reports.persist(stepConfig)
influx.persist(GeneralConfig.EnvRootPath, "influx")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit
@ -521,6 +558,14 @@ func sonarExecuteScanMetadata() config.StepData {
},
Outputs: config.StepOutputs{
Resources: []config.StepResources{
{
Name: "reports",
Type: "reports",
Parameters: []map[string]interface{}{
{"filePattern": "sonarscan.json", "type": "sonarqube"},
{"filePattern": "sonarExecuteScan_*.json", "type": "sonarqube"},
},
},
{
Name: "influx",
Type: "influx",


@ -115,8 +115,8 @@ func TerraformExecuteCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit
@ -262,7 +262,7 @@ func terraformExecuteMetadata() config.StepData {
Name: "commonPipelineEnvironment",
Type: "piperEnvironment",
Parameters: []map[string]interface{}{
{"name": "custom/terraformOutputs"},
{"name": "custom/terraformOutputs", "type": "map[string]interface{}"},
},
},
},


@ -111,8 +111,8 @@ It is primarily made for the transportRequestUploadSOLMAN step to provide the ch
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -111,8 +111,8 @@ It is primarily made for the transport request upload steps to provide the trans
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -122,8 +122,8 @@ It processes the results of the ` + "`" + `ui5 build` + "`" + ` command of the S
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -122,8 +122,8 @@ func TransportRequestUploadRFCCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -121,8 +121,8 @@ The application ID specifies how the file needs to be handled on server side.`,
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit


@ -213,9 +213,9 @@ The step uses the so-called WhiteSource Unified Agent. For details please refer
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
influx.persist(GeneralConfig.EnvRootPath, "influx")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit
@ -793,7 +793,7 @@ func whitesourceExecuteScanMetadata() config.StepData {
Name: "commonPipelineEnvironment",
Type: "piperEnvironment",
Parameters: []map[string]interface{}{
{"name": "custom/whitesourceProjectNames"},
{"name": "custom/whitesourceProjectNames", "type": "[]string"},
},
},
{


@ -120,8 +120,8 @@ func XsDeployCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit

go.mod

@ -3,6 +3,7 @@ module github.com/SAP/jenkins-library
go 1.15
require (
cloud.google.com/go/storage v1.10.0
github.com/GoogleContainerTools/container-diff v0.17.0
github.com/Jeffail/gabs/v2 v2.6.1
github.com/Masterminds/sprig v2.22.0+incompatible
@ -48,6 +49,7 @@ require (
github.com/xuri/excelize/v2 v2.4.1
golang.org/x/mod v0.5.1
golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1
google.golang.org/api v0.47.0
gopkg.in/ini.v1 v1.63.2
gopkg.in/yaml.v2 v2.4.0
)

go.sum

@ -41,6 +41,7 @@ cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiy
cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
cloud.google.com/go/storage v1.10.0 h1:STgFzyU5/8miMl0//zKh2aQeTyeaUH3WN9bSUiJ09bA=
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
code.cloudfoundry.org/bytefmt v0.0.0-20180906201452-2aa6f33b730c/go.mod h1:wN/zk7mhREp/oviagqUXY3EwuHhWyOvAdsn5Y4CzOrc=
code.cloudfoundry.org/gofileutils v0.0.0-20170111115228-4d0c80011a0f h1:UrKzEwTgeiff9vxdrfdqxibzpWjxLnuXDI5m6z3GJAk=
@ -888,9 +889,11 @@ github.com/google/gofuzz v0.0.0-20170612174753-24818f796faf/go.mod h1:HP5RmnzzSN
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/gofuzz v1.1.0 h1:Hsa8mG0dQ46ij8Sl2AYJDUv1oA9/d6Vk+3LG99Oe02g=
github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
github.com/google/martian/v3 v3.2.1 h1:d8MncMlErDFTwQGBK1xhv026j9kqhvw1Qv9IbWT1VLQ=
github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
@ -1251,6 +1254,7 @@ github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/
github.com/json-iterator/go v1.1.11 h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ=
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1 h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=


@ -0,0 +1,180 @@
// +build integration
// can be execute with go test -tags=integration ./integration/...
package main
import (
"context"
"crypto/tls"
"errors"
"fmt"
"io"
"io/ioutil"
"net/http"
"path/filepath"
"strings"
"testing"
"github.com/SAP/jenkins-library/pkg/gcs"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/testcontainers/testcontainers-go"
"github.com/testcontainers/testcontainers-go/wait"
"google.golang.org/api/option"
)
func Test_gcsClient(t *testing.T) {
t.Parallel()
ctx := context.Background()
testdataPath, err := filepath.Abs("testdata/TestGCSIntegration")
assert.NoError(t, err)
req := testcontainers.GenericContainerRequest{
ContainerRequest: testcontainers.ContainerRequest{
AlwaysPullImage: true,
Image: "fsouza/fake-gcs-server:1.30.2",
ExposedPorts: []string{"4443/tcp"},
WaitingFor: wait.ForListeningPort("4443/tcp"),
Cmd: []string{"-scheme", "https", "-public-host", "localhost"},
BindMounts: map[string]string{
testdataPath: "/data",
},
},
Started: true,
}
gcsContainer, err := testcontainers.GenericContainer(ctx, req)
require.NoError(t, err)
defer gcsContainer.Terminate(ctx)
ip, err := gcsContainer.Host(ctx)
require.NoError(t, err)
port, err := gcsContainer.MappedPort(ctx, "4443")
endpoint := fmt.Sprintf("https://%s:%s/storage/v1/", ip, port.Port())
httpclient := http.Client{
Transport: &http.Transport{
TLSClientConfig: &tls.Config{
InsecureSkipVerify: true,
},
},
}
t.Run("Test list files - success", func(t *testing.T) {
bucketID := "sample-bucket"
gcsClient, err := gcs.NewClient(gcs.WithClientOptions(option.WithEndpoint(endpoint), option.WithoutAuthentication(), option.WithHTTPClient(&httpclient)))
assert.NoError(t, err)
fileNames, err := gcsClient.ListFiles(bucketID)
assert.NoError(t, err)
assert.Equal(t, []string{"dir/test_file2.yaml", "test_file.txt"}, fileNames)
err = gcsClient.Close()
assert.NoError(t, err)
})
t.Run("Test list files in missing bucket", func(t *testing.T) {
bucketID := "missing-bucket"
gcsClient, err := gcs.NewClient(gcs.WithClientOptions(option.WithEndpoint(endpoint), option.WithoutAuthentication(), option.WithHTTPClient(&httpclient)))
defer gcsClient.Close()
assert.NoError(t, err)
_, err = gcsClient.ListFiles(bucketID)
assert.Error(t, err, "bucket doesn't exist")
err = gcsClient.Close()
assert.NoError(t, err)
})
t.Run("Test upload & download files - success", func(t *testing.T) {
bucketID := "upload-bucket"
file1Reader, file1Writer := io.Pipe()
file2Reader, file2Writer := io.Pipe()
gcsClient, err := gcs.NewClient(gcs.WithOpenFileFunction(openFileMock), gcs.WithCreateFileFunction(getCreateFileMock(file1Writer, file2Writer)),
gcs.WithClientOptions(option.WithEndpoint(endpoint), option.WithoutAuthentication(), option.WithHTTPClient(&httpclient)))
assert.NoError(t, err)
err = gcsClient.UploadFile(bucketID, "file1", "test/file1")
assert.NoError(t, err)
err = gcsClient.UploadFile(bucketID, "folder/file2", "test/folder/file2")
assert.NoError(t, err)
fileNames, err := gcsClient.ListFiles(bucketID)
assert.NoError(t, err)
assert.Equal(t, []string{"placeholder", "test/file1", "test/folder/file2"}, fileNames)
go gcsClient.DownloadFile(bucketID, "test/file1", "file1")
fileContent, err := ioutil.ReadAll(file1Reader)
assert.NoError(t, err)
assert.Equal(t, file1Content, string(fileContent))
go gcsClient.DownloadFile(bucketID, "test/folder/file2", "file2")
fileContent, err = ioutil.ReadAll(file2Reader)
assert.NoError(t, err)
assert.Equal(t, file2Content, string(fileContent))
err = gcsClient.Close()
assert.NoError(t, err)
})
t.Run("Test upload missing file", func(t *testing.T) {
bucketID := "upload-bucket"
gcsClient, err := gcs.NewClient(gcs.WithOpenFileFunction(openFileMock),
gcs.WithClientOptions(option.WithEndpoint(endpoint), option.WithoutAuthentication(), option.WithHTTPClient(&httpclient)))
assert.NoError(t, err)
err = gcsClient.UploadFile(bucketID, "file3", "test/file3")
assert.Contains(t, err.Error(), "could not open source file")
err = gcsClient.Close()
assert.NoError(t, err)
})
t.Run("Test download missing file", func(t *testing.T) {
bucketID := "upload-bucket"
gcsClient, err := gcs.NewClient(gcs.WithOpenFileFunction(openFileMock),
gcs.WithClientOptions(option.WithEndpoint(endpoint), option.WithoutAuthentication(), option.WithHTTPClient(&httpclient)))
assert.NoError(t, err)
err = gcsClient.DownloadFile(bucketID, "test/file3", "file3")
assert.Contains(t, err.Error(), "could not open source file")
err = gcsClient.Close()
assert.NoError(t, err)
})
t.Run("Test download file - failed file creation", func(t *testing.T) {
bucketID := "upload-bucket"
_, file1Writer := io.Pipe()
_, file2Writer := io.Pipe()
gcsClient, err := gcs.NewClient(gcs.WithOpenFileFunction(openFileMock), gcs.WithCreateFileFunction(getCreateFileMock(file1Writer, file2Writer)),
gcs.WithClientOptions(option.WithEndpoint(endpoint), option.WithoutAuthentication(), option.WithHTTPClient(&httpclient)))
assert.NoError(t, err)
err = gcsClient.DownloadFile(bucketID, "placeholder", "file3")
assert.Contains(t, err.Error(), "could not create target file")
err = gcsClient.Close()
assert.NoError(t, err)
})
}
const (
file1Content = `test file`
file2Content = `
foo : bar
pleh : help
stuff : {'foo': 'bar', 'bar': 'foo'}
`
)
func openFileMock(name string) (io.ReadCloser, error) {
var fileContent string
switch name {
case "file1":
fileContent = file1Content
case "folder/file2":
fileContent = file2Content
default:
return nil, errors.New("open file faled")
}
return ioutil.NopCloser(strings.NewReader(fileContent)), nil
}
func getCreateFileMock(file1Writer io.WriteCloser, file2Writer io.WriteCloser) func(name string) (io.WriteCloser, error) {
return func(name string) (io.WriteCloser, error) {
switch name {
case "file1":
return file1Writer, nil
case "file2":
return file2Writer, nil
default:
return nil, errors.New("could not create target file")
}
}
}


@ -0,0 +1,3 @@
foo : bar
pleh : help
stuff : {'foo': 'bar', 'bar': 'foo'}


@ -0,0 +1 @@
test file


@ -166,7 +166,11 @@ func (c *Config) InitializeConfig(configuration io.ReadCloser, defaults []io.Rea
}
// GetStepConfig provides merged step configuration using defaults, config, if available
func (c *Config) GetStepConfig(flagValues map[string]interface{}, paramJSON string, configuration io.ReadCloser, defaults []io.ReadCloser, ignoreCustomDefaults bool, filters StepFilters, parameters []StepParameters, secrets []StepSecrets, envParameters map[string]interface{}, stageName, stepName string, stepAliases []Alias) (StepConfig, error) {
func (c *Config) GetStepConfig(flagValues map[string]interface{}, paramJSON string, configuration io.ReadCloser, defaults []io.ReadCloser, ignoreCustomDefaults bool, filters StepFilters, metadata StepData, envParameters map[string]interface{}, stageName, stepName string) (StepConfig, error) {
parameters := metadata.Spec.Inputs.Parameters
secrets := metadata.Spec.Inputs.Secrets
stepAliases := metadata.Metadata.Aliases
var stepConfig StepConfig
var err error
@ -184,6 +188,7 @@ func (c *Config) GetStepConfig(flagValues map[string]interface{}, paramJSON stri
// merge parameters provided by Piper environment
stepConfig.mixIn(envParameters, filters.All)
stepConfig.mixIn(envParameters, ReportingParameters.getReportingFilter())
// read defaults & merge general -> steps (-> general -> steps ...)
for _, def := range c.defaults.Defaults {
@ -192,6 +197,13 @@ func (c *Config) GetStepConfig(flagValues map[string]interface{}, paramJSON stri
stepConfig.mixIn(def.Steps[stepName], filters.Steps)
stepConfig.mixIn(def.Stages[stageName], filters.Steps)
stepConfig.mixinVaultConfig(parameters, def.General, def.Steps[stepName], def.Stages[stageName])
reportingConfig, err := cloneConfig(&def)
if err != nil {
return StepConfig{}, err
}
reportingConfig.ApplyAliasConfig(ReportingParameters.Parameters, []StepSecrets{}, ReportingParameters.getStepFilters(), stageName, stepName, []Alias{})
stepConfig.mixinReportingConfig(reportingConfig.General, reportingConfig.Steps[stepName], reportingConfig.Stages[stageName])
stepConfig.mixInHookConfig(def.Hooks)
}
@ -234,6 +246,14 @@ func (c *Config) GetStepConfig(flagValues map[string]interface{}, paramJSON stri
}
stepConfig.mixinVaultConfig(parameters, c.General, c.Steps[stepName], c.Stages[stageName])
reportingConfig, err := cloneConfig(c)
if err != nil {
return StepConfig{}, err
}
reportingConfig.ApplyAliasConfig(ReportingParameters.Parameters, []StepSecrets{}, ReportingParameters.getStepFilters(), stageName, stepName, []Alias{})
stepConfig.mixinReportingConfig(reportingConfig.General, reportingConfig.Steps[stepName], reportingConfig.Stages[stageName])
// check whether vault should be skipped
if skip, ok := stepConfig.Config["skipVault"].(bool); !ok || !skip {
// fetch secrets from vault
@ -243,7 +263,7 @@ func (c *Config) GetStepConfig(flagValues map[string]interface{}, paramJSON stri
}
if vaultClient != nil {
defer vaultClient.MustRevokeToken()
resolveAllVaultReferences(&stepConfig, vaultClient, parameters)
resolveAllVaultReferences(&stepConfig, vaultClient, append(parameters, ReportingParameters.Parameters...))
resolveVaultTestCredentials(&stepConfig, vaultClient)
}
}
@ -312,7 +332,7 @@ func (c *Config) GetStageConfig(paramJSON string, configuration io.ReadCloser, d
Parameters: acceptedParams,
Env: []string{},
}
return c.GetStepConfig(map[string]interface{}{}, paramJSON, configuration, defaults, ignoreCustomDefaults, filters, []StepParameters{}, []StepSecrets{}, map[string]interface{}{}, stageName, "", []Alias{})
return c.GetStepConfig(map[string]interface{}{}, paramJSON, configuration, defaults, ignoreCustomDefaults, filters, StepData{}, map[string]interface{}{}, stageName, "")
}
// GetJSON returns JSON representation of an object
@ -479,3 +499,17 @@ func sliceContains(slice []string, find string) bool {
}
return false
}
func cloneConfig(config *Config) (*Config, error) {
configJSON, err := json.Marshal(config)
if err != nil {
return nil, err
}
clone := &Config{}
if err = json.Unmarshal(configJSON, &clone); err != nil {
return nil, err
}
return clone, nil
}


@ -165,7 +165,19 @@ steps:
},
}
stepMeta := StepData{Spec: StepSpec{Inputs: StepInputs{Parameters: parameterMetadata, Secrets: secretMetadata}}}
stepAliases := []Alias{{Name: "stepAlias"}}
stepMeta := StepData{
Spec: StepSpec{
Inputs: StepInputs{
Parameters: parameterMetadata,
Secrets: secretMetadata,
},
},
Metadata: StepMetadata{
Aliases: stepAliases,
},
}
dir, err := ioutil.TempDir("", "")
if err != nil {
@ -177,8 +189,7 @@ steps:
piperenv.SetParameter(filepath.Join(dir, "commonPipelineEnvironment"), "test_pe1", "pe1_val")
stepAliases := []Alias{{Name: "stepAlias"}}
stepConfig, err := c.GetStepConfig(flags, paramJSON, myConfig, defaults, false, filters, parameterMetadata, secretMetadata, stepMeta.GetResourceParameters(dir, "commonPipelineEnvironment"), "stage1", "step1", stepAliases)
stepConfig, err := c.GetStepConfig(flags, paramJSON, myConfig, defaults, false, filters, stepMeta, stepMeta.GetResourceParameters(dir, "commonPipelineEnvironment"), "stage1", "step1")
assert.Equal(t, nil, err, "error occurred but none expected")
@ -246,7 +257,7 @@ steps:
// clean up tmp dir
defer os.RemoveAll(dir)
stepConfig, err := c.GetStepConfig(map[string]interface{}{}, "", myConfig, defaults, false, filters, []StepParameters{}, []StepSecrets{}, stepMeta.GetResourceParameters(dir, "commonPipelineEnvironment"), "stage1", "step1", []Alias{})
stepConfig, err := c.GetStepConfig(map[string]interface{}{}, "", myConfig, defaults, false, filters, stepMeta, stepMeta.GetResourceParameters(dir, "commonPipelineEnvironment"), "stage1", "step1")
assert.Equal(t, nil, err, "error occurred but none expected")
@ -259,7 +270,7 @@ steps:
c.openFile = customDefaultsOpenFileMock
stepConfig, err := c.GetStepConfig(nil, "", ioutil.NopCloser(strings.NewReader(testConfDefaults)), nil, false, StepFilters{General: []string{"p0"}}, []StepParameters{}, nil, nil, "stage1", "step1", []Alias{})
stepConfig, err := c.GetStepConfig(nil, "", ioutil.NopCloser(strings.NewReader(testConfDefaults)), nil, false, StepFilters{General: []string{"p0"}}, StepData{}, nil, "stage1", "step1")
assert.NoError(t, err, "Error occurred but no error expected")
assert.Equal(t, "p0_custom_default", stepConfig.Config["p0"])
@ -273,7 +284,7 @@ steps:
c.openFile = customDefaultsOpenFileMock
stepConfig, err := c.GetStepConfig(nil, "", ioutil.NopCloser(strings.NewReader(testConfDefaults)), nil, true, StepFilters{General: []string{"p0"}}, []StepParameters{}, nil, nil, "stage1", "step1", []Alias{})
stepConfig, err := c.GetStepConfig(nil, "", ioutil.NopCloser(strings.NewReader(testConfDefaults)), nil, true, StepFilters{General: []string{"p0"}}, StepData{}, nil, "stage1", "step1")
assert.NoError(t, err, "Error occurred but no error expected")
assert.Equal(t, nil, stepConfig.Config["p0"])
@ -285,9 +296,16 @@ steps:
var c Config
stepParams := []StepParameters{{Name: "p0", Scope: []string{"GENERAL"}, Type: "string", Default: "p0_step_default", Aliases: []Alias{{Name: "p0_alias"}}}}
metadata := StepData{
Spec: StepSpec{
Inputs: StepInputs{
Parameters: stepParams,
},
},
}
testConf := "general:\n p1: p1_conf"
stepConfig, err := c.GetStepConfig(nil, "", ioutil.NopCloser(strings.NewReader(testConf)), nil, false, StepFilters{General: []string{"p0", "p1"}}, stepParams, nil, nil, "stage1", "step1", []Alias{})
stepConfig, err := c.GetStepConfig(nil, "", ioutil.NopCloser(strings.NewReader(testConf)), nil, false, StepFilters{General: []string{"p0", "p1"}}, metadata, nil, "stage1", "step1")
assert.NoError(t, err, "Error occurred but no error expected")
assert.Equal(t, "p0_step_default", stepConfig.Config["p0"])
@ -300,9 +318,16 @@ steps:
stepParams := []StepParameters{
{Name: "p0", Scope: []string{"GENERAL"}, Type: "bool", Aliases: []Alias{}},
{Name: "p1", Scope: []string{"GENERAL"}, Type: "string", Aliases: []Alias{{Name: "p0/subParam"}}}}
metadata := StepData{
Spec: StepSpec{
Inputs: StepInputs{
Parameters: stepParams,
},
},
}
testConf := "general:\n p0: true"
stepConfig, err := c.GetStepConfig(nil, "", ioutil.NopCloser(strings.NewReader(testConf)), nil, false, StepFilters{General: []string{"p0", "p1"}}, stepParams, nil, nil, "stage1", "step1", []Alias{{}})
stepConfig, err := c.GetStepConfig(nil, "", ioutil.NopCloser(strings.NewReader(testConf)), nil, false, StepFilters{General: []string{"p0", "p1"}}, metadata, nil, "stage1", "step1")
assert.NoError(t, err, "Error occurred but no error expected")
assert.Equal(t, true, stepConfig.Config["p0"])
@ -314,10 +339,17 @@ steps:
secrets := []StepSecrets{
{Name: "p0", Type: "string", Aliases: []Alias{{Name: "p1/subParam"}}}}
metadata := StepData{
Spec: StepSpec{
Inputs: StepInputs{
Secrets: secrets,
},
},
}
testConf := ""
paramJSON := "{\"p1\":{\"subParam\":\"p1_value\"}}"
stepConfig, err := c.GetStepConfig(nil, paramJSON, ioutil.NopCloser(strings.NewReader(testConf)), nil, true, StepFilters{Parameters: []string{"p0"}}, nil, secrets, nil, "stage1", "step1", []Alias{{}})
stepConfig, err := c.GetStepConfig(nil, paramJSON, ioutil.NopCloser(strings.NewReader(testConf)), nil, true, StepFilters{Parameters: []string{"p0"}}, metadata, nil, "stage1", "step1")
assert.NoError(t, err, "Error occurred but no error expected")
assert.Equal(t, "p1_value", stepConfig.Config["p0"])
@ -326,7 +358,7 @@ steps:
t.Run("Failure case config", func(t *testing.T) {
var c Config
myConfig := ioutil.NopCloser(strings.NewReader("invalid config"))
_, err := c.GetStepConfig(nil, "", myConfig, nil, false, StepFilters{}, []StepParameters{}, nil, nil, "stage1", "step1", []Alias{})
_, err := c.GetStepConfig(nil, "", myConfig, nil, false, StepFilters{}, StepData{}, nil, "stage1", "step1")
assert.EqualError(t, err, "failed to parse custom pipeline configuration: format of configuration is invalid \"invalid config\": error unmarshaling JSON: while decoding JSON: json: cannot unmarshal string into Go value of type config.Config", "default error expected")
})
@ -334,10 +366,47 @@ steps:
var c Config
myConfig := ioutil.NopCloser(strings.NewReader(""))
myDefaults := []io.ReadCloser{ioutil.NopCloser(strings.NewReader("invalid defaults"))}
_, err := c.GetStepConfig(nil, "", myConfig, myDefaults, false, StepFilters{}, []StepParameters{}, nil, nil, "stage1", "step1", []Alias{})
_, err := c.GetStepConfig(nil, "", myConfig, myDefaults, false, StepFilters{}, StepData{}, nil, "stage1", "step1")
assert.EqualError(t, err, "failed to read default configuration: error unmarshalling \"invalid defaults\": error unmarshaling JSON: while decoding JSON: json: cannot unmarshal string into Go value of type config.Config", "default error expected")
})
t.Run("Test reporting parameters with aliases and cpe resources", func(t *testing.T) {
var c Config
testConfig := ioutil.NopCloser(strings.NewReader(`general:
gcpJsonKeyFilePath: gcpJsonKeyFilePath_value
steps:
step1:
jsonKeyFilePath: gcpJsonKeyFilePath_from_alias`))
testDefaults := []io.ReadCloser{ioutil.NopCloser(strings.NewReader(`general:
pipelineId: gcsBucketId_from_alias
steps:
step1:
gcsBucketId: gcsBucketId_value`))}
dir, err := ioutil.TempDir("", "")
if err != nil {
t.Fatal("Failed to create temporary directory")
}
// clean up tmp dir
defer os.RemoveAll(dir)
cpeDir := filepath.Join(dir, "commonPipelineEnvironment/custom")
err = os.MkdirAll(cpeDir, 0700)
if err != nil {
t.Fatal("Failed to create sub directory")
}
err = ioutil.WriteFile(filepath.Join(cpeDir, "gcsFolderPath.json"), []byte("\"value_from_cpe\""), 0700)
assert.NoError(t, err)
stepMeta := StepData{Spec: StepSpec{Inputs: StepInputs{Parameters: []StepParameters{}}}}
stepConfig, err := c.GetStepConfig(nil, "", testConfig, testDefaults, false, StepFilters{General: []string{"p0", "p1"}}, stepMeta, ReportingParameters.GetResourceParameters(dir, "commonPipelineEnvironment"), "stage1", "step1")
assert.NoError(t, err, "Error occurred but no error expected")
assert.Equal(t, "gcpJsonKeyFilePath_from_alias", stepConfig.Config["gcpJsonKeyFilePath"])
assert.Equal(t, "gcsBucketId_value", stepConfig.Config["gcsBucketId"])
assert.Equal(t, "value_from_cpe", stepConfig.Config["gcsFolderPath"])
})
//ToDo: test merging of env and parameters/flags
}
@ -910,3 +979,26 @@ func TestMixInStepDefaults(t *testing.T) {
assert.Equal(t, test.expected, test.stepConfig.Config, test.name)
}
}
func TestCloneConfig(t *testing.T) {
testConfig := &Config{
General: map[string]interface{}{
"p0": "p0_general",
},
Stages: map[string]map[string]interface{}{
"stage1": {
"p1": "p1_stage",
},
},
Steps: map[string]map[string]interface{}{
"step1": {
"p2": "p2_step",
},
},
}
clone, err := cloneConfig(testConfig)
assert.NoError(t, err)
assert.Equal(t, testConfig, clone)
testConfig.General["p0"] = "new_value"
assert.NotEqual(t, testConfig.General, clone.General)
}

pkg/config/reporting.go (new file)

@ -0,0 +1,81 @@
package config
// ReportingParams holds reporting parameters
type ReportingParams struct {
Parameters []StepParameters
}
// ReportingParameters is a global variable with values of reporting parameters
var ReportingParameters = ReportingParams{
Parameters: []StepParameters{
{
Name: "gcpJsonKeyFilePath",
Aliases: []Alias{{Name: "jsonKeyFilePath"}},
ResourceRef: []ResourceReference{
{
Name: "gcpFileVaultSecretName",
Type: "vaultSecretFile",
Default: "gcp",
},
},
},
{
Name: "gcsFolderPath",
ResourceRef: []ResourceReference{
{
Name: "commonPipelineEnvironment",
Param: "custom/gcsFolderPath",
},
},
},
{
Name: "gcsBucketId",
Aliases: []Alias{{Name: "pipelineId"}},
},
{
Name: "gcsSubFolder",
},
},
}
// GetResourceParameters retrieves reporting parameters from a named pipeline resource with a defined path
func (r ReportingParams) GetResourceParameters(path, name string) map[string]interface{} {
resourceParams := map[string]interface{}{}
for _, param := range r.Parameters {
for _, res := range param.ResourceRef {
if res.Name == name {
if val := getParameterValue(path, res, param); val != nil {
resourceParams[param.Name] = val
}
}
}
}
return resourceParams
}
func (r ReportingParams) getStepFilters() StepFilters {
var filters StepFilters
reportingFilter := r.getReportingFilter()
filters.All = append(filters.All, reportingFilter...)
filters.General = append(filters.General, reportingFilter...)
filters.Steps = append(filters.Steps, reportingFilter...)
filters.Stages = append(filters.Stages, reportingFilter...)
return filters
}
func (r ReportingParams) getReportingFilter() []string {
var reportingFilter []string
for _, param := range r.Parameters {
reportingFilter = append(reportingFilter, param.Name)
}
reportingFilter = append(reportingFilter, getFilterForResourceReferences(r.Parameters)...)
return reportingFilter
}
func (s *StepConfig) mixinReportingConfig(configs ...map[string]interface{}) {
reportingFilter := ReportingParameters.getReportingFilter()
for _, config := range configs {
s.mixIn(config, reportingFilter)
}
}
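
The commonPipelineEnvironment resource reference on gcsFolderPath above means that a value an earlier step wrote to custom/gcsFolderPath is picked up automatically as the reporting parameter. A small sketch of that resolution, assuming the pipeline-environment file layout used by the tests in this commit, follows:

package main

import (
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"

	"github.com/SAP/jenkins-library/pkg/config"
)

func main() {
	// Temporary pipeline environment root, standing in for GeneralConfig.EnvRootPath.
	dir, err := ioutil.TempDir("", "cpe")
	if err != nil {
		panic(err)
	}
	defer os.RemoveAll(dir)

	// Simulate an earlier step that persisted custom/gcsFolderPath to the common pipeline environment.
	cpeDir := filepath.Join(dir, "commonPipelineEnvironment", "custom")
	if err := os.MkdirAll(cpeDir, 0700); err != nil {
		panic(err)
	}
	if err := ioutil.WriteFile(filepath.Join(cpeDir, "gcsFolderPath.json"), []byte(`"value_from_cpe"`), 0700); err != nil {
		panic(err)
	}

	// ReportingParameters maps custom/gcsFolderPath onto the gcsFolderPath reporting parameter.
	params := config.ReportingParameters.GetResourceParameters(dir, "commonPipelineEnvironment")
	fmt.Println(params["gcsFolderPath"]) // value_from_cpe
}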


@ -0,0 +1,203 @@
package config
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
)
func TestMixinReportingConfig(t *testing.T) {
gcpJsonKeyFilePath := "path/key.json"
gcsFolderPath := "test/folder/path"
gcsBucketID := "testBucketId"
config := StepConfig{
Config: map[string]interface{}{},
HookConfig: nil,
}
general := map[string]interface{}{
"gcpJsonKeyFilePath": gcpJsonKeyFilePath,
"gcsFolderPath": gcsFolderPath,
"gcsBucketId": "generalBucketId",
}
steps := map[string]interface{}{
"gcsBucketId": gcsBucketID,
"unknownConfig": "test",
}
config.mixinReportingConfig(nil, general, steps)
assert.Contains(t, config.Config, "gcpJsonKeyFilePath")
assert.Equal(t, gcpJsonKeyFilePath, config.Config["gcpJsonKeyFilePath"])
assert.Contains(t, config.Config, "gcsFolderPath")
assert.Equal(t, gcsFolderPath, config.Config["gcsFolderPath"])
assert.Contains(t, config.Config, "gcsBucketId")
assert.Equal(t, gcsBucketID, config.Config["gcsBucketId"])
assert.NotContains(t, config.Config, "unknownConfig")
}
func TestReportingParams_GetResourceParameters(t *testing.T) {
tt := []struct {
in ReportingParams
expected map[string]interface{}
}{
{
in: ReportingParams{Parameters: []StepParameters{}},
expected: map[string]interface{}{},
},
{
in: ReportingParams{Parameters: []StepParameters{
{Name: "param1"},
{Name: "param2"},
}},
expected: map[string]interface{}{},
},
{
in: ReportingParams{Parameters: []StepParameters{
{Name: "param1", ResourceRef: []ResourceReference{}},
{Name: "param2", ResourceRef: []ResourceReference{}},
}},
expected: map[string]interface{}{},
},
{
in: ReportingParams{Parameters: []StepParameters{
{Name: "param1", ResourceRef: []ResourceReference{{Name: "notAvailable", Param: "envparam1"}}},
{Name: "param2", ResourceRef: []ResourceReference{{Name: "commonPipelineEnvironment", Param: "envparam2"}}, Type: "string"},
}},
expected: map[string]interface{}{"param2": "val2"},
},
{
in: ReportingParams{Parameters: []StepParameters{
{Name: "param2", ResourceRef: []ResourceReference{{Name: "commonPipelineEnvironment", Param: "envparam2"}}, Type: "string"},
{Name: "param3", ResourceRef: []ResourceReference{{Name: "commonPipelineEnvironment", Param: "jsonList"}}, Type: "[]string"},
}},
expected: map[string]interface{}{"param2": "val2", "param3": []interface{}{"value1", "value2"}},
},
{
in: ReportingParams{Parameters: []StepParameters{
{Name: "param4", ResourceRef: []ResourceReference{{Name: "commonPipelineEnvironment", Param: "jsonKeyValue"}}, Type: "map[string]interface{}"},
}},
expected: map[string]interface{}{"param4": map[string]interface{}{"key": "value"}},
},
{
in: ReportingParams{Parameters: []StepParameters{
{Name: "param1", ResourceRef: []ResourceReference{{Name: "commonPipelineEnvironment", Param: "envparam1"}}, Type: "noString"},
{Name: "param4", ResourceRef: []ResourceReference{{Name: "commonPipelineEnvironment", Param: "jsonKeyValueString"}}, Type: "string"},
}},
expected: map[string]interface{}{"param4": "{\"key\":\"valueString\"}"},
},
}
dir, err := ioutil.TempDir("", "")
if err != nil {
t.Fatal("Failed to create temporary directory")
}
// clean up tmp dir
defer os.RemoveAll(dir)
cpeDir := filepath.Join(dir, "commonPipelineEnvironment")
err = os.MkdirAll(cpeDir, 0700)
if err != nil {
t.Fatal("Failed to create sub directory")
}
ioutil.WriteFile(filepath.Join(cpeDir, "envparam1"), []byte("val1"), 0700)
ioutil.WriteFile(filepath.Join(cpeDir, "envparam2"), []byte("val2"), 0700)
ioutil.WriteFile(filepath.Join(cpeDir, "jsonList.json"), []byte("[\"value1\",\"value2\"]"), 0700)
ioutil.WriteFile(filepath.Join(cpeDir, "jsonKeyValue.json"), []byte("{\"key\":\"value\"}"), 0700)
ioutil.WriteFile(filepath.Join(cpeDir, "jsonKeyValueString"), []byte("{\"key\":\"valueString\"}"), 0700)
for run, test := range tt {
t.Run(fmt.Sprintf("Run %v", run), func(t *testing.T) {
actual := test.in.GetResourceParameters(dir, "commonPipelineEnvironment")
assert.Equal(t, test.expected, actual)
})
}
}
func TestReportingParams_GetStepFilters(t *testing.T) {
tt := []struct {
in ReportingParams
expected StepFilters
}{
{
in: ReportingParams{Parameters: []StepParameters{}},
expected: StepFilters{},
},
{
in: ReportingParams{Parameters: []StepParameters{
{Name: "param1"},
{Name: "param2"},
}},
expected: StepFilters{
All: []string{"param1", "param2"},
General: []string{"param1", "param2"},
Steps: []string{"param1", "param2"},
Stages: []string{"param1", "param2"},
},
},
{
in: ReportingParams{Parameters: []StepParameters{
{Name: "param1"},
{Name: "param2"},
{Name: "param3"},
{Name: "param4"},
{Name: "param5"},
{Name: "param6"},
}},
expected: StepFilters{
All: []string{"param1", "param2", "param3", "param4", "param5", "param6"},
General: []string{"param1", "param2", "param3", "param4", "param5", "param6"},
Steps: []string{"param1", "param2", "param3", "param4", "param5", "param6"},
Stages: []string{"param1", "param2", "param3", "param4", "param5", "param6"},
},
},
}
for run, test := range tt {
t.Run(fmt.Sprintf("Run %v", run), func(t *testing.T) {
actual := test.in.getStepFilters()
assert.Equal(t, test.expected, actual)
})
}
}
func TestReportingParams_GetReportingFilter(t *testing.T) {
tt := []struct {
in ReportingParams
expected []string
}{
{
in: ReportingParams{Parameters: []StepParameters{}},
expected: nil,
},
{
in: ReportingParams{Parameters: []StepParameters{
{Name: "param1"},
{Name: "param2"},
}},
expected: []string{"param1", "param2"},
},
{
in: ReportingParams{Parameters: []StepParameters{
{Name: "param1"},
{Name: "param2"},
{Name: "param3"},
{Name: "param4"},
{Name: "param5"},
{Name: "param6"},
}},
expected: []string{"param1", "param2", "param3", "param4", "param5", "param6"},
},
}
for run, test := range tt {
t.Run(fmt.Sprintf("Run %v", run), func(t *testing.T) {
actual := test.in.getReportingFilter()
assert.Equal(t, test.expected, actual)
})
}
}

View File

@ -127,8 +127,14 @@ func (r *RunConfig) getStepConfig(config *Config, stageName, stepName string, fi
// not considered relevant for pipeline yaml syntax resolution
paramJSON := ""
return config.GetStepConfig(flagValues, paramJSON, nil, nil, false, filters[stepName], parameters[stepName], secrets[stepName],
envParameters, stageName, stepName, stepAliases[stepName])
stepMeta := StepData{
Spec: StepSpec{
Inputs: StepInputs{Parameters: parameters[stepName], Secrets: secrets[stepName]},
},
Metadata: StepMetadata{Aliases: stepAliases[stepName]},
}
return config.GetStepConfig(flagValues, paramJSON, nil, nil, false, filters[stepName], stepMeta, envParameters, stageName, stepName)
}
func (r *RunConfig) loadConditions() error {

View File

@ -188,7 +188,7 @@ stages:
filter := StepFilters{All: []string{}, General: []string{}, Stages: []string{}, Steps: []string{}, Env: []string{}}
projectConfig := Config{}
_, err := projectConfig.GetStepConfig(map[string]interface{}{}, "", tt.customConfig,
[]io.ReadCloser{}, false, filter, nil, nil, nil, "", "", []Alias{})
[]io.ReadCloser{}, false, filter, StepData{}, nil, "", "")
assert.NoError(t, err)
err = runConfig.InitRunConfig(&projectConfig, nil, nil, nil, nil, initRunConfigGlobMock, nil)
if tt.wantErr {

View File

@ -48,12 +48,10 @@ func readStepConfiguration(stepMetadata config.StepData, customDefaultFiles []st
defaultFiles,
false,
filters,
stepMetadata.Spec.Inputs.Parameters,
stepMetadata.Spec.Inputs.Secrets,
stepMetadata,
map[string]interface{}{},
"",
stepMetadata.Metadata.Name,
stepMetadata.Metadata.Aliases,
)
checkError(err)
return stepConfiguration

220
pkg/gcs/gcsClient.go Normal file
View File

@ -0,0 +1,220 @@
package gcs
import (
"context"
"io"
"os"
"path/filepath"
"cloud.google.com/go/storage"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/pkg/errors"
"google.golang.org/api/iterator"
"google.golang.org/api/option"
)
// Client is an interface to mock gcsClient
type Client interface {
UploadFile(bucketID string, sourcePath string, targetPath string) error
DownloadFile(bucketID string, sourcePath string, targetPath string) error
ListFiles(bucketID string) ([]string, error)
Close() error
}
// gcsClient provides functions to interact with the Google Cloud Storage API
type gcsClient struct {
context context.Context
envVars []EnvVar
client storage.Client
clientOptions []option.ClientOption
openFile func(name string) (io.ReadCloser, error)
createFile func(name string) (io.WriteCloser, error)
}
// EnvVar defines an environment variable incl. information about a potential modification to the variable
type EnvVar struct {
Name string
Value string
Modified bool
}
type gcsOption func(*gcsClient)
// WithEnvVars initializes env variables in gcsClient
func WithEnvVars(envVars []EnvVar) gcsOption {
return func(g *gcsClient) {
g.envVars = envVars
}
}
// WithOpenFileFunction initializes the openFile function in gcsClient
func WithOpenFileFunction(openFile func(name string) (io.ReadCloser, error)) gcsOption {
return func(g *gcsClient) {
g.openFile = openFile
}
}
// WithCreateFileFunction initializes the createFile function in gcsClient
func WithCreateFileFunction(createFile func(name string) (io.WriteCloser, error)) gcsOption {
return func(g *gcsClient) {
g.createFile = createFile
}
}
// WithClientOptions initializes the Google Cloud Storage client options
func WithClientOptions(opts ...option.ClientOption) gcsOption {
return func(g *gcsClient) {
g.clientOptions = append(g.clientOptions, opts...)
}
}
// NewClient initializes a Google Cloud Storage client
func NewClient(opts ...gcsOption) (*gcsClient, error) {
var (
defaultOpenFile = openFileFromFS
defaultCreateFile = createFileOnFS
)
ctx := context.Background()
gcsClient := &gcsClient{
context: ctx,
openFile: defaultOpenFile,
createFile: defaultCreateFile,
}
// options handling
for _, opt := range opts {
opt(gcsClient)
}
gcsClient.prepareEnv()
client, err := storage.NewClient(ctx, gcsClient.clientOptions...)
if err != nil {
return nil, errors.Wrapf(err, "bucket connection failed: %v", err)
}
gcsClient.client = *client
return gcsClient, nil
}
// UploadFile uploads a file into a Google Cloud Storage bucket
func (g *gcsClient) UploadFile(bucketID string, sourcePath string, targetPath string) error {
target := g.client.Bucket(bucketID).Object(targetPath).NewWriter(g.context)
log.Entry().Debugf("uploading %v to %v\n", sourcePath, targetPath)
sourceFile, err := g.openFile(sourcePath)
if err != nil {
return errors.Wrapf(err, "could not open source file: %v", err)
}
defer sourceFile.Close()
if err := g.copy(sourceFile, target); err != nil {
return errors.Wrapf(err, "upload failed: %v", err)
}
if err := target.Close(); err != nil {
return errors.Wrapf(err, "closing bucket failed: %v", err)
}
return nil
}
// DownloadFile downloads a file from a Google Cloud Storage bucket
func (g *gcsClient) DownloadFile(bucketID string, sourcePath string, targetPath string) error {
log.Entry().Debugf("downloading %v to %v\n", sourcePath, targetPath)
gcsReader, err := g.client.Bucket(bucketID).Object(sourcePath).NewReader(g.context)
if err != nil {
return errors.Wrapf(err, "could not open source file from a google cloud storage bucket: %v", err)
}
targetWriter, err := g.createFile(targetPath)
if err != nil {
return errors.Wrapf(err, "could not create target file: %v", err)
}
defer targetWriter.Close()
if err := g.copy(gcsReader, targetWriter); err != nil {
return errors.Wrapf(err, "download failed: %v", err)
}
if err := gcsReader.Close(); err != nil {
return errors.Wrapf(err, "closing bucket failed: %v", err)
}
return nil
}
// ListFiles lists all files in a given GCS bucket
func (g *gcsClient) ListFiles(bucketID string) ([]string, error) {
fileNames := []string{}
it := g.client.Bucket(bucketID).Objects(g.context, nil)
for {
attrs, err := it.Next()
if err == iterator.Done {
break
}
if err != nil {
return nil, err
}
fileNames = append(fileNames, attrs.Name)
}
return fileNames, nil
}
// Close closes the client and removes previously set environment variables
func (g *gcsClient) Close() error {
if err := g.client.Close(); err != nil {
return err
}
if err := g.cleanupEnv(); err != nil {
return err
}
return nil
}
func (g *gcsClient) copy(source io.Reader, target io.Writer) error {
if _, err := io.Copy(target, source); err != nil {
return err
}
return nil
}
// prepareEnv sets required environment variables in case they are not set yet
func (g *gcsClient) prepareEnv() {
for key, env := range g.envVars {
g.envVars[key].Modified = setenvIfEmpty(env.Name, env.Value)
}
}
// cleanupEnv removes environment variables set by prepareEnv
func (g *gcsClient) cleanupEnv() error {
for _, env := range g.envVars {
if err := removeEnvIfPreviouslySet(env.Name, env.Modified); err != nil {
return err
}
}
return nil
}
func setenvIfEmpty(env, val string) bool {
if len(os.Getenv(env)) == 0 {
os.Setenv(env, val)
return true
}
return false
}
func removeEnvIfPreviouslySet(env string, previouslySet bool) error {
if previouslySet {
if err := os.Setenv(env, ""); err != nil {
return err
}
}
return nil
}
func openFileFromFS(name string) (io.ReadCloser, error) {
return os.Open(name)
}
func createFileOnFS(name string) (io.WriteCloser, error) {
if err := os.MkdirAll(filepath.Dir(name), os.ModePerm); err != nil {
return nil, err
}
return os.Create(name)
}
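A minimal usage sketch for the client above; the bucket name, key file path, and file paths are placeholders, not values taken from this change:
package main
import (
	"log"
	"github.com/SAP/jenkins-library/pkg/gcs"
)
func main() {
	// Hypothetical credentials file; the variable is only set if it is not already present.
	envVars := []gcs.EnvVar{
		{Name: "GOOGLE_APPLICATION_CREDENTIALS", Value: "/path/to/key.json"},
	}
	gcsClient, err := gcs.NewClient(gcs.WithEnvVars(envVars))
	if err != nil {
		log.Fatalf("creating GCS client failed: %v", err)
	}
	// Close also removes the environment variables that were set by the client.
	defer gcsClient.Close()
	// Upload a local report to a target path inside the bucket.
	if err := gcsClient.UploadFile("my-bucket", "report.json", "reports/report.json"); err != nil {
		log.Fatalf("upload failed: %v", err)
	}
}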

49
pkg/gcs/gcsClient_test.go Normal file
View File

@ -0,0 +1,49 @@
package gcs
import (
"os"
"testing"
)
func Test_gcsClient_prepareEnv(t *testing.T) {
os.Setenv("TESTVAR1", "test1")
gcsClient := gcsClient{envVars: []EnvVar{
{Name: "TESTVAR1", Value: "test1_new"},
{Name: "TESTVAR2", Value: "test2_new"},
}}
gcsClient.prepareEnv()
if gcsClient.envVars[0].Modified {
t.Errorf("%v - expected '%v' was '%v'", gcsClient.envVars[0].Name, false, gcsClient.envVars[0].Modified)
}
if !gcsClient.envVars[1].Modified {
t.Errorf("%v - expected '%v' was '%v'", gcsClient.envVars[1].Name, true, gcsClient.envVars[1].Modified)
}
os.Setenv("TESTVAR1", "")
os.Setenv("TESTVAR2", "")
}
func TestCleanupEnv(t *testing.T) {
os.Setenv("TESTVAR1", "test1")
os.Setenv("TESTVAR2", "test2")
gcsClient := gcsClient{envVars: []EnvVar{
{Name: "TESTVAR1", Modified: false},
{Name: "TESTVAR2", Modified: true},
}}
gcsClient.cleanupEnv()
if os.Getenv("TESTVAR1") != "test1" {
t.Errorf("%v - expected '%v' was '%v'", gcsClient.envVars[0].Name, "test1", os.Getenv("TESTVAR1"))
}
if len(os.Getenv("TESTVAR2")) > 0 {
t.Errorf("%v - expected '%v' was '%v'", gcsClient.envVars[1].Name, "", os.Getenv("TESTVAR2"))
}
os.Setenv("TESTVAR1", "")
os.Setenv("TESTVAR2", "")
}

75
pkg/gcs/mocks/client.go Normal file
View File

@ -0,0 +1,75 @@
// Code generated by mockery v2.7.4. DO NOT EDIT.
package mocks
import mock "github.com/stretchr/testify/mock"
// Client is an autogenerated mock type for the Client type
type Client struct {
mock.Mock
}
// Close provides a mock function with given fields:
func (_m *Client) Close() error {
ret := _m.Called()
var r0 error
if rf, ok := ret.Get(0).(func() error); ok {
r0 = rf()
} else {
r0 = ret.Error(0)
}
return r0
}
// DownloadFile provides a mock function with given fields: bucketID, sourcePath, targetPath
func (_m *Client) DownloadFile(bucketID string, sourcePath string, targetPath string) error {
ret := _m.Called(bucketID, sourcePath, targetPath)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string) error); ok {
r0 = rf(bucketID, sourcePath, targetPath)
} else {
r0 = ret.Error(0)
}
return r0
}
// ListFiles provides a mock function with given fields: bucketID
func (_m *Client) ListFiles(bucketID string) ([]string, error) {
ret := _m.Called(bucketID)
var r0 []string
if rf, ok := ret.Get(0).(func(string) []string); ok {
r0 = rf(bucketID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]string)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(string) error); ok {
r1 = rf(bucketID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// UploadFile provides a mock function with given fields: bucketID, sourcePath, targetPath
func (_m *Client) UploadFile(bucketID string, sourcePath string, targetPath string) error {
ret := _m.Called(bucketID, sourcePath, targetPath)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string) error); ok {
r0 = rf(bucketID, sourcePath, targetPath)
} else {
r0 = ret.Error(0)
}
return r0
}

56
pkg/gcs/reporting.go Normal file
View File

@ -0,0 +1,56 @@
package gcs
import (
"fmt"
"os"
"path/filepath"
)
// ReportOutputParam describes a report that should be uploaded to the GCS bucket
type ReportOutputParam struct {
FilePattern string
ParamRef string
StepResultType string
}
// Task describes a single upload of a source file to a target path in the bucket
type Task struct {
SourcePath string
TargetPath string
}
// PersistReportsToGCS collects the report files described by outputParams and uploads them to the given GCS bucket
func PersistReportsToGCS(gcsClient Client, outputParams []ReportOutputParam, inputParams map[string]string, gcsFolderPath string, gcsBucketID string, gcsSubFolder string, searchFilesFunc func(string) ([]string, error), fileInfo func(string) (os.FileInfo, error)) error {
tasks := []Task{}
for _, param := range outputParams {
targetFolder := GetTargetFolder(gcsFolderPath, param.StepResultType, gcsSubFolder)
if param.ParamRef != "" {
paramValue, ok := inputParams[param.ParamRef]
if !ok {
return fmt.Errorf("there is no such input parameter as %s", param.ParamRef)
}
if paramValue == "" {
return fmt.Errorf("the value of the parameter %s must not be empty", param.ParamRef)
}
tasks = append(tasks, Task{SourcePath: paramValue, TargetPath: filepath.Join(targetFolder, paramValue)})
} else {
foundFiles, err := searchFilesFunc(param.FilePattern)
if err != nil {
return fmt.Errorf("failed to persist reports: %v", err)
}
for _, sourcePath := range foundFiles {
fileInfo, err := fileInfo(sourcePath)
if err != nil {
return fmt.Errorf("failed to persist reports: %v", err)
}
if fileInfo.IsDir() {
continue
}
tasks = append(tasks, Task{SourcePath: sourcePath, TargetPath: filepath.Join(targetFolder, sourcePath)})
}
}
}
for _, task := range tasks {
if err := gcsClient.UploadFile(gcsBucketID, task.SourcePath, task.TargetPath); err != nil {
return fmt.Errorf("failed to persist reports: %v", err)
}
}
return nil
}
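A minimal sketch of calling PersistReportsToGCS directly; the bucket, folder path, and glob pattern are placeholders, and the generated step code shown further below wires these values from the step configuration instead:
package main
import (
	"log"
	"os"
	"github.com/SAP/jenkins-library/pkg/gcs"
	"github.com/bmatcuk/doublestar"
)
func main() {
	gcsClient, err := gcs.NewClient()
	if err != nil {
		log.Fatalf("creating GCS client failed: %v", err)
	}
	defer gcsClient.Close()
	// One report matched by a glob pattern, grouped under the "general" step result type.
	outputParams := []gcs.ReportOutputParam{
		{FilePattern: "**/report*.json", StepResultType: "general"},
	}
	// Input parameters are only required for reports that reference a step parameter via ParamRef.
	inputParams := map[string]string{}
	if err := gcs.PersistReportsToGCS(gcsClient, outputParams, inputParams,
		"pipeline-runs/run-1", "my-bucket", "", doublestar.Glob, os.Stat); err != nil {
		log.Fatalf("failed to persist reports: %v", err)
	}
}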

168
pkg/gcs/reporting_test.go Normal file
View File

@ -0,0 +1,168 @@
package gcs
import (
"errors"
"os"
"strings"
"testing"
"time"
"github.com/SAP/jenkins-library/pkg/gcs/mocks"
"github.com/bmatcuk/doublestar"
"github.com/stretchr/testify/assert"
)
type testFileInfo struct {
path string
}
func (t testFileInfo) Name() string {
return ""
}
func (t testFileInfo) Size() int64 {
return 0
}
func (t testFileInfo) Mode() os.FileMode {
return os.FileMode(0)
}
func (t testFileInfo) ModTime() time.Time {
return time.Time{}
}
func (t testFileInfo) IsDir() bool {
if strings.HasSuffix(t.path, "test2") {
return true
}
return false
}
func (t testFileInfo) Sys() interface{} {
return nil
}
type testStepConfig struct {
FirstParameter string
SecondParameter int
ThirdParameter string
FourthParameter bool
}
func TestPersistReportsToGCS(t *testing.T) {
var testCases = []struct {
testName string
gcsFolderPath string
gcsSubFolder string
outputParams []ReportOutputParam
expected []Task
detectedFiles []string
uploadFileErr error
expectedError error
}{
{
testName: "success case",
gcsFolderPath: "test/folder/path",
gcsSubFolder: "sub/folder",
outputParams: []ReportOutputParam{
{FilePattern: "*.json", ParamRef: "", StepResultType: "general"},
{FilePattern: "*/test*", ParamRef: "", StepResultType: ""},
{FilePattern: "*.json", ParamRef: "firstParameter", StepResultType: "general"},
{FilePattern: "", ParamRef: "secondParameter", StepResultType: "general"},
{FilePattern: "", ParamRef: "thirdParameter", StepResultType: ""},
},
expected: []Task{
{SourcePath: "asdf.json", TargetPath: "test/folder/path/general/sub/folder/asdf.json"},
{SourcePath: "folder/test1", TargetPath: "test/folder/path/sub/folder/folder/test1"},
{SourcePath: "testFolder/test3", TargetPath: "test/folder/path/sub/folder/testFolder/test3"},
{SourcePath: "report1.json", TargetPath: "test/folder/path/general/sub/folder/report1.json"},
{SourcePath: "test-report.json", TargetPath: "test/folder/path/general/sub/folder/test-report.json"},
{SourcePath: "test-report2.json", TargetPath: "test/folder/path/sub/folder/test-report2.json"},
},
detectedFiles: []string{"asdf.json", "someFolder/someFile", "folder/test1", "folder1/test2", "testFolder/test3"},
uploadFileErr: nil,
expectedError: nil,
},
{
testName: "failed upload to GCS",
gcsFolderPath: "test/folder/path",
gcsSubFolder: "",
outputParams: []ReportOutputParam{
{FilePattern: "*.json", ParamRef: "", StepResultType: "general"},
},
expected: []Task{
{SourcePath: "asdf.json", TargetPath: "test/folder/path/general/asdf.json"},
},
detectedFiles: []string{"asdf.json", "someFolder/someFile", "folder/test1", "folder1/test2", "testFolder/test3"},
uploadFileErr: errors.New("upload failed"),
expectedError: errors.New("failed to persist reports: upload failed"),
},
{
testName: "failed - input parameter does not exist",
gcsFolderPath: "test/folder/path",
gcsSubFolder: "",
outputParams: []ReportOutputParam{
{FilePattern: "", ParamRef: "missingParameter", StepResultType: "general"},
},
expected: []Task{},
detectedFiles: []string{"asdf.json", "someFolder/someFile", "folder/test1", "folder1/test2", "testFolder/test3"},
uploadFileErr: nil,
expectedError: errors.New("there is no such input parameter as missingParameter"),
},
{
testName: "failed - input parameter is empty",
gcsFolderPath: "test/folder/path",
outputParams: []ReportOutputParam{
{FilePattern: "", ParamRef: "emptyParameter", StepResultType: "general"},
},
expected: []Task{},
detectedFiles: []string{"asdf.json", "someFolder/someFile", "folder/test1", "folder1/test2", "testFolder/test3"},
uploadFileErr: nil,
expectedError: errors.New("the value of the parameter emptyParameter must not be empty"),
},
}
for _, tt := range testCases {
t.Run(tt.testName, func(t *testing.T) {
inputParameters := map[string]string{
"firstParameter": "report1.json",
"secondParameter": "test-report.json",
"thirdParameter": "test-report2.json",
"emptyParameter": "",
}
gcsBucketID := "testBucketID"
mockedClient := &mocks.Client{}
for _, expectation := range tt.expected {
mockedClient.Mock.On("UploadFile", gcsBucketID, expectation.SourcePath, expectation.TargetPath).Return(
func(pipelineId string, sourcePath string, targetPath string) error { return tt.uploadFileErr },
).Once()
}
searchFn := func(path string) ([]string, error) {
matchedFiles := []string{}
for _, value := range tt.detectedFiles {
match, _ := doublestar.Match(path, value)
if match {
matchedFiles = append(matchedFiles, value)
}
}
return matchedFiles, nil
}
fileInfoFn := func(name string) (os.FileInfo, error) {
return testFileInfo{name}, nil
}
err := PersistReportsToGCS(mockedClient, tt.outputParams, inputParameters, tt.gcsFolderPath, gcsBucketID, tt.gcsSubFolder, searchFn, fileInfoFn)
if tt.expectedError == nil {
assert.NoError(t, err)
} else {
assert.Equal(t, tt.expectedError.Error(), err.Error())
}
mockedClient.Mock.AssertNumberOfCalls(t, "UploadFile", len(tt.expected))
mockedClient.Mock.AssertExpectations(t)
})
}
}

View File

@ -0,0 +1,8 @@
package gcs
import "path"
// GetTargetFolder calculates the target folder in the GCS bucket
func GetTargetFolder(folderPath string, stepResultType string, subFolder string) string {
return path.Join(folderPath, stepResultType, subFolder)
}

View File

@ -0,0 +1,28 @@
package gcs
import (
"fmt"
"testing"
)
func TestGetTargetFolder(t *testing.T) {
tests := []struct {
folderPath string
stepResultType string
subFolder string
expected string
}{
{folderPath: "folder/path/", stepResultType: "general", subFolder: "sub/folder", expected: "folder/path/general/sub/folder"},
{folderPath: "folder/path/", subFolder: "sub/folder", expected: "folder/path/sub/folder"},
{folderPath: "folder/path/", stepResultType: "general", expected: "folder/path/general"},
{folderPath: "folder1", stepResultType: "general", subFolder: "folder2/", expected: "folder1/general/folder2"}}
for key, tt := range tests {
t.Run(fmt.Sprintf("Row %v", key+1), func(t *testing.T) {
actualTargetFolder := GetTargetFolder(tt.folderPath, tt.stepResultType, tt.subFolder)
if actualTargetFolder != tt.expected {
t.Errorf("Expected '%v' was '%v'", tt.expected, actualTargetFolder)
}
})
}
}

View File

@ -56,6 +56,16 @@ import (
"github.com/SAP/jenkins-library/pkg/config"
"github.com/SAP/jenkins-library/pkg/log"
{{ if .OutputResources -}}
{{ $reportsExist := false -}}
{{ range $notused, $oRes := .OutputResources -}}
{{ if eq (index $oRes "type") "reports" -}}{{ $reportsExist = true -}}{{ end -}}
{{ end -}}
{{ if $reportsExist -}}
"github.com/bmatcuk/doublestar"
"github.com/SAP/jenkins-library/pkg/gcs"
"reflect"
"strings"
{{ end -}}
"github.com/SAP/jenkins-library/pkg/piperenv"
{{ end -}}
"github.com/SAP/jenkins-library/pkg/telemetry"
@ -146,9 +156,11 @@ func {{.CobraCmdFuncName}}() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
{{- range $notused, $oRes := .OutputResources }}
{{ index $oRes "name" }}.persist({{if $.ExportPrefix}}{{ $.ExportPrefix }}.{{end}}GeneralConfig.EnvRootPath, {{ index $oRes "name" | quote }}){{ end }}
{{ index $oRes "name" }}.persist({{ if eq (index $oRes "type") "reports" -}}stepConfig{{- else -}}
{{if $.ExportPrefix}}{{ $.ExportPrefix }}.{{end}}GeneralConfig.EnvRootPath, {{ index $oRes "name" | quote }}{{- end -}}
){{- end }}
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = {{if .ExportPrefix}}{{ .ExportPrefix }}.{{end}}GitCommit
@ -168,7 +180,7 @@ func {{.CobraCmdFuncName}}() *cobra.Command {
{{if .ExportPrefix}}{{ .ExportPrefix }}.{{end}}GeneralConfig.HookConfig.SplunkConfig.Index,
{{if .ExportPrefix}}{{ .ExportPrefix }}.{{end}}GeneralConfig.HookConfig.SplunkConfig.SendLogs)
}
{{.StepName}}(stepConfig, &stepTelemetryData{{ range $notused, $oRes := .OutputResources}}, &{{ index $oRes "name" }}{{ end }})
{{.StepName}}(stepConfig, &stepTelemetryData{{ range $notused, $oRes := .OutputResources}}{{ if ne (index $oRes "type") "reports" }}, &{{ index $oRes "name" }}{{ end }}{{ end }})
stepTelemetryData.ErrorCode = "0"
log.Entry().Info("SUCCESS")
},
@ -287,6 +299,9 @@ func {{ .StepName }}Metadata() config.StepData {
{{ if $p.name}}"name": {{ $p.name | quote }},{{ end -}}
{{ if $p.fields}}"fields": []map[string]string{ {{- range $j, $f := $p.fields}} {"name": {{ $f.name | quote }}}, {{end -}} },{{ end -}}
{{ if $p.tags}}"tags": []map[string]string{ {{- range $j, $t := $p.tags}} {"name": {{ $t.name | quote }}}, {{end -}} },{{ end -}}
{{ if $p.filePattern}}"filePattern": {{ $p.filePattern | quote }},{{ end -}}
{{ if $p.type}}"type": {{ $p.type | quote }},{{ end -}}
{{ if $p.subFolder}}"subFolder": {{ $p.subFolder | quote }},{{ end -}}
{{ if $p }}}, {{- end -}}
{{ end }}
{{ if $res.Parameters -}} }, {{- end }}
@ -638,6 +653,7 @@ func getOutputResourceDetails(stepData *config.StepData) ([]map[string]string, e
for _, res := range stepData.Spec.Outputs.Resources {
currentResource := map[string]string{}
currentResource["name"] = res.Name
currentResource["type"] = res.Type
switch res.Type {
case "piperEnvironment":
@ -695,6 +711,27 @@ func getOutputResourceDetails(stepData *config.StepData) ([]map[string]string, e
currentResource["def"] = def
currentResource["objectname"] = influxResource.StructName()
outputResources = append(outputResources, currentResource)
case "reports":
var reportsResource ReportsResource
reportsResource.Name = res.Name
reportsResource.StepName = stepData.Metadata.Name
for _, param := range res.Parameters {
filePattern, _ := param["filePattern"].(string)
paramRef, _ := param["paramRef"].(string)
if filePattern == "" && paramRef == "" {
return outputResources, errors.New("both filePattern and paramRef cannot be empty at the same time")
}
stepResultType, _ := param["type"].(string)
reportsParam := ReportsParameter{FilePattern: filePattern, ParamRef: paramRef, Type: stepResultType}
reportsResource.Parameters = append(reportsResource.Parameters, reportsParam)
}
def, err := reportsResource.StructString()
if err != nil {
return outputResources, err
}
currentResource["def"] = def
currentResource["objectname"] = reportsResource.StructName()
outputResources = append(outputResources, currentResource)
}
}
@ -725,7 +762,7 @@ func isCLIParam(myType string) bool {
func stepTemplate(myStepInfo stepInfo, templateName, goTemplate string) []byte {
funcMap := sprig.HermeticTxtFuncMap()
funcMap["flagType"] = flagType
funcMap["golangName"] = golangNameTitle
funcMap["golangName"] = GolangNameTitle
funcMap["title"] = strings.Title
funcMap["longName"] = longName
funcMap["uniqueName"] = mustUniqName
@ -778,7 +815,8 @@ func golangName(name string) string {
return properName
}
func golangNameTitle(name string) string {
// GolangNameTitle returns the name in title case with abbreviations capitalized (API, URL, ID, JSON, TLS)
func GolangNameTitle(name string) string {
return strings.Title(golangName(name))
}

View File

@ -25,6 +25,13 @@ func configOpenFileMock(name string) (io.ReadCloser, error) {
spec:
outputs:
resources:
- name: reports
type: reports
params:
- filePattern: "test-report_*.json"
subFolder: "sonarExecuteScan"
- filePattern: "report1"
type: general
- name: commonPipelineEnvironment
type: piperEnvironment
params:
@ -289,7 +296,7 @@ func TestGolangNameTitle(t *testing.T) {
}
for k, v := range tt {
assert.Equal(t, v.expected, golangNameTitle(v.input), fmt.Sprintf("wrong golang name for run %v", k))
assert.Equal(t, v.expected, GolangNameTitle(v.input), fmt.Sprintf("wrong golang name for run %v", k))
}
}

View File

@ -195,3 +195,76 @@ func (i *InfluxResource) StructString() (string, error) {
func (i *InfluxResource) StructName() string {
return fmt.Sprintf("%v%v", i.StepName, strings.Title(i.Name))
}
// ReportsResource defines a reports resource that persists report files to a Google Cloud Storage bucket
type ReportsResource struct {
Name string
StepName string
Parameters []ReportsParameter
}
// ReportsParameter defines a single report entry of a reports resource
type ReportsParameter struct {
FilePattern string
ParamRef string
Type string
}
const reportsStructTemplate = `type {{ .StepName }}{{ .Name | title}} struct {
}
func (p *{{ .StepName }}{{ .Name | title}}) persist(stepConfig sonarExecuteScanOptions) {
content := []gcs.ReportOutputParam{
{{- range $notused, $param := .Parameters }}
{FilePattern: "{{ $param.FilePattern }}", ParamRef: "{{ $param.ParamRef }}", StepResultType: "{{ $param.Type }}"},
{{- end }}
}
envVars := []gcs.EnvVar{
{Name: "GOOGLE_APPLICATION_CREDENTIALS", Value: GeneralConfig.GCPJsonKeyFilePath, Modified: false},
}
gcsClient, err := gcs.NewClient(gcs.WithEnvVars(envVars))
if err != nil {
log.Entry().Errorf("creation of GCS client failed: %v", err)
}
defer gcsClient.Close()
structVal := reflect.ValueOf(&stepConfig).Elem()
inputParameters := map[string]string{}
for i := 0; i < structVal.NumField(); i++ {
field := structVal.Type().Field(i)
if field.Type.String() == "string" {
paramName := strings.Split(field.Tag.Get("json"), ",")
paramValue, _ := structVal.Field(i).Interface().(string)
inputParameters[paramName[0]] = paramValue
}
}
if err := gcs.PersistReportsToGCS(gcsClient, content, inputParameters, GeneralConfig.GCSFolderPath, GeneralConfig.GCSBucketId, GeneralConfig.GCSSubFolder, doublestar.Glob, os.Stat); err != nil {
log.Entry().Errorf("failed to persist reports: %v", err)
}
}`
// StructName returns the name of the reports resource struct
func (p *ReportsResource) StructName() string {
return fmt.Sprintf("%v%v", p.StepName, strings.Title(p.Name))
}
// StructString returns the Go code for the struct definition of the reports resource
func (p *ReportsResource) StructString() (string, error) {
funcMap := template.FuncMap{
"title": strings.Title,
"golangName": golangName,
"resourceFieldType": resourceFieldType,
}
tmpl, err := template.New("resources").Funcs(funcMap).Parse(reportsStructTemplate)
if err != nil {
return "", err
}
var generatedCode bytes.Buffer
err = tmpl.Execute(&generatedCode, &p)
if err != nil {
return "", err
}
return string(generatedCode.String()), nil
}

View File

@ -7,7 +7,7 @@ import (
"github.com/stretchr/testify/assert"
)
func TestStructString(t *testing.T) {
func TestInfluxResource_StructString(t *testing.T) {
tt := []struct {
in InfluxResource
expected string
@ -93,3 +93,69 @@ func (i *TestStepTestInflux) persist(path, resourceName string) {
}
}
func TestReportsResource_StructString(t *testing.T) {
tt := []struct {
in ReportsResource
expected string
}{
{
in: ReportsResource{
Name: "reports",
StepName: "testStep",
Parameters: []ReportsParameter{
{
FilePattern: "pattern1",
Type: "general",
},
{
FilePattern: "pattern2",
},
{
ParamRef: "testParam",
},
},
},
expected: `type testStepReports struct {
}
func (p *testStepReports) persist(stepConfig sonarExecuteScanOptions) {
content := []gcs.ReportOutputParam{
{FilePattern: "pattern1", ParamRef: "", StepResultType: "general"},
{FilePattern: "pattern2", ParamRef: "", StepResultType: ""},
{FilePattern: "", ParamRef: "testParam", StepResultType: ""},
}
envVars := []gcs.EnvVar{
{Name: "GOOGLE_APPLICATION_CREDENTIALS", Value: GeneralConfig.GCPJsonKeyFilePath, Modified: false},
}
gcsClient, err := gcs.NewClient(gcs.WithEnvVars(envVars))
if err != nil {
log.Entry().Errorf("creation of GCS client failed: %v", err)
}
defer gcsClient.Close()
structVal := reflect.ValueOf(&stepConfig).Elem()
inputParameters := map[string]string{}
for i := 0; i < structVal.NumField(); i++ {
field := structVal.Type().Field(i)
if field.Type.String() == "string" {
paramName := strings.Split(field.Tag.Get("json"), ",")
paramValue, _ := structVal.Field(i).Interface().(string)
inputParameters[paramName[0]] = paramValue
}
}
if err := gcs.PersistReportsToGCS(gcsClient, content, inputParameters, GeneralConfig.GCSFolderPath, GeneralConfig.GCSBucketId, GeneralConfig.GCSSubFolder, doublestar.Glob, os.Stat); err != nil {
log.Entry().Errorf("failed to persist reports: %v", err)
}
}`,
},
}
for run, test := range tt {
t.Run(fmt.Sprintf("Run %v", run), func(t *testing.T) {
got, err := test.in.StructString()
assert.NoError(t, err)
assert.Equal(t, test.expected, got)
})
}
}

View File

@ -11,6 +11,10 @@ import (
piperOsCmd "github.com/SAP/jenkins-library/cmd"
"github.com/SAP/jenkins-library/pkg/config"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/bmatcuk/doublestar"
"github.com/SAP/jenkins-library/pkg/gcs"
"reflect"
"strings"
"github.com/SAP/jenkins-library/pkg/piperenv"
"github.com/SAP/jenkins-library/pkg/telemetry"
"github.com/SAP/jenkins-library/pkg/splunk"
@ -26,6 +30,37 @@ type testStepOptions struct {
}
type testStepReports struct {
}
func (p *testStepReports) persist(stepConfig sonarExecuteScanOptions) {
content := []gcs.ReportOutputParam{
{FilePattern: "test-report_*.json", ParamRef: "", StepResultType: ""},
{FilePattern: "report1", ParamRef: "", StepResultType: "general"},
}
envVars := []gcs.EnvVar{
{Name: "GOOGLE_APPLICATION_CREDENTIALS", Value: GeneralConfig.GCPJsonKeyFilePath, Modified: false},
}
gcsClient, err := gcs.NewClient(gcs.WithEnvVars(envVars))
if err != nil {
log.Entry().Errorf("creation of GCS client failed: %v", err)
}
defer gcsClient.Close()
structVal := reflect.ValueOf(&stepConfig).Elem()
inputParameters := map[string]string{}
for i := 0; i < structVal.NumField(); i++ {
field := structVal.Type().Field(i)
if field.Type.String() == "string" {
paramName := strings.Split(field.Tag.Get("json"), ",")
paramValue, _ := structVal.Field(i).Interface().(string)
inputParameters[paramName[0]] = paramValue
}
}
if err := gcs.PersistReportsToGCS(gcsClient, content, inputParameters, GeneralConfig.GCSFolderPath, GeneralConfig.GCSBucketId, GeneralConfig.GCSSubFolder, doublestar.Glob, os.Stat); err != nil {
log.Entry().Errorf("failed to persist reports: %v", err)
}
}
type testStepCommonPipelineEnvironment struct {
artifactVersion string
git struct {
@ -107,6 +142,7 @@ func TestStepCommand() *cobra.Command {
metadata := testStepMetadata()
var stepConfig testStepOptions
var startTime time.Time
var reports testStepReports
var commonPipelineEnvironment testStepCommonPipelineEnvironment
var influxTest testStepInfluxTest
var logCollector *log.CollectorHook
@ -160,9 +196,10 @@ func TestStepCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
reports.persist(stepConfig)
commonPipelineEnvironment.persist(piperOsCmd.GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
influxTest.persist(piperOsCmd.GeneralConfig.EnvRootPath, "influxTest")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = piperOsCmd.GitCommit
@ -256,6 +293,14 @@ func testStepMetadata() config.StepData {
},
Outputs: config.StepOutputs{
Resources: []config.StepResources{
{
Name: "reports",
Type: "reports",
Parameters: []map[string]interface{}{
{"filePattern": "test-report_*.json","subFolder": "sonarExecuteScan",},
{"filePattern": "report1","type": "general",},
},
},
{
Name: "commonPipelineEnvironment",
Type: "piperEnvironment",
@ -264,7 +309,7 @@ func testStepMetadata() config.StepData {
{"name": "git/commitId",},
{"name": "git/headCommitId",},
{"name": "git/branch",},
{"name": "custom/customList",},
{"name": "custom/customList","type": "[]string",},
},
},
{

View File

@ -10,6 +10,10 @@ import (
"github.com/SAP/jenkins-library/pkg/config"
"github.com/SAP/jenkins-library/pkg/log"
"github.com/bmatcuk/doublestar"
"github.com/SAP/jenkins-library/pkg/gcs"
"reflect"
"strings"
"github.com/SAP/jenkins-library/pkg/piperenv"
"github.com/SAP/jenkins-library/pkg/telemetry"
"github.com/SAP/jenkins-library/pkg/splunk"
@ -25,6 +29,37 @@ type testStepOptions struct {
}
type testStepReports struct {
}
func (p *testStepReports) persist(stepConfig sonarExecuteScanOptions) {
content := []gcs.ReportOutputParam{
{FilePattern: "test-report_*.json", ParamRef: "", StepResultType: ""},
{FilePattern: "report1", ParamRef: "", StepResultType: "general"},
}
envVars := []gcs.EnvVar{
{Name: "GOOGLE_APPLICATION_CREDENTIALS", Value: GeneralConfig.GCPJsonKeyFilePath, Modified: false},
}
gcsClient, err := gcs.NewClient(gcs.WithEnvVars(envVars))
if err != nil {
log.Entry().Errorf("creation of GCS client failed: %v", err)
}
defer gcsClient.Close()
structVal := reflect.ValueOf(&stepConfig).Elem()
inputParameters := map[string]string{}
for i := 0; i < structVal.NumField(); i++ {
field := structVal.Type().Field(i)
if field.Type.String() == "string" {
paramName := strings.Split(field.Tag.Get("json"), ",")
paramValue, _ := structVal.Field(i).Interface().(string)
inputParameters[paramName[0]] = paramValue
}
}
if err := gcs.PersistReportsToGCS(gcsClient, content, inputParameters, GeneralConfig.GCSFolderPath, GeneralConfig.GCSBucketId, GeneralConfig.GCSSubFolder, doublestar.Glob, os.Stat); err != nil {
log.Entry().Errorf("failed to persist reports: %v", err)
}
}
type testStepCommonPipelineEnvironment struct {
artifactVersion string
git struct {
@ -106,6 +141,7 @@ func TestStepCommand() *cobra.Command {
metadata := testStepMetadata()
var stepConfig testStepOptions
var startTime time.Time
var reports testStepReports
var commonPipelineEnvironment testStepCommonPipelineEnvironment
var influxTest testStepInfluxTest
var logCollector *log.CollectorHook
@ -159,9 +195,10 @@ func TestStepCommand() *cobra.Command {
stepTelemetryData := telemetry.CustomData{}
stepTelemetryData.ErrorCode = "1"
handler := func() {
config.RemoveVaultSecretFiles()
reports.persist(stepConfig)
commonPipelineEnvironment.persist(GeneralConfig.EnvRootPath, "commonPipelineEnvironment")
influxTest.persist(GeneralConfig.EnvRootPath, "influxTest")
config.RemoveVaultSecretFiles()
stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
stepTelemetryData.PiperCommitHash = GitCommit
@ -255,6 +292,14 @@ func testStepMetadata() config.StepData {
},
Outputs: config.StepOutputs{
Resources: []config.StepResources{
{
Name: "reports",
Type: "reports",
Parameters: []map[string]interface{}{
{"filePattern": "test-report_*.json","subFolder": "sonarExecuteScan",},
{"filePattern": "report1","type": "general",},
},
},
{
Name: "commonPipelineEnvironment",
Type: "piperEnvironment",
@ -263,7 +308,7 @@ func testStepMetadata() config.StepData {
{"name": "git/commitId",},
{"name": "git/headCommitId",},
{"name": "git/branch",},
{"name": "custom/customList",},
{"name": "custom/customList","type": "[]string",},
},
},
{

View File

@ -269,6 +269,13 @@ spec:
outputs:
resources:
- name: reports
type: reports
params:
- filePattern: "sonarscan.json"
type: sonarqube
- filePattern: "sonarExecuteScan_*.json"
type: sonarqube
- name: influx
type: influx
params: