mirror of https://github.com/SAP/jenkins-library.git synced 2024-12-12 10:55:20 +02:00
sap-jenkins-library/cmd/awsS3Upload_generated.go
Vyacheslav Starostin 3d63ce235c
Add the possibility to push data to Splunk Prod (#4459)
* Add Splunk Prod

* Update test data

* Update naming

* Unit-test: update naming

* Fix

* Fix typo

* Fix test

* go generate
2023-07-14 19:19:57 +06:00
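The commit above extends the telemetry exit handler so that, in addition to the existing Splunk send, data can also be pushed to a Splunk production endpoint when a Cribl endpoint is configured. A minimal sketch of the added branch, using the SplunkConfig hook fields that appear in the generated Run handler further below (not a standalone program):

// Sketch of the send path added by this commit; field names are taken
// from the hook configuration used in the generated code below.
if len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 {
    splunkClient.Initialize(GeneralConfig.CorrelationID,
        GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint,
        GeneralConfig.HookConfig.SplunkConfig.ProdCriblToken,
        GeneralConfig.HookConfig.SplunkConfig.ProdCriblIndex,
        GeneralConfig.HookConfig.SplunkConfig.SendLogs)
    splunkClient.Send(telemetryClient.GetData(), logCollector)
}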

// Code generated by piper's step-generator. DO NOT EDIT.

package cmd

import (
    "fmt"
    "os"
    "time"

    "github.com/SAP/jenkins-library/pkg/config"
    "github.com/SAP/jenkins-library/pkg/log"
    "github.com/SAP/jenkins-library/pkg/splunk"
    "github.com/SAP/jenkins-library/pkg/telemetry"
    "github.com/SAP/jenkins-library/pkg/validation"
    "github.com/spf13/cobra"
)

type awsS3UploadOptions struct {
    JSONCredentialsAWS string `json:"jsonCredentialsAWS,omitempty"`
    FilePath           string `json:"filePath,omitempty"`
}

// AwsS3UploadCommand Uploads a specified file or directory into a given AWS S3 Bucket
func AwsS3UploadCommand() *cobra.Command {
    const STEP_NAME = "awsS3Upload"

    metadata := awsS3UploadMetadata()
    var stepConfig awsS3UploadOptions
    var startTime time.Time
    var logCollector *log.CollectorHook
    var splunkClient *splunk.Splunk
    telemetryClient := &telemetry.Telemetry{}

    var createAwsS3UploadCmd = &cobra.Command{
        Use:   STEP_NAME,
        Short: "Uploads a specified file or directory into a given AWS S3 Bucket",
        Long: `Uploads a specified file or directory as S3 Objects into a given AWS S3 Bucket.
In case a file is uploaded that is already contained in the S3 bucket, it will be overwritten with the latest version.`,
        PreRunE: func(cmd *cobra.Command, _ []string) error {
            startTime = time.Now()
            log.SetStepName(STEP_NAME)
            log.SetVerbose(GeneralConfig.Verbose)

            GeneralConfig.GitHubAccessTokens = ResolveAccessTokens(GeneralConfig.GitHubTokens)

            path, _ := os.Getwd()
            fatalHook := &log.FatalHook{CorrelationID: GeneralConfig.CorrelationID, Path: path}
            log.RegisterHook(fatalHook)

            err := PrepareConfig(cmd, &metadata, STEP_NAME, &stepConfig, config.OpenPiperFile)
            if err != nil {
                log.SetErrorCategory(log.ErrorConfiguration)
                return err
            }
            log.RegisterSecret(stepConfig.JSONCredentialsAWS)

            if len(GeneralConfig.HookConfig.SentryConfig.Dsn) > 0 {
                sentryHook := log.NewSentryHook(GeneralConfig.HookConfig.SentryConfig.Dsn, GeneralConfig.CorrelationID)
                log.RegisterHook(&sentryHook)
            }

            if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 {
                splunkClient = &splunk.Splunk{}
                logCollector = &log.CollectorHook{CorrelationID: GeneralConfig.CorrelationID}
                log.RegisterHook(logCollector)
            }

            if err = log.RegisterANSHookIfConfigured(GeneralConfig.CorrelationID); err != nil {
                log.Entry().WithError(err).Warn("failed to set up SAP Alert Notification Service log hook")
            }

            validation, err := validation.New(validation.WithJSONNamesForStructFields(), validation.WithPredefinedErrorMessages())
            if err != nil {
                return err
            }
            if err = validation.ValidateStruct(stepConfig); err != nil {
                log.SetErrorCategory(log.ErrorConfiguration)
                return err
            }

            return nil
        },
        Run: func(_ *cobra.Command, _ []string) {
            stepTelemetryData := telemetry.CustomData{}
            stepTelemetryData.ErrorCode = "1"
            handler := func() {
                config.RemoveVaultSecretFiles()
                stepTelemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
                stepTelemetryData.ErrorCategory = log.GetErrorCategory().String()
                stepTelemetryData.PiperCommitHash = GitCommit
                telemetryClient.SetData(&stepTelemetryData)
                telemetryClient.Send()
                if len(GeneralConfig.HookConfig.SplunkConfig.Dsn) > 0 {
                    splunkClient.Initialize(GeneralConfig.CorrelationID,
                        GeneralConfig.HookConfig.SplunkConfig.Dsn,
                        GeneralConfig.HookConfig.SplunkConfig.Token,
                        GeneralConfig.HookConfig.SplunkConfig.Index,
                        GeneralConfig.HookConfig.SplunkConfig.SendLogs)
                    splunkClient.Send(telemetryClient.GetData(), logCollector)
                }
                if len(GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint) > 0 {
                    splunkClient.Initialize(GeneralConfig.CorrelationID,
                        GeneralConfig.HookConfig.SplunkConfig.ProdCriblEndpoint,
                        GeneralConfig.HookConfig.SplunkConfig.ProdCriblToken,
                        GeneralConfig.HookConfig.SplunkConfig.ProdCriblIndex,
                        GeneralConfig.HookConfig.SplunkConfig.SendLogs)
                    splunkClient.Send(telemetryClient.GetData(), logCollector)
                }
            }
            log.DeferExitHandler(handler)
            defer handler()
            telemetryClient.Initialize(GeneralConfig.NoTelemetry, STEP_NAME)
            awsS3Upload(stepConfig, &stepTelemetryData)
            stepTelemetryData.ErrorCode = "0"
            log.Entry().Info("SUCCESS")
        },
    }

    addAwsS3UploadFlags(createAwsS3UploadCmd, &stepConfig)
    return createAwsS3UploadCmd
}

func addAwsS3UploadFlags(cmd *cobra.Command, stepConfig *awsS3UploadOptions) {
    cmd.Flags().StringVar(&stepConfig.JSONCredentialsAWS, "jsonCredentialsAWS", os.Getenv("PIPER_jsonCredentialsAWS"), "JSON String Credentials to access AWS S3 Bucket")
    cmd.Flags().StringVar(&stepConfig.FilePath, "filePath", os.Getenv("PIPER_filePath"), "Name/Path of the file which should be uploaded")

    cmd.MarkFlagRequired("jsonCredentialsAWS")
    cmd.MarkFlagRequired("filePath")
}

// retrieve step metadata
func awsS3UploadMetadata() config.StepData {
    var theMetaData = config.StepData{
        Metadata: config.StepMetadata{
            Name:        "awsS3Upload",
            Aliases:     []config.Alias{},
            Description: "Uploads a specified file or directory into a given AWS S3 Bucket",
        },
        Spec: config.StepSpec{
            Inputs: config.StepInputs{
                Secrets: []config.StepSecrets{
                    {Name: "awsCredentialsId", Description: "Jenkins 'Secret Text' credentials ID containing the JSON file to authenticate to the AWS S3 Bucket", Type: "jenkins"},
                },
                Parameters: []config.StepParameters{
                    {
                        Name: "jsonCredentialsAWS",
                        ResourceRef: []config.ResourceReference{
                            {
                                Name: "awsCredentialsId",
                                Type: "secret",
                            },
                        },
                        Scope:     []string{"PARAMETERS"},
                        Type:      "string",
                        Mandatory: true,
                        Aliases:   []config.Alias{},
                        Default:   os.Getenv("PIPER_jsonCredentialsAWS"),
                    },
                    {
                        Name: "filePath",
                        ResourceRef: []config.ResourceReference{
                            {
                                Name:  "commonPipelineEnvironment",
                                Param: "mtarFilePath",
                            },
                        },
                        Scope:     []string{"PARAMETERS", "STAGES", "STEPS"},
                        Type:      "string",
                        Mandatory: true,
                        Aliases:   []config.Alias{},
                        Default:   os.Getenv("PIPER_filePath"),
                    },
                },
            },
        },
    }
    return theMetaData
}
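
For orientation, a hedged sketch of how the generated command could be exercised from Go code in the same package. The flag values are placeholders and the shape of the credentials JSON is an assumption for illustration only; it is not defined in this file:

// Hypothetical local invocation of the generated command (same package cmd);
// flag values are placeholders and the credentials JSON shape is an assumed
// example, not taken from this file.
cmd := AwsS3UploadCommand()
cmd.SetArgs([]string{
    "--filePath", "target/app.mtar",
    "--jsonCredentialsAWS", `{"access_key_id": "...", "secret_access_key": "...", "bucket": "my-bucket", "region": "eu-central-1"}`,
})
if err := cmd.Execute(); err != nil {
    log.Entry().WithError(err).Fatal("awsS3Upload failed")
}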