
format: do 'simplify' and check via CI (#509)

* `make format` now simplifies code (see the example below)

* reformat code

* check for simplified code via the linter
6543 authored 2021-11-14 22:33:45 +01:00, committed by GitHub
parent 2524c6900c
commit 86bb8f195c
4 changed files with 50 additions and 61 deletions
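
For reference, `gofmt -s` applies Go's standard source simplifications on top of normal formatting. The composite-literal case is the one visible in the test-file hunk at the end of this diff; the snippet below is a hypothetical illustration, not code from this repository:

// Hypothetical illustration of the composite-literal simplification applied by `gofmt -s`.
package main

import "fmt"

type fileMeta struct{ Name string }

func main() {
	// Before simplification:
	//   metas := []*fileMeta{&fileMeta{Name: ".woodpecker/lint.yml"}}
	// `gofmt -s` elides the redundant &fileMeta element type inside the slice literal:
	metas := []*fileMeta{{Name: ".woodpecker/lint.yml"}}
	fmt.Println(metas[0].Name)
}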

.golangci.yml

@@ -1,6 +1,6 @@
linters-settings:
gofmt:
simplify: false
simplify: true
linters:
disable-all: true
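
Setting `simplify: true` means the gofmt linter in golangci-lint now compares each file against `gofmt -s` output instead of plain `gofmt`, so unsimplified constructs fail CI. A hypothetical snippet (not from the repository) showing two further patterns it would flag until rewritten:

// Hypothetical file; the comments show the pre-simplification forms that get reported.
package main

import "fmt"

func main() {
	s := []int{1, 2, 3}

	// Reported before: tail := s[1:len(s)]  (redundant len() in the high bound)
	tail := s[1:]

	// Reported before: for i, _ := range s  (blank value in the range clause)
	for i := range s {
		fmt.Println(i, tail)
	}
}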

Makefile

@@ -26,7 +26,7 @@ vendor:
go mod vendor
format:
@gofmt -w ${GOFILES_NOVENDOR}
@gofmt -s -w ${GOFILES_NOVENDOR}
.PHONY: clean
clean:
@@ -36,7 +36,7 @@ clean:
.PHONY: lint
lint:
@echo "Running golangci-lint"
go run vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go run
go run vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go run --timeout 5m
@echo "Running zerolog linter"
go run vendor/github.com/rs/zerolog/cmd/lint/lint.go github.com/woodpecker-ci/woodpecker/cmd/agent
go run vendor/github.com/rs/zerolog/cmd/lint/lint.go github.com/woodpecker-ci/woodpecker/cmd/cli

agent runner (Go)

@@ -93,27 +93,22 @@ func (r *Runner) Run(ctx context.Context) error {
Str("id", work.ID).
Logger()
logger.Debug().
Msg("received execution")
logger.Debug().Msg("received execution")
ctx, cancel := context.WithTimeout(ctxmeta, timeout)
defer cancel()
cancelled := abool.New()
go func() {
logger.Debug().
Msg("listen for cancel signal")
logger.Debug().Msg("listen for cancel signal")
if werr := r.client.Wait(ctx, work.ID); werr != nil {
cancelled.SetTo(true)
logger.Warn().
Err(werr).
Msg("cancel signal received")
logger.Warn().Err(werr).Msg("cancel signal received")
cancel()
} else {
logger.Debug().
Msg("stop listening for cancel signal")
logger.Debug().Msg("stop listening for cancel signal")
}
}()
@@ -121,13 +116,11 @@ func (r *Runner) Run(ctx context.Context) error {
for {
select {
case <-ctx.Done():
logger.Debug().
Msg("pipeline done")
logger.Debug().Msg("pipeline done")
return
case <-time.After(time.Minute):
logger.Debug().
Msg("pipeline lease renewed")
logger.Debug().Msg("pipeline lease renewed")
r.client.Extend(ctx, work.ID)
}
@@ -139,9 +132,7 @@ func (r *Runner) Run(ctx context.Context) error {
err = r.client.Init(ctxmeta, work.ID, state)
if err != nil {
logger.Error().
Err(err).
Msg("pipeline initialization failed")
logger.Error().Err(err).Msg("pipeline initialization failed")
}
var uploads sync.WaitGroup
@@ -173,30 +164,29 @@ func (r *Runner) Run(ctx context.Context) error {
loglogger.Debug().Msg("log stream copied")
file := &rpc.File{}
file.Mime = "application/json+logs"
file.Proc = proc.Alias
file.Name = "logs.json"
file.Data, _ = json.Marshal(logstream.Lines())
file.Size = len(file.Data)
file.Time = time.Now().Unix()
loglogger.Debug().
Msg("log stream uploading")
if serr := r.client.Upload(ctxmeta, work.ID, file); serr != nil {
loglogger.Error().
Err(serr).
Msg("log stream upload error")
data, err := json.Marshal(logstream.Lines())
if err != nil {
loglogger.Err(err).Msg("could not marshal logstream")
}
loglogger.Debug().
Msg("log stream upload complete")
file := &rpc.File{
Mime: "application/json+logs",
Proc: proc.Alias,
Name: "logs.json",
Data: data,
Size: len(data),
Time: time.Now().Unix(),
}
loglogger.Debug().Msg("log stream uploading")
if serr := r.client.Upload(ctxmeta, work.ID, file); serr != nil {
loglogger.Error().Err(serr).Msg("log stream upload error")
} else {
loglogger.Debug().Msg("log stream upload complete")
}
defer func() {
loglogger.Debug().
Msg("log stream closed")
loglogger.Debug().Msg("log stream closed")
uploads.Done()
}()
@@ -206,15 +196,20 @@ func (r *Runner) Run(ctx context.Context) error {
}
// TODO should be configurable
limitedPart = io.LimitReader(part, maxFileUpload)
file = &rpc.File{}
file.Mime = part.Header().Get("Content-Type")
file.Proc = proc.Alias
file.Name = part.FileName()
file.Data, _ = ioutil.ReadAll(limitedPart)
file.Size = len(file.Data)
file.Time = time.Now().Unix()
file.Meta = map[string]string{}
data, err = ioutil.ReadAll(limitedPart)
if err != nil {
loglogger.Err(err).Msg("could not read limited part")
}
file = &rpc.File{
Mime: part.Header().Get("Content-Type"),
Proc: proc.Alias,
Name: part.FileName(),
Data: data,
Size: len(data),
Time: time.Now().Unix(),
Meta: make(map[string]string),
}
for key, value := range part.Header() {
file.Meta[key] = value[0]
}
@@ -255,8 +250,7 @@ func (r *Runner) Run(ctx context.Context) error {
Finished: time.Now().Unix(),
}
defer func() {
proclogger.Debug().
Msg("update step status")
proclogger.Debug().Msg("update step status")
if uerr := r.client.Update(ctxmeta, work.ID, procState); uerr != nil {
proclogger.Debug().
@@ -264,8 +258,7 @@ func (r *Runner) Run(ctx context.Context) error {
Msg("update step status error")
}
proclogger.Debug().
Msg("update step status complete")
proclogger.Debug().Msg("update step status complete")
}()
if state.Process.Exited {
return nil
@@ -325,13 +318,11 @@ func (r *Runner) Run(ctx context.Context) error {
Int("exit_code", state.ExitCode).
Msg("pipeline complete")
logger.Debug().
Msg("uploading logs")
logger.Debug().Msg("uploading logs")
uploads.Wait()
logger.Debug().
Msg("uploading logs complete")
logger.Debug().Msg("uploading logs complete")
logger.Debug().
Str("error", state.Error).
@@ -340,11 +331,9 @@ func (r *Runner) Run(ctx context.Context) error {
err = r.client.Done(ctxmeta, work.ID, state)
if err != nil {
logger.Error().Err(err).
Msg("updating pipeline status failed")
logger.Error().Err(err).Msg("updating pipeline status failed")
} else {
logger.Debug().
Msg("updating pipeline status complete")
logger.Debug().Msg("updating pipeline status complete")
}
return nil
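
Beyond what `gofmt -s` rewrites on its own, this hunk also replaces field-by-field assignment with struct literals and checks errors that were previously discarded with `_`. A minimal sketch of that pattern, using a hypothetical `file` type in place of `rpc.File`:

// Hypothetical sketch of the struct-literal plus explicit-error pattern from the hunk above.
package main

import (
	"encoding/json"
	"log"
	"time"
)

type file struct {
	Mime string
	Name string
	Data []byte
	Size int
	Time int64
}

func buildLogFile(lines []string) *file {
	// Check the marshal error instead of discarding it.
	data, err := json.Marshal(lines)
	if err != nil {
		log.Printf("could not marshal logstream: %v", err)
	}
	// Initialize the upload record with a single struct literal.
	return &file{
		Mime: "application/json+logs",
		Name: "logs.json",
		Data: data,
		Size: len(data),
		Time: time.Now().Unix(),
	}
}

func main() {
	f := buildLogFile([]string{"step started", "step finished"})
	log.Printf("%s: %d bytes", f.Name, f.Size)
}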

pipeline builder test (Go)

@@ -186,12 +186,12 @@ func TestPipelineName(t *testing.T) {
Regs: []*model.Registry{},
Link: "",
Yamls: []*remote.FileMeta{
&remote.FileMeta{Name: ".woodpecker/lint.yml", Data: []byte(`
{Name: ".woodpecker/lint.yml", Data: []byte(`
pipeline:
build:
image: scratch
`)},
&remote.FileMeta{Name: ".woodpecker/.test.yml", Data: []byte(`
{Name: ".woodpecker/.test.yml", Data: []byte(`
pipeline:
build:
image: scratch