1
0
mirror of https://github.com/woodpecker-ci/woodpecker.git synced 2025-11-23 21:44:44 +02:00

Rework log streaming and related functions (#1802)

closes #1801
closes #1815
closes #1144
closes #983
closes #557
closes #1827
regression of #1791

# TODO
- [x] adjust log model
- [x] add migration for logs
- [x] send log line via grpc using step-id
- [x] save log-line to db
- [x] stream log-lines to UI
- [x] use less structs for log-data
- [x] make web UI work
  - [x] display logs loaded from db
  - [x] display streaming logs
- [ ] ~~make migration work~~ -> dedicated pull (#1828)

# TESTED
- [x] new logs are stored in database
- [x] log retrieval via cli (of new logs) works
- [x] log streaming works (tested via curl & webui)
- [x] log retrieval via web (of new logs) works

---------

Co-authored-by: 6543 <6543@obermui.de>
This commit is contained in:
Anbraten
2023-06-06 09:52:08 +02:00
committed by GitHub
parent 971cb52032
commit 556607b525
49 changed files with 1066 additions and 990 deletions

View File

@@ -50,9 +50,8 @@ func (r *Runner) createLogger(_ context.Context, logger zerolog.Logger, uploads
loglogger.Debug().Msg("log stream opened") loglogger.Debug().Msg("log stream opened")
limitedPart := io.LimitReader(part, maxLogsUpload) logStream := rpc.NewLineWriter(r.client, step.UUID, secrets...)
logStream := rpc.NewLineWriter(r.client, work.ID, step.Alias, secrets...) if _, err := io.Copy(logStream, part); err != nil {
if _, err := io.Copy(logStream, limitedPart); err != nil {
log.Error().Err(err).Msg("copy limited logStream part") log.Error().Err(err).Msg("copy limited logStream part")
} }

View File

@@ -278,14 +278,14 @@ func (c *client) Update(ctx context.Context, id string, state rpc.State) (err er
} }
// Log writes the pipeline log entry. // Log writes the pipeline log entry.
func (c *client) Log(ctx context.Context, id string, line *rpc.Line) (err error) { func (c *client) Log(ctx context.Context, logEntry *rpc.LogEntry) (err error) {
req := new(proto.LogRequest) req := new(proto.LogRequest)
req.Id = id req.LogEntry = new(proto.LogEntry)
req.Line = new(proto.Line) req.LogEntry.StepUuid = logEntry.StepUUID
req.Line.Out = line.Out req.LogEntry.Data = logEntry.Data
req.Line.Pos = int32(line.Pos) req.LogEntry.Line = int32(logEntry.Line)
req.Line.Step = line.Step req.LogEntry.Time = logEntry.Time
req.Line.Time = line.Time req.LogEntry.Type = int32(logEntry.Type)
for { for {
_, err = c.client.Log(ctx, req) _, err = c.client.Log(ctx, req)
if err == nil { if err == nil {

View File

@@ -31,14 +31,6 @@ import (
"github.com/woodpecker-ci/woodpecker/shared/utils" "github.com/woodpecker-ci/woodpecker/shared/utils"
) )
// TODO: Implement log streaming.
// Until now we need to limit the size of the logs and files that we upload.
// The maximum grpc payload size is 4194304. So we need to set these limits below the maximum.
const (
maxLogsUpload = 2000000 // this is per step
maxFileUpload = 1000000
)
type Runner struct { type Runner struct {
client rpc.Peer client rpc.Peer
filter rpc.Filter filter rpc.Filter

View File

@@ -248,7 +248,7 @@ var defaultLogger = pipeline.LogFunc(func(step *backendTypes.Step, rc multipart.
return err return err
} }
logStream := NewLineWriter(step.Alias) logStream := NewLineWriter(step.Alias, step.UUID)
_, err = io.Copy(logStream, part) _, err = io.Copy(logStream, part)
return err return err
}) })

View File

@@ -19,60 +19,44 @@ import (
"os" "os"
"strings" "strings"
"time" "time"
)
// Identifies the type of line in the logs. "github.com/woodpecker-ci/woodpecker/pipeline/rpc"
const (
LineStdout int = iota
LineStderr
LineExitCode
LineMetadata
LineProgress
) )
// Line is a line of console output.
type Line struct {
Step string `json:"step,omitempty"`
Time int64 `json:"time,omitempty"`
Type int `json:"type,omitempty"`
Pos int `json:"pos,omitempty"`
Out string `json:"out,omitempty"`
}
// LineWriter sends logs to the client. // LineWriter sends logs to the client.
type LineWriter struct { type LineWriter struct {
name string stepName string
num int stepUUID string
now time.Time num int
rep *strings.Replacer now time.Time
lines []*Line rep *strings.Replacer
lines []*rpc.LogEntry
} }
// NewLineWriter returns a new line reader. // NewLineWriter returns a new line reader.
func NewLineWriter(name string) *LineWriter { func NewLineWriter(stepName, stepUUID string) *LineWriter {
w := new(LineWriter) return &LineWriter{
w.name = name stepName: stepName,
w.num = 0 stepUUID: stepUUID,
w.now = time.Now().UTC() now: time.Now().UTC(),
}
return w
} }
func (w *LineWriter) Write(p []byte) (n int, err error) { func (w *LineWriter) Write(p []byte) (n int, err error) {
out := string(p) data := string(p)
if w.rep != nil { if w.rep != nil {
out = w.rep.Replace(out) data = w.rep.Replace(data)
} }
line := &Line{ line := &rpc.LogEntry{
Out: out, Data: data,
Step: w.name, StepUUID: w.stepUUID,
Pos: w.num, Line: w.num,
Time: int64(time.Since(w.now).Seconds()), Time: int64(time.Since(w.now).Seconds()),
Type: LineStdout, Type: rpc.LogEntryStdout,
} }
fmt.Fprintf(os.Stderr, "[%s:L%d:%ds] %s", w.name, w.num, int64(time.Since(w.now).Seconds()), out) fmt.Fprintf(os.Stderr, "[%s:L%d:%ds] %s", w.stepName, w.num, int64(time.Since(w.now).Seconds()), data)
w.num++ w.num++

View File

@@ -27,7 +27,7 @@ import (
var pipelineLogsCmd = &cli.Command{ var pipelineLogsCmd = &cli.Command{
Name: "logs", Name: "logs",
Usage: "show pipeline logs", Usage: "show pipeline logs",
ArgsUsage: "<repo/name> [pipeline] [step]", ArgsUsage: "<repo/name> [pipeline] [stepID]",
Action: pipelineLogs, Action: pipelineLogs,
Flags: common.GlobalFlags, Flags: common.GlobalFlags,
} }
@@ -54,13 +54,13 @@ func pipelineLogs(c *cli.Context) error {
return err return err
} }
logs, err := client.PipelineLogs(owner, name, number, step) logs, err := client.StepLogEntries(owner, name, number, step)
if err != nil { if err != nil {
return err return err
} }
for _, log := range logs { for _, log := range logs {
fmt.Print(log.Output) fmt.Print(string(log.Data))
} }
return nil return nil

View File

@@ -751,6 +751,52 @@ const docTemplate = `{
} }
} }
}, },
"/logs/{owner}/{name}/{pipeline}/{stepID}": {
"get": {
"produces": [
"text/plain"
],
"tags": [
"Pipeline logs"
],
"summary": "Log stream",
"parameters": [
{
"type": "string",
"description": "the repository owner's name",
"name": "owner",
"in": "path",
"required": true
},
{
"type": "string",
"description": "the repository name",
"name": "name",
"in": "path",
"required": true
},
{
"type": "integer",
"description": "the number of the pipeline",
"name": "pipeline",
"in": "path",
"required": true
},
{
"type": "integer",
"description": "the step id",
"name": "stepID",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "OK"
}
}
}
},
"/orgs/{owner}/permissions": { "/orgs/{owner}/permissions": {
"get": { "get": {
"produces": [ "produces": [
@@ -1795,10 +1841,10 @@ const docTemplate = `{
} }
} }
}, },
"/repos/{owner}/{name}/logs/{number}/{pid}": { "/repos/{owner}/{name}/logs/{number}/{stepID}": {
"get": { "get": {
"produces": [ "produces": [
"text/plain" "application/json"
], ],
"tags": [ "tags": [
"Pipeline logs" "Pipeline logs"
@@ -1836,76 +1882,21 @@ const docTemplate = `{
}, },
{ {
"type": "integer", "type": "integer",
"description": "the pipeline id", "description": "the step id",
"name": "pid", "name": "stepID",
"in": "path", "in": "path",
"required": true "required": true
} }
], ],
"responses": { "responses": {
"200": { "200": {
"description": "OK" "description": "OK",
} "schema": {
} "type": "array",
} "items": {
}, "$ref": "#/definitions/LogEntry"
"/repos/{owner}/{name}/logs/{number}/{pid}/{step}": { }
"get": { }
"produces": [
"text/plain"
],
"tags": [
"Pipeline logs"
],
"summary": "Log information per step",
"parameters": [
{
"type": "string",
"default": "Bearer \u003cpersonal access token\u003e",
"description": "Insert your personal access token",
"name": "Authorization",
"in": "header",
"required": true
},
{
"type": "string",
"description": "the repository owner's name",
"name": "owner",
"in": "path",
"required": true
},
{
"type": "string",
"description": "the repository name",
"name": "name",
"in": "path",
"required": true
},
{
"type": "integer",
"description": "the number of the pipeline",
"name": "number",
"in": "path",
"required": true
},
{
"type": "integer",
"description": "the pipeline id",
"name": "pid",
"in": "path",
"required": true
},
{
"type": "integer",
"description": "the step name",
"name": "step",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "OK"
} }
} }
} }
@@ -3800,6 +3791,32 @@ const docTemplate = `{
} }
} }
}, },
"LogEntry": {
"type": "object",
"properties": {
"data": {
"type": "array",
"items": {
"type": "integer"
}
},
"id": {
"type": "integer"
},
"line": {
"type": "integer"
},
"step_id": {
"type": "integer"
},
"time": {
"type": "integer"
},
"type": {
"$ref": "#/definitions/model.LogEntryType"
}
}
},
"OrgPerm": { "OrgPerm": {
"type": "object", "type": "object",
"properties": { "properties": {
@@ -4233,6 +4250,9 @@ const docTemplate = `{
}, },
"state": { "state": {
"$ref": "#/definitions/StatusValue" "$ref": "#/definitions/StatusValue"
},
"uuid": {
"type": "string"
} }
} }
}, },
@@ -4320,6 +4340,23 @@ const docTemplate = `{
"EventCron", "EventCron",
"EventManual" "EventManual"
] ]
},
"model.LogEntryType": {
"type": "integer",
"enum": [
0,
1,
2,
3,
4
],
"x-enum-varnames": [
"LogEntryStdout",
"LogEntryStderr",
"LogEntryExitCode",
"LogEntryMetadata",
"LogEntryProgress"
]
} }
} }
}` }`

2
go.mod
View File

@@ -23,6 +23,7 @@ require (
github.com/golang-jwt/jwt/v4 v4.5.0 github.com/golang-jwt/jwt/v4 v4.5.0
github.com/google/go-github/v39 v39.2.0 github.com/google/go-github/v39 v39.2.0
github.com/google/tink/go v1.7.0 github.com/google/tink/go v1.7.0
github.com/google/uuid v1.3.0
github.com/gorilla/securecookie v1.1.1 github.com/gorilla/securecookie v1.1.1
github.com/joho/godotenv v1.5.1 github.com/joho/godotenv v1.5.1
github.com/lafriks/ttlcache/v3 v3.2.0 github.com/lafriks/ttlcache/v3 v3.2.0
@@ -94,7 +95,6 @@ require (
github.com/google/go-cmp v0.5.9 // indirect github.com/google/go-cmp v0.5.9 // indirect
github.com/google/go-querystring v1.1.0 // indirect github.com/google/go-querystring v1.1.0 // indirect
github.com/google/gofuzz v1.1.0 // indirect github.com/google/gofuzz v1.1.0 // indirect
github.com/google/uuid v1.3.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-hclog v1.2.0 // indirect github.com/hashicorp/go-hclog v1.2.0 // indirect
github.com/hashicorp/go-retryablehttp v0.7.2 // indirect github.com/hashicorp/go-retryablehttp v0.7.2 // indirect

View File

@@ -30,6 +30,7 @@ import (
func toConfig(step *types.Step) *container.Config { func toConfig(step *types.Step) *container.Config {
config := &container.Config{ config := &container.Config{
Image: step.Image, Image: step.Image,
Labels: map[string]string{"wp_uuid": step.UUID},
WorkingDir: step.WorkingDir, WorkingDir: step.WorkingDir,
AttachStdout: true, AttachStdout: true,
AttachStderr: true, AttachStderr: true,

View File

@@ -3,6 +3,7 @@ package types
// Step defines a container process. // Step defines a container process.
type Step struct { type Step struct {
Name string `json:"name"` Name string `json:"name"`
UUID string `json:"uuid"`
Alias string `json:"alias,omitempty"` Alias string `json:"alias,omitempty"`
Image string `json:"image,omitempty"` Image string `json:"image,omitempty"`
Pull bool `json:"pull,omitempty"` Pull bool `json:"pull,omitempty"`

View File

@@ -6,6 +6,7 @@ import (
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/google/uuid"
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
backend_types "github.com/woodpecker-ci/woodpecker/pipeline/backend/types" backend_types "github.com/woodpecker-ci/woodpecker/pipeline/backend/types"
@@ -16,6 +17,8 @@ import (
func (c *Compiler) createProcess(name string, container *yaml_types.Container, section string) *backend_types.Step { func (c *Compiler) createProcess(name string, container *yaml_types.Container, section string) *backend_types.Step {
var ( var (
uuid = uuid.New()
detached bool detached bool
workingdir string workingdir string
@@ -153,6 +156,7 @@ func (c *Compiler) createProcess(name string, container *yaml_types.Container, s
return &backend_types.Step{ return &backend_types.Step{
Name: name, Name: name,
UUID: uuid.String(),
Alias: container.Name, Alias: container.Name,
Image: container.Image, Image: container.Image,
Pull: container.Pull, Pull: container.Pull,

View File

@@ -1,121 +0,0 @@
// Copyright 2022 Woodpecker Authors
// Copyright 2011 Drone.IO Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package rpc
import (
"context"
"fmt"
"strings"
"time"
"github.com/rs/zerolog/log"
"github.com/woodpecker-ci/woodpecker/pipeline/shared"
)
// Identifies the type of line in the logs.
const (
LineStdout int = iota
LineStderr
LineExitCode
LineMetadata
LineProgress
)
// Line is a line of console output.
type Line struct {
Step string `json:"step,omitempty"`
Time int64 `json:"time,omitempty"`
Type int `json:"type,omitempty"`
Pos int `json:"pos,omitempty"`
Out string `json:"out,omitempty"`
}
func (l *Line) String() string {
switch l.Type {
case LineExitCode:
return fmt.Sprintf("[%s] exit code %s", l.Step, l.Out)
default:
return fmt.Sprintf("[%s:L%v:%vs] %s", l.Step, l.Pos, l.Time, l.Out)
}
}
// LineWriter sends logs to the client.
type LineWriter struct {
peer Peer
id string
name string
num int
now time.Time
rep *strings.Replacer
lines []*Line
}
// NewLineWriter returns a new line reader.
func NewLineWriter(peer Peer, id, name string, secret ...string) *LineWriter {
return &LineWriter{
peer: peer,
id: id,
name: name,
now: time.Now().UTC(),
rep: shared.NewSecretsReplacer(secret),
lines: nil,
}
}
func (w *LineWriter) Write(p []byte) (n int, err error) {
out := string(p)
if w.rep != nil {
out = w.rep.Replace(out)
}
log.Trace().Str("name", w.name).Str("ID", w.id).Msgf("grpc write line: %s", out)
line := &Line{
Out: out,
Step: w.name,
Pos: w.num,
Time: int64(time.Since(w.now).Seconds()),
Type: LineStdout,
}
if err := w.peer.Log(context.Background(), w.id, line); err != nil {
log.Error().Err(err).Msgf("fail to write pipeline log to peer '%s'", w.id)
}
w.num++
// for _, part := range bytes.Split(p, []byte{'\n'}) {
// line := &Line{
// Out: string(part),
// Step: w.name,
// Pos: w.num,
// Time: int64(time.Since(w.now).Seconds()),
// Type: LineStdout,
// }
// w.peer.Log(context.Background(), w.id, line)
// w.num++
// }
w.lines = append(w.lines, line)
return len(p), nil
}
// Lines returns the line history
func (w *LineWriter) Lines() []*Line {
return w.lines
}
// Clear clears the line history
func (w *LineWriter) Clear() {
w.lines = w.lines[:0]
}

108
pipeline/rpc/log_entry.go Normal file
View File

@@ -0,0 +1,108 @@
// Copyright 2022 Woodpecker Authors
// Copyright 2011 Drone.IO Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package rpc
import (
"context"
"fmt"
"strings"
"time"
"github.com/rs/zerolog/log"
"github.com/woodpecker-ci/woodpecker/pipeline/shared"
)
// Identifies the type of line in the logs.
const (
LogEntryStdout int = iota
LogEntryStderr
LogEntryExitCode
LogEntryMetadata
LogEntryProgress
)
// Line is a line of console output.
type LogEntry struct {
StepUUID string `json:"step_uuid,omitempty"`
Time int64 `json:"time,omitempty"`
Type int `json:"type,omitempty"`
Line int `json:"line,omitempty"`
Data string `json:"data,omitempty"`
}
func (l *LogEntry) String() string {
switch l.Type {
case LogEntryExitCode:
return fmt.Sprintf("[%s] exit code %s", l.StepUUID, l.Data)
default:
return fmt.Sprintf("[%s:L%v:%vs] %s", l.StepUUID, l.Line, l.Time, l.Data)
}
}
// LineWriter sends logs to the client.
type LineWriter struct {
peer Peer
stepUUID string
num int
now time.Time
rep *strings.Replacer
lines []*LogEntry
}
// NewLineWriter returns a new line reader.
func NewLineWriter(peer Peer, stepUUID string, secret ...string) *LineWriter {
return &LineWriter{
peer: peer,
stepUUID: stepUUID,
now: time.Now().UTC(),
rep: shared.NewSecretsReplacer(secret),
lines: nil,
}
}
func (w *LineWriter) Write(p []byte) (n int, err error) {
data := string(p)
if w.rep != nil {
data = w.rep.Replace(data)
}
log.Trace().Str("step-uuid", w.stepUUID).Msgf("grpc write line: %s", data)
line := &LogEntry{
Data: data,
StepUUID: w.stepUUID,
Time: int64(time.Since(w.now).Seconds()),
Type: LogEntryStdout,
Line: w.num,
}
if err := w.peer.Log(context.Background(), line); err != nil {
log.Error().Err(err).Str("step-uuid", w.stepUUID).Msg("fail to write pipeline log to peer")
}
w.num++
w.lines = append(w.lines, line)
return len(p), nil
}
// Lines returns the line history
func (w *LineWriter) Lines() []*LogEntry {
return w.lines
}
// Clear clears the line history
func (w *LineWriter) Clear() {
w.lines = w.lines[:0]
}

View File

@@ -18,14 +18,14 @@ import (
"testing" "testing"
) )
func TestLine(t *testing.T) { func TestLogEntry(t *testing.T) {
line := Line{ line := LogEntry{
Step: "redis", StepUUID: "e9ea76a5-44a1-4059-9c4a-6956c478b26d",
Time: 60, Time: 60,
Pos: 1, Line: 1,
Out: "starting redis server", Data: "starting redis server",
} }
got, want := line.String(), "[redis:L1:60s] starting redis server" got, want := line.String(), "[e9ea76a5-44a1-4059-9c4a-6956c478b26d:L1:60s] starting redis server"
if got != want { if got != want {
t.Errorf("Wanted line string %q, got %q", want, got) t.Errorf("Wanted line string %q, got %q", want, got)
} }

View File

@@ -74,7 +74,7 @@ type Peer interface {
Update(c context.Context, id string, state State) error Update(c context.Context, id string, state State) error
// Log writes the pipeline log entry. // Log writes the pipeline log entry.
Log(c context.Context, id string, line *Line) error Log(c context.Context, logEntry *LogEntry) error
// RegisterAgent register our agent to the server // RegisterAgent register our agent to the server
RegisterAgent(ctx context.Context, platform, backend, version string, capacity int) (int64, error) RegisterAgent(ctx context.Context, platform, backend, version string, capacity int) (int64, error)

View File

@@ -16,4 +16,4 @@ package proto
// Version is the version of the woodpecker.proto file, // Version is the version of the woodpecker.proto file,
// !IMPORTANT! increased by 1 each time it get changed !IMPORTANT! // !IMPORTANT! increased by 1 each time it get changed !IMPORTANT!
const Version int32 = 2 const Version int32 = 3

View File

@@ -122,19 +122,20 @@ func (x *State) GetError() string {
return "" return ""
} }
type Line struct { type LogEntry struct {
state protoimpl.MessageState state protoimpl.MessageState
sizeCache protoimpl.SizeCache sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields unknownFields protoimpl.UnknownFields
Step string `protobuf:"bytes,1,opt,name=step,proto3" json:"step,omitempty"` StepUuid string `protobuf:"bytes,1,opt,name=step_uuid,json=stepUuid,proto3" json:"step_uuid,omitempty"`
Time int64 `protobuf:"varint,2,opt,name=time,proto3" json:"time,omitempty"` Time int64 `protobuf:"varint,2,opt,name=time,proto3" json:"time,omitempty"`
Pos int32 `protobuf:"varint,3,opt,name=pos,proto3" json:"pos,omitempty"` Line int32 `protobuf:"varint,3,opt,name=line,proto3" json:"line,omitempty"`
Out string `protobuf:"bytes,4,opt,name=out,proto3" json:"out,omitempty"` Type int32 `protobuf:"varint,4,opt,name=type,proto3" json:"type,omitempty"` // 0 = stdout, 1 = stderr, 2 = exit-code, 3 = metadata, 4 = progress
Data string `protobuf:"bytes,5,opt,name=data,proto3" json:"data,omitempty"`
} }
func (x *Line) Reset() { func (x *LogEntry) Reset() {
*x = Line{} *x = LogEntry{}
if protoimpl.UnsafeEnabled { if protoimpl.UnsafeEnabled {
mi := &file_woodpecker_proto_msgTypes[1] mi := &file_woodpecker_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
@@ -142,13 +143,13 @@ func (x *Line) Reset() {
} }
} }
func (x *Line) String() string { func (x *LogEntry) String() string {
return protoimpl.X.MessageStringOf(x) return protoimpl.X.MessageStringOf(x)
} }
func (*Line) ProtoMessage() {} func (*LogEntry) ProtoMessage() {}
func (x *Line) ProtoReflect() protoreflect.Message { func (x *LogEntry) ProtoReflect() protoreflect.Message {
mi := &file_woodpecker_proto_msgTypes[1] mi := &file_woodpecker_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil { if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
@@ -160,35 +161,42 @@ func (x *Line) ProtoReflect() protoreflect.Message {
return mi.MessageOf(x) return mi.MessageOf(x)
} }
// Deprecated: Use Line.ProtoReflect.Descriptor instead. // Deprecated: Use LogEntry.ProtoReflect.Descriptor instead.
func (*Line) Descriptor() ([]byte, []int) { func (*LogEntry) Descriptor() ([]byte, []int) {
return file_woodpecker_proto_rawDescGZIP(), []int{1} return file_woodpecker_proto_rawDescGZIP(), []int{1}
} }
func (x *Line) GetStep() string { func (x *LogEntry) GetStepUuid() string {
if x != nil { if x != nil {
return x.Step return x.StepUuid
} }
return "" return ""
} }
func (x *Line) GetTime() int64 { func (x *LogEntry) GetTime() int64 {
if x != nil { if x != nil {
return x.Time return x.Time
} }
return 0 return 0
} }
func (x *Line) GetPos() int32 { func (x *LogEntry) GetLine() int32 {
if x != nil { if x != nil {
return x.Pos return x.Line
} }
return 0 return 0
} }
func (x *Line) GetOut() string { func (x *LogEntry) GetType() int32 {
if x != nil { if x != nil {
return x.Out return x.Type
}
return 0
}
func (x *LogEntry) GetData() string {
if x != nil {
return x.Data
} }
return "" return ""
} }
@@ -614,8 +622,7 @@ type LogRequest struct {
sizeCache protoimpl.SizeCache sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields unknownFields protoimpl.UnknownFields
Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` LogEntry *LogEntry `protobuf:"bytes,1,opt,name=logEntry,proto3" json:"logEntry,omitempty"`
Line *Line `protobuf:"bytes,2,opt,name=line,proto3" json:"line,omitempty"`
} }
func (x *LogRequest) Reset() { func (x *LogRequest) Reset() {
@@ -650,16 +657,9 @@ func (*LogRequest) Descriptor() ([]byte, []int) {
return file_woodpecker_proto_rawDescGZIP(), []int{10} return file_woodpecker_proto_rawDescGZIP(), []int{10}
} }
func (x *LogRequest) GetId() string { func (x *LogRequest) GetLogEntry() *LogEntry {
if x != nil { if x != nil {
return x.Id return x.LogEntry
}
return ""
}
func (x *LogRequest) GetLine() *Line {
if x != nil {
return x.Line
} }
return nil return nil
} }
@@ -1101,126 +1101,128 @@ var file_woodpecker_proto_rawDesc = []byte{
0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68,
0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68,
0x65, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x65, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28,
0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x52, 0x0a, 0x04, 0x4c, 0x69, 0x6e, 0x65, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x77, 0x0a, 0x08, 0x4c, 0x6f, 0x67, 0x45,
0x12, 0x12, 0x0a, 0x04, 0x73, 0x74, 0x65, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x1b, 0x0a, 0x09, 0x73, 0x74, 0x65, 0x70, 0x5f, 0x75, 0x75, 0x69,
0x73, 0x74, 0x65, 0x70, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x73, 0x74, 0x65, 0x70, 0x55, 0x75, 0x69,
0x28, 0x03, 0x52, 0x04, 0x74, 0x69, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x70, 0x6f, 0x73, 0x18, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52,
0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x03, 0x70, 0x6f, 0x73, 0x12, 0x10, 0x0a, 0x03, 0x6f, 0x75, 0x04, 0x74, 0x69, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x03, 0x20,
0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6f, 0x75, 0x74, 0x22, 0x76, 0x0a, 0x06, 0x01, 0x28, 0x05, 0x52, 0x04, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70,
0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x31, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a,
0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x46, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x64, 0x61, 0x74,
0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x61, 0x22, 0x76, 0x0a, 0x06, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x31, 0x0a, 0x06, 0x6c,
0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x1a, 0x39, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72,
0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x6f, 0x74, 0x6f, 0x2e, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c,
0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x1a, 0x39,
0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a,
0x3a, 0x02, 0x38, 0x01, 0x22, 0x4e, 0x0a, 0x08, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12,
0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05,
0x12, 0x18, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x4e, 0x0a, 0x08, 0x50, 0x69, 0x70,
0x03, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28,
0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x70, 0x61, 0x79, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74,
0x6c, 0x6f, 0x61, 0x64, 0x22, 0x34, 0x0a, 0x0b, 0x4e, 0x65, 0x78, 0x74, 0x52, 0x65, 0x71, 0x75, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x12,
0x65, 0x73, 0x74, 0x12, 0x25, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x01, 0x20, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c,
0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x46, 0x69, 0x6c, 0x74, 0x52, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x22, 0x34, 0x0a, 0x0b, 0x4e, 0x65, 0x78,
0x65, 0x72, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x22, 0x41, 0x0a, 0x0b, 0x49, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x25, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74,
0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x22, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x2e, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x22,
0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x41, 0x0a, 0x0b, 0x49, 0x6e, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e,
0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 0x1d, 0x0a, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x22,
0x0b, 0x57, 0x61, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e,
0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x41, 0x0a, 0x0b, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61,
0x44, 0x6f, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x74, 0x65, 0x22, 0x1d, 0x0a, 0x0b, 0x57, 0x61, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x22, 0x0a, 0x05, 0x73,
0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22,
0x1f, 0x0a, 0x0d, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64,
0x22, 0x43, 0x0a, 0x0d, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69,
0x64, 0x12, 0x22, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x64, 0x22, 0x41, 0x0a, 0x0b, 0x44, 0x6f, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
0x32, 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64,
0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 0x3d, 0x0a, 0x0a, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x12, 0x22, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32,
0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73,
0x02, 0x69, 0x64, 0x12, 0x1f, 0x0a, 0x04, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x74, 0x61, 0x74, 0x65, 0x22, 0x1f, 0x0a, 0x0d, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x52, 0x65,
0x0b, 0x32, 0x0b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4c, 0x69, 0x6e, 0x65, 0x52, 0x04, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28,
0x6c, 0x69, 0x6e, 0x65, 0x22, 0x07, 0x0a, 0x05, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x2d, 0x0a, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x43, 0x0a, 0x0d, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52,
0x13, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x71, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01,
0x75, 0x65, 0x73, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x22, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18,
0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x82, 0x01, 0x0a, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x74,
0x14, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 0x39, 0x0a, 0x0a, 0x4c, 0x6f,
0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2b, 0x0a, 0x08, 0x6c, 0x6f, 0x67, 0x45,
0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x6c, 0x61, 0x74, 0x66, 0x6f, 0x72, 0x6e, 0x74, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x70, 0x72, 0x6f,
0x6d, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x61, 0x70, 0x61, 0x63, 0x69, 0x74, 0x79, 0x18, 0x02, 0x20, 0x74, 0x6f, 0x2e, 0x4c, 0x6f, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6c, 0x6f, 0x67,
0x01, 0x28, 0x05, 0x52, 0x08, 0x63, 0x61, 0x70, 0x61, 0x63, 0x69, 0x74, 0x79, 0x12, 0x18, 0x0a, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x22, 0x07, 0x0a, 0x05, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x2d,
0x07, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x0a, 0x13, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65,
0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18,
0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x82, 0x01,
0x6e, 0x22, 0x5b, 0x0a, 0x0f, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x0a, 0x14, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x52,
0x6f, 0x6e, 0x73, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x67, 0x72, 0x70, 0x63, 0x5f, 0x76, 0x65, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x6c, 0x61, 0x74, 0x66, 0x6f,
0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0b, 0x67, 0x72, 0x70, 0x63, 0x72, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x6c, 0x61, 0x74, 0x66, 0x6f,
0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x25, 0x0a, 0x0e, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x6d, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x61, 0x70, 0x61, 0x63, 0x69, 0x74, 0x79, 0x18, 0x02,
0x72, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x63, 0x61, 0x70, 0x61, 0x63, 0x69, 0x74, 0x79, 0x12, 0x18,
0x0d, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x3b, 0x0a, 0x07, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52,
0x0a, 0x0c, 0x4e, 0x65, 0x78, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2b, 0x07, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73,
0x0a, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69,
0x32, 0x0f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x6f, 0x6e, 0x22, 0x5b, 0x0a, 0x0f, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73,
0x65, 0x52, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x32, 0x0a, 0x15, 0x52, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x67, 0x72, 0x70, 0x63, 0x5f, 0x76, 0x65,
0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0b, 0x67, 0x72, 0x70,
0x6f, 0x6e, 0x73, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x63, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x25, 0x0a, 0x0e, 0x73, 0x65, 0x72, 0x76,
0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x22, 0x65, 0x72, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09,
0x49, 0x0a, 0x0b, 0x41, 0x75, 0x74, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x52, 0x0d, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22,
0x0a, 0x0b, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x3b, 0x0a, 0x0c, 0x4e, 0x65, 0x78, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12,
0x01, 0x28, 0x09, 0x52, 0x0a, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x2b, 0x0a, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
0x19, 0x0a, 0x08, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69,
0x03, 0x52, 0x07, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x22, 0x64, 0x0a, 0x0c, 0x41, 0x75, 0x6e, 0x65, 0x52, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x32, 0x0a, 0x15,
0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73,
0x61, 0x74, 0x75, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x5f, 0x69,
0x75, 0x73, 0x12, 0x19, 0x0a, 0x08, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x49, 0x64,
0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x21, 0x0a, 0x22, 0x49, 0x0a, 0x0b, 0x41, 0x75, 0x74, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12,
0x0c, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x1f, 0x0a, 0x0b, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01,
0x01, 0x28, 0x09, 0x52, 0x0b, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x54, 0x6f, 0x6b, 0x65, 0x6e,
0x32, 0x8a, 0x04, 0x0a, 0x0a, 0x57, 0x6f, 0x6f, 0x64, 0x70, 0x65, 0x63, 0x6b, 0x65, 0x72, 0x12, 0x12, 0x19, 0x0a, 0x08, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01,
0x31, 0x0a, 0x07, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x28, 0x03, 0x52, 0x07, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x22, 0x64, 0x0a, 0x0c, 0x41,
0x74, 0x6f, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x75, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73,
0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x74, 0x61,
0x22, 0x00, 0x12, 0x31, 0x0a, 0x04, 0x4e, 0x65, 0x78, 0x74, 0x12, 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x75, 0x73, 0x12, 0x19, 0x0a, 0x08, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18,
0x74, 0x6f, 0x2e, 0x4e, 0x65, 0x78, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x13, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x21,
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4e, 0x65, 0x78, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x0a, 0x0c, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x03,
0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x2a, 0x0a, 0x04, 0x49, 0x6e, 0x69, 0x74, 0x12, 0x12, 0x2e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x49, 0x6e, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x6e, 0x32, 0x8a, 0x04, 0x0a, 0x0a, 0x57, 0x6f, 0x6f, 0x64, 0x70, 0x65, 0x63, 0x6b, 0x65, 0x72,
0x74, 0x1a, 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x12, 0x31, 0x0a, 0x07, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x0c, 0x2e, 0x70, 0x72,
0x00, 0x12, 0x2a, 0x0a, 0x04, 0x57, 0x61, 0x69, 0x74, 0x12, 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x2e, 0x57, 0x61, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0c, 0x2e, 0x6f, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x2a, 0x0a, 0x65, 0x22, 0x00, 0x12, 0x31, 0x0a, 0x04, 0x4e, 0x65, 0x78, 0x74, 0x12, 0x12, 0x2e, 0x70, 0x72,
0x04, 0x44, 0x6f, 0x6e, 0x65, 0x12, 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x44, 0x6f, 0x6f, 0x74, 0x6f, 0x2e, 0x4e, 0x65, 0x78, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4e, 0x65, 0x78, 0x74, 0x52, 0x65, 0x73, 0x70,
0x6f, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x2e, 0x0a, 0x06, 0x45, 0x78, 0x74, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x2a, 0x0a, 0x04, 0x49, 0x6e, 0x69, 0x74, 0x12, 0x12,
0x65, 0x6e, 0x64, 0x12, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x49, 0x6e, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65,
0x6e, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x73, 0x74, 0x1a, 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79,
0x6f, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x2e, 0x0a, 0x06, 0x55, 0x70, 0x64, 0x22, 0x00, 0x12, 0x2a, 0x0a, 0x04, 0x57, 0x61, 0x69, 0x74, 0x12, 0x12, 0x2e, 0x70, 0x72, 0x6f,
0x61, 0x74, 0x65, 0x12, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x6f, 0x2e, 0x57, 0x61, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0c,
0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x2a,
0x6f, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x28, 0x0a, 0x03, 0x4c, 0x6f, 0x67, 0x0a, 0x04, 0x44, 0x6f, 0x6e, 0x65, 0x12, 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x44,
0x12, 0x11, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x6f, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0c, 0x2e, 0x70, 0x72, 0x6f,
0x65, 0x73, 0x74, 0x1a, 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x74, 0x6f, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x2e, 0x0a, 0x06, 0x45, 0x78,
0x79, 0x22, 0x00, 0x12, 0x4c, 0x0a, 0x0d, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x41, 0x74, 0x65, 0x6e, 0x64, 0x12, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x78, 0x74,
0x67, 0x65, 0x6e, 0x74, 0x12, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x67, 0x65, 0x6e, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0c, 0x2e, 0x70, 0x72, 0x6f,
0x69, 0x73, 0x74, 0x65, 0x72, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x6f, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x2e, 0x0a, 0x06, 0x55, 0x70,
0x74, 0x1a, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x64, 0x61, 0x74, 0x65, 0x12, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x55, 0x70, 0x64,
0x65, 0x72, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0c, 0x2e, 0x70, 0x72, 0x6f,
0x00, 0x12, 0x3a, 0x0a, 0x0c, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x74, 0x6f, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x28, 0x0a, 0x03, 0x4c, 0x6f,
0x68, 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x67, 0x12, 0x11, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71,
0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0c, 0x2e, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x6d, 0x70,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x32, 0x43, 0x0a, 0x74, 0x79, 0x22, 0x00, 0x12, 0x4c, 0x0a, 0x0d, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72,
0x0e, 0x57, 0x6f, 0x6f, 0x64, 0x70, 0x65, 0x63, 0x6b, 0x65, 0x72, 0x41, 0x75, 0x74, 0x68, 0x12, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x12, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65,
0x31, 0x0a, 0x04, 0x41, 0x75, 0x74, 0x68, 0x12, 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65,
0x41, 0x75, 0x74, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x67, 0x69, 0x73,
0x6f, 0x74, 0x6f, 0x2e, 0x41, 0x75, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x74, 0x65, 0x72, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
0x22, 0x00, 0x42, 0x38, 0x5a, 0x36, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x22, 0x00, 0x12, 0x3a, 0x0a, 0x0c, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x48, 0x65, 0x61, 0x6c,
0x2f, 0x77, 0x6f, 0x6f, 0x64, 0x70, 0x65, 0x63, 0x6b, 0x65, 0x72, 0x2d, 0x63, 0x69, 0x2f, 0x77, 0x74, 0x68, 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72,
0x6f, 0x6f, 0x64, 0x70, 0x65, 0x63, 0x6b, 0x65, 0x72, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x74, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0c,
0x6e, 0x65, 0x2f, 0x72, 0x70, 0x63, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x32, 0x43,
0x6f, 0x74, 0x6f, 0x33, 0x0a, 0x0e, 0x57, 0x6f, 0x6f, 0x64, 0x70, 0x65, 0x63, 0x6b, 0x65, 0x72, 0x41, 0x75, 0x74, 0x68,
0x12, 0x31, 0x0a, 0x04, 0x41, 0x75, 0x74, 0x68, 0x12, 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x2e, 0x41, 0x75, 0x74, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x13, 0x2e, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x41, 0x75, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
0x65, 0x22, 0x00, 0x42, 0x38, 0x5a, 0x36, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f,
0x6d, 0x2f, 0x77, 0x6f, 0x6f, 0x64, 0x70, 0x65, 0x63, 0x6b, 0x65, 0x72, 0x2d, 0x63, 0x69, 0x2f,
0x77, 0x6f, 0x6f, 0x64, 0x70, 0x65, 0x63, 0x6b, 0x65, 0x72, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c,
0x69, 0x6e, 0x65, 0x2f, 0x72, 0x70, 0x63, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x33,
} }
var ( var (
@@ -1238,7 +1240,7 @@ func file_woodpecker_proto_rawDescGZIP() []byte {
var file_woodpecker_proto_msgTypes = make([]protoimpl.MessageInfo, 20) var file_woodpecker_proto_msgTypes = make([]protoimpl.MessageInfo, 20)
var file_woodpecker_proto_goTypes = []interface{}{ var file_woodpecker_proto_goTypes = []interface{}{
(*State)(nil), // 0: proto.State (*State)(nil), // 0: proto.State
(*Line)(nil), // 1: proto.Line (*LogEntry)(nil), // 1: proto.LogEntry
(*Filter)(nil), // 2: proto.Filter (*Filter)(nil), // 2: proto.Filter
(*Pipeline)(nil), // 3: proto.Pipeline (*Pipeline)(nil), // 3: proto.Pipeline
(*NextRequest)(nil), // 4: proto.NextRequest (*NextRequest)(nil), // 4: proto.NextRequest
@@ -1264,7 +1266,7 @@ var file_woodpecker_proto_depIdxs = []int32{
0, // 2: proto.InitRequest.state:type_name -> proto.State 0, // 2: proto.InitRequest.state:type_name -> proto.State
0, // 3: proto.DoneRequest.state:type_name -> proto.State 0, // 3: proto.DoneRequest.state:type_name -> proto.State
0, // 4: proto.UpdateRequest.state:type_name -> proto.State 0, // 4: proto.UpdateRequest.state:type_name -> proto.State
1, // 5: proto.LogRequest.line:type_name -> proto.Line 1, // 5: proto.LogRequest.logEntry:type_name -> proto.LogEntry
3, // 6: proto.NextResponse.pipeline:type_name -> proto.Pipeline 3, // 6: proto.NextResponse.pipeline:type_name -> proto.Pipeline
11, // 7: proto.Woodpecker.Version:input_type -> proto.Empty 11, // 7: proto.Woodpecker.Version:input_type -> proto.Empty
4, // 8: proto.Woodpecker.Next:input_type -> proto.NextRequest 4, // 8: proto.Woodpecker.Next:input_type -> proto.NextRequest
@@ -1314,7 +1316,7 @@ func file_woodpecker_proto_init() {
} }
} }
file_woodpecker_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { file_woodpecker_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Line); i { switch v := v.(*LogEntry); i {
case 0: case 0:
return &v.state return &v.state
case 1: case 1:

View File

@@ -49,11 +49,12 @@ message State {
string error = 6; string error = 6;
} }
message Line { message LogEntry {
string step = 1; string step_uuid = 1;
int64 time = 2; int64 time = 2;
int32 pos = 3; int32 line = 3;
string out = 4; int32 type = 4; // 0 = stdout, 1 = stderr, 2 = exit-code, 3 = metadata, 4 = progress
string data = 5;
} }
message Filter { message Filter {
@@ -98,8 +99,7 @@ message UpdateRequest {
} }
message LogRequest { message LogRequest {
string id = 1; LogEntry logEntry = 1;
Line line = 2;
} }
message Empty { message Empty {

View File

@@ -20,6 +20,7 @@ import (
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/google/uuid"
"github.com/oklog/ulid/v2" "github.com/oklog/ulid/v2"
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
@@ -79,6 +80,7 @@ func (b *StepBuilder) Build() ([]*Item, error) {
for _, axis := range axes { for _, axis := range axes {
workflow := &model.Step{ workflow := &model.Step{
UUID: uuid.New().String(), // TODO(#1784): Remove once workflows are a separate entity in database
PipelineID: b.Curr.ID, PipelineID: b.Curr.ID,
PID: pidSequence, PID: pidSequence,
PGID: pidSequence, PGID: pidSequence,
@@ -277,6 +279,9 @@ func (b *StepBuilder) toInternalRepresentation(parsed *yaml_types.Workflow, envi
).Compile(parsed) ).Compile(parsed)
} }
// SetPipelineStepsOnPipeline is the link between pipeline representation in "pipeline package" and server
// to be specific this func currently is used to convert the pipeline.Item list (crafted by StepBuilder.Build()) into
// a pipeline that can be stored in the database by the server
func SetPipelineStepsOnPipeline(pipeline *model.Pipeline, pipelineItems []*Item) *model.Pipeline { func SetPipelineStepsOnPipeline(pipeline *model.Pipeline, pipelineItems []*Item) *model.Pipeline {
var pidSequence int var pidSequence int
for _, item := range pipelineItems { for _, item := range pipelineItems {
@@ -295,8 +300,9 @@ func SetPipelineStepsOnPipeline(pipeline *model.Pipeline, pipelineItems []*Item)
gid = pidSequence gid = pidSequence
} }
step := &model.Step{ step := &model.Step{
PipelineID: pipeline.ID,
Name: step.Alias, Name: step.Alias,
UUID: step.UUID,
PipelineID: pipeline.ID,
PID: pidSequence, PID: pidSequence,
PPID: item.Workflow.PID, PPID: item.Workflow.PID,
PGID: gid, PGID: gid,

View File

@@ -19,17 +19,14 @@
package api package api
import ( import (
"bytes"
"encoding/json" "encoding/json"
"errors" "errors"
"fmt" "fmt"
"io"
"net/http" "net/http"
"strconv" "strconv"
"time" "time"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/rs/zerolog/log"
"github.com/woodpecker-ci/woodpecker/server" "github.com/woodpecker-ci/woodpecker/server"
"github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/model"
@@ -190,75 +187,29 @@ func GetPipelineLast(c *gin.Context) {
c.JSON(http.StatusOK, pl) c.JSON(http.StatusOK, pl)
} }
// GetPipelineLogs
//
// @Summary Log information per step
// @Router /repos/{owner}/{name}/logs/{number}/{pid}/{step} [get]
// @Produce plain
// @Success 200
// @Tags Pipeline logs
// @Param Authorization header string true "Insert your personal access token" default(Bearer <personal access token>)
// @Param owner path string true "the repository owner's name"
// @Param name path string true "the repository name"
// @Param number path int true "the number of the pipeline"
// @Param pid path int true "the pipeline id"
// @Param step path int true "the step name"
func GetPipelineLogs(c *gin.Context) {
_store := store.FromContext(c)
repo := session.Repo(c)
// parse the pipeline number and step sequence number from
// the request parameter.
num, _ := strconv.ParseInt(c.Params.ByName("number"), 10, 64)
ppid, _ := strconv.Atoi(c.Params.ByName("pid"))
name := c.Params.ByName("step")
pl, err := _store.GetPipelineNumber(repo, num)
if err != nil {
handleDbGetError(c, err)
return
}
step, err := _store.StepChild(pl, ppid, name)
if err != nil {
handleDbGetError(c, err)
return
}
rc, err := _store.LogFind(step)
if err != nil {
handleDbGetError(c, err)
return
}
defer rc.Close()
c.Header("Content-Type", "application/json")
if _, err := io.Copy(c.Writer, rc); err != nil {
log.Error().Err(err).Msg("could not copy log to http response")
}
}
// GetStepLogs // GetStepLogs
// //
// @Summary Log information // @Summary Log information
// @Router /repos/{owner}/{name}/logs/{number}/{pid} [get] // @Router /repos/{owner}/{name}/logs/{number}/{stepID} [get]
// @Produce plain // @Produce json
// @Success 200 // @Success 200 {array} LogEntry
// @Tags Pipeline logs // @Tags Pipeline logs
// @Param Authorization header string true "Insert your personal access token" default(Bearer <personal access token>) // @Param Authorization header string true "Insert your personal access token" default(Bearer <personal access token>)
// @Param owner path string true "the repository owner's name" // @Param owner path string true "the repository owner's name"
// @Param name path string true "the repository name" // @Param name path string true "the repository name"
// @Param number path int true "the number of the pipeline" // @Param number path int true "the number of the pipeline"
// @Param pid path int true "the pipeline id" // @Param stepID path int true "the step id"
func GetStepLogs(c *gin.Context) { func GetStepLogs(c *gin.Context) {
_store := store.FromContext(c) _store := store.FromContext(c)
repo := session.Repo(c) repo := session.Repo(c)
// parse the pipeline number and step sequence number from // parse the pipeline number and step sequence number from
// the request parameter. // the request parameter.
num, _ := strconv.ParseInt(c.Params.ByName("number"), 10, 64) num, err := strconv.ParseInt(c.Params.ByName("number"), 10, 64)
pid, _ := strconv.Atoi(c.Params.ByName("pid")) if err != nil {
_ = c.AbortWithError(http.StatusBadRequest, err)
return
}
pl, err := _store.GetPipelineNumber(repo, num) pl, err := _store.GetPipelineNumber(repo, num)
if err != nil { if err != nil {
@@ -266,24 +217,31 @@ func GetStepLogs(c *gin.Context) {
return return
} }
step, err := _store.StepFind(pl, pid) stepID, err := strconv.ParseInt(c.Params.ByName("stepId"), 10, 64)
if err != nil {
_ = c.AbortWithError(http.StatusBadRequest, err)
return
}
step, err := _store.StepLoad(stepID)
if err != nil { if err != nil {
handleDbGetError(c, err) handleDbGetError(c, err)
return return
} }
rc, err := _store.LogFind(step) if step.PipelineID != pl.ID {
// make sure we can not read arbitrary logs by id
_ = c.AbortWithError(http.StatusBadRequest, fmt.Errorf("step with id %d is not part of repo %s", stepID, repo.FullName))
return
}
logs, err := _store.LogFind(step)
if err != nil { if err != nil {
handleDbGetError(c, err) handleDbGetError(c, err)
return return
} }
defer rc.Close() c.JSON(http.StatusOK, logs)
c.Header("Content-Type", "application/json")
if _, err := io.Copy(c.Writer, rc); err != nil {
log.Error().Err(err).Msg("could not copy log to http response")
}
} }
// GetPipelineConfig // GetPipelineConfig
@@ -532,7 +490,6 @@ func DeletePipelineLogs(c *gin.Context) {
_store := store.FromContext(c) _store := store.FromContext(c)
repo := session.Repo(c) repo := session.Repo(c)
user := session.User(c)
num, _ := strconv.ParseInt(c.Params.ByName("number"), 10, 64) num, _ := strconv.ParseInt(c.Params.ByName("number"), 10, 64)
pl, err := _store.GetPipelineNumber(repo, num) pl, err := _store.GetPipelineNumber(repo, num)
@@ -554,11 +511,8 @@ func DeletePipelineLogs(c *gin.Context) {
} }
for _, step := range steps { for _, step := range steps {
t := time.Now().UTC() if lErr := _store.LogDelete(step); err != nil {
buf := bytes.NewBufferString(fmt.Sprintf(deleteStr, step.Name, user.Login, t.Format(time.UnixDate))) err = errors.Join(err, lErr)
lerr := _store.LogSave(step, buf)
if lerr != nil {
err = lerr
} }
} }
if err != nil { if err != nil {
@@ -568,11 +522,3 @@ func DeletePipelineLogs(c *gin.Context) {
c.String(http.StatusNoContent, "") c.String(http.StatusNoContent, "")
} }
var deleteStr = `[
{
"step": %q,
"pos": 0,
"out": "logs purged by %s on %s\n"
}
]`

View File

@@ -17,6 +17,7 @@ package api
import ( import (
"context" "context"
"encoding/json"
"fmt" "fmt"
"io" "io"
"net/http" "net/http"
@@ -27,7 +28,6 @@ import (
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
"github.com/woodpecker-ci/woodpecker/server" "github.com/woodpecker-ci/woodpecker/server"
"github.com/woodpecker-ci/woodpecker/server/logging"
"github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/model"
"github.com/woodpecker-ci/woodpecker/server/pubsub" "github.com/woodpecker-ci/woodpecker/server/pubsub"
"github.com/woodpecker-ci/woodpecker/server/router/middleware/session" "github.com/woodpecker-ci/woodpecker/server/router/middleware/session"
@@ -121,6 +121,17 @@ func EventStreamSSE(c *gin.Context) {
} }
} }
// LogStream
//
// @Summary Log stream
// @Router /logs/{owner}/{name}/{pipeline}/{stepID} [get]
// @Produce plain
// @Success 200
// @Tags Pipeline logs
// @Param owner path string true "the repository owner's name"
// @Param name path string true "the repository name"
// @Param pipeline path int true "the number of the pipeline"
// @Param stepID path int true "the step id"
func LogStreamSSE(c *gin.Context) { func LogStreamSSE(c *gin.Context) {
c.Header("Content-Type", "text/event-stream") c.Header("Content-Type", "text/event-stream")
c.Header("Cache-Control", "no-cache") c.Header("Cache-Control", "no-cache")
@@ -138,26 +149,43 @@ func LogStreamSSE(c *gin.Context) {
logWriteStringErr(io.WriteString(rw, ": ping\n\n")) logWriteStringErr(io.WriteString(rw, ": ping\n\n"))
flusher.Flush() flusher.Flush()
repo := session.Repo(c)
_store := store.FromContext(c) _store := store.FromContext(c)
repo := session.Repo(c)
// // parse the pipeline number and step sequence number from pipeline, err := strconv.ParseInt(c.Param("pipeline"), 10, 64)
// // the request parameter. if err != nil {
pipelinen, _ := strconv.ParseInt(c.Param("pipeline"), 10, 64) log.Debug().Err(err).Msg("pipeline number invalid")
stepn, _ := strconv.Atoi(c.Param("number")) logWriteStringErr(io.WriteString(rw, "event: error\ndata: pipeline number invalid\n\n"))
return
pipeline, err := _store.GetPipelineNumber(repo, pipelinen) }
pl, err := _store.GetPipelineNumber(repo, pipeline)
if err != nil { if err != nil {
log.Debug().Msgf("stream cannot get pipeline number: %v", err) log.Debug().Msgf("stream cannot get pipeline number: %v", err)
logWriteStringErr(io.WriteString(rw, "event: error\ndata: pipeline not found\n\n")) logWriteStringErr(io.WriteString(rw, "event: error\ndata: pipeline not found\n\n"))
return return
} }
step, err := _store.StepFind(pipeline, stepn)
stepID, err := strconv.ParseInt(c.Param("stepId"), 10, 64)
if err != nil {
log.Debug().Err(err).Msg("step id invalid")
logWriteStringErr(io.WriteString(rw, "event: error\ndata: step id invalid\n\n"))
return
}
step, err := _store.StepLoad(stepID)
if err != nil { if err != nil {
log.Debug().Msgf("stream cannot get step number: %v", err) log.Debug().Msgf("stream cannot get step number: %v", err)
logWriteStringErr(io.WriteString(rw, "event: error\ndata: process not found\n\n")) logWriteStringErr(io.WriteString(rw, "event: error\ndata: process not found\n\n"))
return return
} }
if step.PipelineID != pl.ID {
// make sure we can not read arbitrary logs by id
err = fmt.Errorf("step with id %d is not part of repo %s", stepID, repo.FullName)
log.Debug().Err(err).Msg("event error")
logWriteStringErr(io.WriteString(rw, "event: error\ndata: "+err.Error()+"\n\n"))
return
}
if step.State != model.StatusRunning { if step.State != model.StatusRunning {
log.Debug().Msg("stream not found.") log.Debug().Msg("stream not found.")
logWriteStringErr(io.WriteString(rw, "event: error\ndata: stream not found\n\n")) logWriteStringErr(io.WriteString(rw, "event: error\ndata: stream not found\n\n"))
@@ -178,18 +206,14 @@ func LogStreamSSE(c *gin.Context) {
}() }()
go func() { go func() {
// TODO remove global variable err := server.Config.Services.Logs.Tail(ctx, step.ID, func(entries ...*model.LogEntry) {
err := server.Config.Services.Logs.Tail(ctx, fmt.Sprint(step.ID), func(entries ...*logging.Entry) {
defer func() {
obj := recover() // fix #2480 // TODO: check if it's still needed
log.Trace().Msgf("pubsub subscribe recover return: %v", obj)
}()
for _, entry := range entries { for _, entry := range entries {
select { select {
case <-ctx.Done(): case <-ctx.Done():
return return
default: default:
logc <- entry.Data ee, _ := json.Marshal(entry)
logc <- ee
} }
} }
}) })

View File

@@ -238,40 +238,41 @@ func (s *RPC) Done(c context.Context, id string, state rpc.State) error {
workflow, err := s.store.StepLoad(workflowID) workflow, err := s.store.StepLoad(workflowID)
if err != nil { if err != nil {
log.Error().Msgf("error: cannot find step with id %d: %s", workflowID, err) log.Error().Err(err).Msgf("cannot find step with id %d", workflowID)
return err return err
} }
currentPipeline, err := s.store.GetPipeline(workflow.PipelineID) currentPipeline, err := s.store.GetPipeline(workflow.PipelineID)
if err != nil { if err != nil {
log.Error().Msgf("error: cannot find pipeline with id %d: %s", workflow.PipelineID, err) log.Error().Err(err).Msgf("cannot find pipeline with id %d", workflow.PipelineID)
return err return err
} }
repo, err := s.store.GetRepo(currentPipeline.RepoID) repo, err := s.store.GetRepo(currentPipeline.RepoID)
if err != nil { if err != nil {
log.Error().Msgf("error: cannot find repo with id %d: %s", currentPipeline.RepoID, err) log.Error().Err(err).Msgf("cannot find repo with id %d", currentPipeline.RepoID)
return err return err
} }
log.Trace(). logger := log.With().
Str("repo_id", fmt.Sprint(repo.ID)). Str("repo_id", fmt.Sprint(repo.ID)).
Str("build_id", fmt.Sprint(currentPipeline.ID)). Str("pipeline_id", fmt.Sprint(currentPipeline.ID)).
Str("step_id", id). Str("workflow_id", id).Logger()
Msgf("gRPC Done with state: %#v", state)
logger.Trace().Msgf("gRPC Done with state: %#v", state)
if workflow, err = pipeline.UpdateStepStatusToDone(s.store, *workflow, state); err != nil { if workflow, err = pipeline.UpdateStepStatusToDone(s.store, *workflow, state); err != nil {
log.Error().Msgf("error: done: cannot update step_id %d state: %s", workflow.ID, err) logger.Error().Err(err).Msgf("pipeline.UpdateStepStatusToDone: cannot update workflow state: %s", err)
} }
var queueErr error var queueErr error
if workflow.Failing() { if workflow.Failing() {
queueErr = s.queue.Error(c, id, fmt.Errorf("Step finished with exitcode %d, %s", state.ExitCode, state.Error)) queueErr = s.queue.Error(c, id, fmt.Errorf("Step finished with exit code %d, %s", state.ExitCode, state.Error))
} else { } else {
queueErr = s.queue.Done(c, id, workflow.State) queueErr = s.queue.Done(c, id, workflow.State)
} }
if queueErr != nil { if queueErr != nil {
log.Error().Msgf("error: done: cannot ack step_id %d: %s", workflowID, err) logger.Error().Err(queueErr).Msg("queue.Done: cannot ack workflow")
} }
steps, err := s.store.StepList(currentPipeline) steps, err := s.store.StepList(currentPipeline)
@@ -282,15 +283,20 @@ func (s *RPC) Done(c context.Context, id string, state rpc.State) error {
if !model.IsThereRunningStage(steps) { if !model.IsThereRunningStage(steps) {
if currentPipeline, err = pipeline.UpdateStatusToDone(s.store, *currentPipeline, model.PipelineStatus(steps), workflow.Stopped); err != nil { if currentPipeline, err = pipeline.UpdateStatusToDone(s.store, *currentPipeline, model.PipelineStatus(steps), workflow.Stopped); err != nil {
log.Error().Err(err).Msgf("error: done: cannot update build_id %d final state", currentPipeline.ID) logger.Error().Err(err).Msgf("pipeline.UpdateStatusToDone: cannot update workflow final state")
} }
} }
s.updateForgeStatus(c, repo, currentPipeline, workflow) s.updateForgeStatus(c, repo, currentPipeline, workflow)
if err := s.logger.Close(c, id); err != nil { // make sure writes to pubsub are non blocking (https://github.com/woodpecker-ci/woodpecker/blob/c919f32e0b6432a95e1a6d3d0ad662f591adf73f/server/logging/log.go#L9)
log.Error().Err(err).Msgf("done: cannot close build_id %d logger", workflow.ID) go func() {
} for _, step := range steps {
if err := s.logger.Close(c, step.ID); err != nil {
logger.Error().Err(err).Msgf("done: cannot close log stream for step %d", step.ID)
}
}
}()
if err := s.notify(c, repo, currentPipeline, steps); err != nil { if err := s.notify(c, repo, currentPipeline, steps); err != nil {
return err return err
@@ -308,13 +314,28 @@ func (s *RPC) Done(c context.Context, id string, state rpc.State) error {
} }
// Log implements the rpc.Log function // Log implements the rpc.Log function
func (s *RPC) Log(c context.Context, id string, line *rpc.Line) error { func (s *RPC) Log(c context.Context, _logEntry *rpc.LogEntry) error {
entry := new(logging.Entry) // convert rpc log_entry to model.log_entry
entry.Data, _ = json.Marshal(line) step, err := s.store.StepByUUID(_logEntry.StepUUID)
if err := s.logger.Write(c, id, entry); err != nil { if err != nil {
log.Error().Err(err).Msgf("rpc server could not write to logger") return fmt.Errorf("could not find step with uuid %s in store: %w", _logEntry.StepUUID, err)
} }
return nil logEntry := &model.LogEntry{
StepID: step.ID,
Time: _logEntry.Time,
Line: _logEntry.Line,
Data: []byte(_logEntry.Data),
Type: model.LogEntryType(_logEntry.Type),
}
// make sure writes to pubsub are non blocking (https://github.com/woodpecker-ci/woodpecker/blob/c919f32e0b6432a95e1a6d3d0ad662f591adf73f/server/logging/log.go#L9)
go func() {
// write line to listening web clients
if err := s.logger.Write(c, logEntry.StepID, logEntry); err != nil {
log.Error().Err(err).Msgf("rpc server could not write to logger")
}
}()
// make line persistent in database
return s.store.LogAppend(logEntry)
} }
func (s *RPC) RegisterAgent(ctx context.Context, platform, backend, version string, capacity int32) (int64, error) { func (s *RPC) RegisterAgent(ctx context.Context, platform, backend, version string, capacity int32) (int64, error) {

View File

@@ -145,14 +145,15 @@ func (s *WoodpeckerServer) Extend(c context.Context, req *proto.ExtendRequest) (
} }
func (s *WoodpeckerServer) Log(c context.Context, req *proto.LogRequest) (*proto.Empty, error) { func (s *WoodpeckerServer) Log(c context.Context, req *proto.LogRequest) (*proto.Empty, error) {
line := &rpc.Line{ logEntry := &rpc.LogEntry{
Out: req.GetLine().GetOut(), Data: req.GetLogEntry().GetData(),
Pos: int(req.GetLine().GetPos()), Line: int(req.GetLogEntry().GetLine()),
Time: req.GetLine().GetTime(), Time: req.GetLogEntry().GetTime(),
Step: req.GetLine().GetStep(), StepUUID: req.GetLogEntry().GetStepUuid(),
Type: int(req.GetLogEntry().GetType()),
} }
res := new(proto.Empty) res := new(proto.Empty)
err := s.peer.Log(c, req.GetId(), line) err := s.peer.Log(c, logEntry)
return res, err return res, err
} }

View File

@@ -2,8 +2,9 @@ package logging
import ( import (
"context" "context"
"io"
"sync" "sync"
"github.com/woodpecker-ci/woodpecker/server/model"
) )
// TODO (bradrydzewski) writing to subscribers is currently a blocking // TODO (bradrydzewski) writing to subscribers is currently a blocking
@@ -27,58 +28,58 @@ type subscriber struct {
type stream struct { type stream struct {
sync.Mutex sync.Mutex
path string stepID int64
list []*Entry list []*model.LogEntry
subs map[*subscriber]struct{} subs map[*subscriber]struct{}
done chan struct{} done chan struct{}
} }
type log struct { type log struct {
sync.Mutex sync.Mutex
streams map[string]*stream streams map[int64]*stream
} }
// New returns a new logger. // New returns a new logger.
func New() Log { func New() Log {
return &log{ return &log{
streams: map[string]*stream{}, streams: map[int64]*stream{},
} }
} }
func (l *log) Open(_ context.Context, path string) error { func (l *log) Open(_ context.Context, stepID int64) error {
l.Lock() l.Lock()
_, ok := l.streams[path] _, ok := l.streams[stepID]
if !ok { if !ok {
l.streams[path] = &stream{ l.streams[stepID] = &stream{
path: path, stepID: stepID,
subs: make(map[*subscriber]struct{}), subs: make(map[*subscriber]struct{}),
done: make(chan struct{}), done: make(chan struct{}),
} }
} }
l.Unlock() l.Unlock()
return nil return nil
} }
func (l *log) Write(_ context.Context, path string, entry *Entry) error { func (l *log) Write(_ context.Context, stepID int64, logEntry *model.LogEntry) error {
l.Lock() l.Lock()
s, ok := l.streams[path] s, ok := l.streams[stepID]
l.Unlock() l.Unlock()
if !ok { if !ok {
return ErrNotFound return ErrNotFound
} }
s.Lock() s.Lock()
s.list = append(s.list, entry) s.list = append(s.list, logEntry)
for sub := range s.subs { for sub := range s.subs {
go sub.handler(entry) go sub.handler(logEntry)
} }
s.Unlock() s.Unlock()
return nil return nil
} }
func (l *log) Tail(c context.Context, path string, handler Handler) error { func (l *log) Tail(c context.Context, stepID int64, handler Handler) error {
l.Lock() l.Lock()
s, ok := l.streams[path] s, ok := l.streams[stepID]
l.Unlock() l.Unlock()
if !ok { if !ok {
return ErrNotFound return ErrNotFound
@@ -105,9 +106,9 @@ func (l *log) Tail(c context.Context, path string, handler Handler) error {
return nil return nil
} }
func (l *log) Close(_ context.Context, path string) error { func (l *log) Close(_ context.Context, stepID int64) error {
l.Lock() l.Lock()
s, ok := l.streams[path] s, ok := l.streams[stepID]
l.Unlock() l.Unlock()
if !ok { if !ok {
return ErrNotFound return ErrNotFound
@@ -118,29 +119,7 @@ func (l *log) Close(_ context.Context, path string) error {
s.Unlock() s.Unlock()
l.Lock() l.Lock()
delete(l.streams, path) delete(l.streams, stepID)
l.Unlock() l.Unlock()
return nil return nil
} }
func (l *log) Snapshot(_ context.Context, path string, w io.Writer) error {
l.Lock()
s, ok := l.streams[path]
l.Unlock()
if !ok {
return ErrNotFound
}
s.Lock()
defer s.Unlock()
for _, entry := range s.list {
if _, err := w.Write(entry.Data); err != nil {
return err
}
if _, err := w.Write(cr); err != nil {
return err
}
}
return nil
}
var cr = []byte{'\n'}

View File

@@ -7,14 +7,15 @@ import (
"time" "time"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/woodpecker-ci/woodpecker/server/model"
) )
func TestLogging(t *testing.T) { func TestLogging(t *testing.T) {
var ( var (
wg sync.WaitGroup wg sync.WaitGroup
testPath = "test" testStepID = int64(123)
testEntry = &Entry{ testEntry = &model.LogEntry{
Data: []byte("test"), Data: []byte("test"),
} }
) )
@@ -24,27 +25,27 @@ func TestLogging(t *testing.T) {
) )
logger := New() logger := New()
assert.NoError(t, logger.Open(ctx, testPath)) assert.NoError(t, logger.Open(ctx, testStepID))
go func() { go func() {
assert.NoError(t, logger.Tail(ctx, testPath, func(entry ...*Entry) { wg.Done() })) assert.NoError(t, logger.Tail(ctx, testStepID, func(entry ...*model.LogEntry) { wg.Done() }))
}() }()
go func() { go func() {
assert.NoError(t, logger.Tail(ctx, testPath, func(entry ...*Entry) { wg.Done() })) assert.NoError(t, logger.Tail(ctx, testStepID, func(entry ...*model.LogEntry) { wg.Done() }))
}() }()
<-time.After(500 * time.Millisecond) <-time.After(500 * time.Millisecond)
wg.Add(4) wg.Add(4)
go func() { go func() {
assert.NoError(t, logger.Write(ctx, testPath, testEntry)) assert.NoError(t, logger.Write(ctx, testStepID, testEntry))
assert.NoError(t, logger.Write(ctx, testPath, testEntry)) assert.NoError(t, logger.Write(ctx, testStepID, testEntry))
}() }()
wg.Wait() wg.Wait()
wg.Add(1) wg.Add(1)
go func() { go func() {
assert.NoError(t, logger.Tail(ctx, testPath, func(entry ...*Entry) { wg.Done() })) assert.NoError(t, logger.Tail(ctx, testStepID, func(entry ...*model.LogEntry) { wg.Done() }))
}() }()
<-time.After(500 * time.Millisecond) <-time.After(500 * time.Millisecond)

View File

@@ -3,78 +3,27 @@ package logging
import ( import (
"context" "context"
"errors" "errors"
"io"
"github.com/woodpecker-ci/woodpecker/server/model"
) )
// ErrNotFound is returned when the log does not exist. // ErrNotFound is returned when the log does not exist.
var ErrNotFound = errors.New("stream: not found") var ErrNotFound = errors.New("stream: not found")
// Entry defines a log entry.
type Entry struct {
// ID identifies this message.
ID string `json:"id,omitempty"`
// Data is the actual data in the entry.
Data []byte `json:"data"`
// Tags represents the key-value pairs the
// entry is tagged with.
Tags map[string]string `json:"tags,omitempty"`
}
// Handler defines a callback function for handling log entries. // Handler defines a callback function for handling log entries.
type Handler func(...*Entry) type Handler func(...*model.LogEntry)
// Log defines a log multiplexer. // Log defines a log multiplexer.
type Log interface { type Log interface {
// Open opens the log. // Open opens the log.
Open(c context.Context, path string) error Open(c context.Context, stepID int64) error
// Write writes the entry to the log. // Write writes the entry to the log.
Write(c context.Context, path string, entry *Entry) error Write(c context.Context, stepID int64, entry *model.LogEntry) error
// Tail tails the log. // Tail tails the log.
Tail(c context.Context, path string, handler Handler) error Tail(c context.Context, stepID int64, handler Handler) error
// Close closes the log. // Close closes the log.
Close(c context.Context, path string) error Close(c context.Context, stepID int64) error
// Snapshot snapshots the stream to Writer w.
Snapshot(c context.Context, path string, w io.Writer) error
// Info returns runtime information about the multiplexer.
// Info(c context.Context) (interface{}, error)
} }
// // global streamer
// var global = New()
//
// // Set sets a default global logger.
// func Set(log Log) {
// global = log
// }
//
// // Open opens the log stream.
// func Open(c context.Context, path string) error {
// return global.Open(c, path)
// }
//
// // Write writes the log entry to the stream.
// func Write(c context.Context, path string, entry *Entry) error {
// return global.Write(c, path, entry)
// }
//
// // Tail tails the log stream.
// func Tail(c context.Context, path string, handler Handler) error {
// return global.Tail(c, path, handler)
// }
//
// // Close closes the log stream.
// func Close(c context.Context, path string) error {
// return global.Close(c, path)
// }
//
// // Snapshot snapshots the stream to Writer w.
// func Snapshot(c context.Context, path string, w io.Writer) error {
// return global.Snapshot(c, path, w)
// }

View File

@@ -14,9 +14,29 @@
package model package model
type Logs struct { // LogEntryType identifies the type of line in the logs.
ID int64 `xorm:"pk autoincr 'log_id'"` type LogEntryType int // @name LogEntryType
StepID int64 `xorm:"UNIQUE 'log_step_id'"`
Data []byte `xorm:"LONGBLOB 'log_data'"` const (
// TODO: add create timestamp LogEntryStdout LogEntryType = iota
LogEntryStderr
LogEntryExitCode
LogEntryMetadata
LogEntryProgress
)
type LogEntry struct {
ID int64 `json:"id" xorm:"pk autoincr 'id'"`
StepID int64 `json:"step_id" xorm:"'step_id'"`
Time int64 `json:"time"`
Line int `json:"line"`
Data []byte `json:"data" xorm:"LONGBLOB"`
Created int64 `json:"-" xorm:"created"`
Type LogEntryType `json:"type"`
} // @name LogEntry
// TODO: store info what specific command the line belongs to (must be optional and impl. by backend)
func (LogEntry) TableName() string {
return "log_entries"
} }

View File

@@ -31,6 +31,7 @@ type StepStore interface {
// Step represents a process in the pipeline. // Step represents a process in the pipeline.
type Step struct { type Step struct {
ID int64 `json:"id" xorm:"pk autoincr 'step_id'"` ID int64 `json:"id" xorm:"pk autoincr 'step_id'"`
UUID string `json:"uuid" xorm:"UNIQUE INDEX 'step_uuid'"`
PipelineID int64 `json:"pipeline_id" xorm:"UNIQUE(s) INDEX 'step_pipeline_id'"` PipelineID int64 `json:"pipeline_id" xorm:"UNIQUE(s) INDEX 'step_pipeline_id'"`
PID int `json:"pid" xorm:"UNIQUE(s) 'step_pid'"` PID int `json:"pid" xorm:"UNIQUE(s) 'step_pid'"`
PPID int `json:"ppid" xorm:"step_ppid"` PPID int `json:"ppid" xorm:"step_ppid"`

View File

@@ -23,6 +23,7 @@ import (
func TestTree(t *testing.T) { func TestTree(t *testing.T) {
steps := []*Step{{ steps := []*Step{{
ID: 25, ID: 25,
UUID: "f80df0bb-77a7-4964-9412-2e1049872d57",
PID: 2, PID: 2,
PipelineID: 6, PipelineID: 6,
PPID: 1, PPID: 1,
@@ -32,6 +33,7 @@ func TestTree(t *testing.T) {
Error: "0", Error: "0",
}, { }, {
ID: 24, ID: 24,
UUID: "c19b49c5-990d-4722-ba9c-1c4fe9db1f91",
PipelineID: 6, PipelineID: 6,
PID: 1, PID: 1,
PPID: 0, PPID: 0,
@@ -41,6 +43,7 @@ func TestTree(t *testing.T) {
Error: "1", Error: "1",
}, { }, {
ID: 26, ID: 26,
UUID: "4380146f-c0ff-4482-8107-c90937d1faba",
PipelineID: 6, PipelineID: 6,
PID: 3, PID: 3,
PPID: 1, PPID: 1,
@@ -56,6 +59,7 @@ func TestTree(t *testing.T) {
steps = []*Step{{ steps = []*Step{{
ID: 25, ID: 25,
UUID: "f80df0bb-77a7-4964-9412-2e1049872d57",
PID: 2, PID: 2,
PipelineID: 6, PipelineID: 6,
PPID: 1, PPID: 1,

View File

@@ -131,18 +131,18 @@ func cancelPreviousPipelines(
return err return err
} }
pipelineNeedsCancel := func(active *model.Pipeline) (bool, error) { pipelineNeedsCancel := func(active *model.Pipeline) bool {
// always filter on same event // always filter on same event
if active.Event != pipeline.Event { if active.Event != pipeline.Event {
return false, nil return false
} }
// find events for the same context // find events for the same context
switch pipeline.Event { switch pipeline.Event {
case model.EventPush: case model.EventPush:
return pipeline.Branch == active.Branch, nil return pipeline.Branch == active.Branch
default: default:
return pipeline.Refspec == active.Refspec, nil return pipeline.Refspec == active.Refspec
} }
} }
@@ -152,14 +152,7 @@ func cancelPreviousPipelines(
continue continue
} }
cancel, err := pipelineNeedsCancel(active) cancel := pipelineNeedsCancel(active)
if err != nil {
log.Error().
Err(err).
Str("Ref", active.Ref).
Msg("Error while trying to cancel pipeline, skipping")
continue
}
if !cancel { if !cancel {
continue continue

View File

@@ -51,7 +51,7 @@ func zeroSteps(currentPipeline *model.Pipeline, forgeYamlConfigs []*forge_types.
return false return false
} }
// TODO: parse yaml once and not for each filter function // TODO: parse yaml once and not for each filter function (-> move server/pipeline/filter* into pipeline/step_builder)
// Check if at least one pipeline step will be execute otherwise we will just ignore this webhook // Check if at least one pipeline step will be execute otherwise we will just ignore this webhook
func checkIfFiltered(repo *model.Repo, p *model.Pipeline, forgeYamlConfigs []*forge_types.FileMeta) (bool, error) { func checkIfFiltered(repo *model.Repo, p *model.Pipeline, forgeYamlConfigs []*forge_types.FileMeta) (bool, error) {
log.Trace().Msgf("hook.branchFiltered(): pipeline branch: '%s' pipeline event: '%s' config count: %d", p.Branch, p.Event, len(forgeYamlConfigs)) log.Trace().Msgf("hook.branchFiltered(): pipeline branch: '%s' pipeline event: '%s' config count: %d", p.Branch, p.Event, len(forgeYamlConfigs))

View File

@@ -49,9 +49,6 @@ func queuePipeline(repo *model.Repo, pipelineItems []*pipeline.Item) error {
Timeout: repo.Timeout, Timeout: repo.Timeout,
}) })
if err := server.Config.Services.Logs.Open(context.Background(), task.ID); err != nil {
return err
}
tasks = append(tasks, task) tasks = append(tasks, task)
} }
return server.Config.Services.Queue.PushAtOnce(context.Background(), tasks) return server.Config.Services.Queue.PushAtOnce(context.Background(), tasks)

View File

@@ -20,6 +20,7 @@ import (
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
"github.com/woodpecker-ci/woodpecker/pipeline" "github.com/woodpecker-ci/woodpecker/pipeline"
"github.com/woodpecker-ci/woodpecker/server"
"github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/model"
"github.com/woodpecker-ci/woodpecker/server/store" "github.com/woodpecker-ci/woodpecker/server/store"
) )
@@ -46,6 +47,16 @@ func start(ctx context.Context, store store.Store, activePipeline *model.Pipelin
return nil, err return nil, err
} }
// open logs streamer for each step
go func() {
steps := activePipeline.Steps
for _, step := range steps {
if err := server.Config.Services.Logs.Open(context.Background(), step.ID); err != nil {
log.Error().Err(err).Msgf("could not open log stream for step %d", step.ID)
}
}
}()
updatePipelineStatus(ctx, activePipeline, repo, user) updatePipelineStatus(ctx, activePipeline, repo, user)
return activePipeline, nil return activePipeline, nil

View File

@@ -89,8 +89,7 @@ func apiRoutes(e *gin.Engine) {
repo.POST("/pipelines/:number/approve", session.MustPush, api.PostApproval) repo.POST("/pipelines/:number/approve", session.MustPush, api.PostApproval)
repo.POST("/pipelines/:number/decline", session.MustPush, api.PostDecline) repo.POST("/pipelines/:number/decline", session.MustPush, api.PostDecline)
repo.GET("/logs/:number/:pid", api.GetStepLogs) repo.GET("/logs/:number/:stepId", api.GetStepLogs)
repo.GET("/logs/:number/:pid/:step", api.GetPipelineLogs)
// requires push permissions // requires push permissions
repo.DELETE("/logs/:number", session.MustPush, api.DeletePipelineLogs) repo.DELETE("/logs/:number", session.MustPush, api.DeletePipelineLogs)
@@ -179,6 +178,15 @@ func apiRoutes(e *gin.Engine) {
apiBase.POST("/hook", api.PostHook) apiBase.POST("/hook", api.PostHook)
stream := apiBase.Group("/stream")
{
stream.GET("/logs/:owner/:name/:pipeline/:stepId",
session.SetRepo(),
session.SetPerm(),
session.MustPull,
api.LogStreamSSE)
}
if zerolog.GlobalLevel() <= zerolog.DebugLevel { if zerolog.GlobalLevel() <= zerolog.DebugLevel {
debugger := apiBase.Group("/debug") debugger := apiBase.Group("/debug")
{ {
@@ -204,11 +212,5 @@ func apiRoutes(e *gin.Engine) {
sse := e.Group("/stream") sse := e.Group("/stream")
{ {
sse.GET("/events", api.EventStreamSSE) sse.GET("/events", api.EventStreamSSE)
sse.GET("/logs/:owner/:name/:pipeline/:number",
session.SetRepo(),
session.SetPerm(),
session.MustPull,
api.LogStreamSSE,
)
} }
} }

View File

@@ -15,50 +15,41 @@
package datastore package datastore
import ( import (
"bytes" "fmt"
"io"
"github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/model"
) )
func (s storage) LogFind(step *model.Step) (io.ReadCloser, error) { func (s storage) LogFind(step *model.Step) ([]*model.LogEntry, error) {
logs := &model.Logs{ var logEntries []*model.LogEntry
StepID: step.ID, return logEntries, s.engine.Asc("id").Where("step_id = ?", step.ID).Find(&logEntries)
}
if err := wrapGet(s.engine.Get(logs)); err != nil {
return nil, err
}
buf := bytes.NewBuffer(logs.Data)
return io.NopCloser(buf), nil
} }
func (s storage) LogSave(step *model.Step, reader io.Reader) error { func (s storage) LogSave(step *model.Step, logEntries []*model.LogEntry) error {
data, _ := io.ReadAll(reader)
sess := s.engine.NewSession() sess := s.engine.NewSession()
defer sess.Close() defer sess.Close()
if err := sess.Begin(); err != nil { if err := sess.Begin(); err != nil {
return err return err
} }
logs := new(model.Logs) for _, logEntry := range logEntries {
exist, err := sess.Where("log_step_id = ?", step.ID).Get(logs) if logEntry.StepID != step.ID {
if err != nil { return fmt.Errorf("got a log-entry with step id '%d' but expected '%d'", logEntry.StepID, step.ID)
return err
}
if exist {
if _, err := sess.ID(logs.ID).Cols("log_data").Update(&model.Logs{Data: data}); err != nil {
return err
} }
} else { if _, err := sess.Insert(logEntry); err != nil {
if _, err := sess.Insert(&model.Logs{
StepID: step.ID,
Data: data,
}); err != nil {
return err return err
} }
} }
return sess.Commit() return sess.Commit()
} }
func (s storage) LogAppend(logEntry *model.LogEntry) error {
_, err := s.engine.Insert(logEntry)
return err
}
func (s storage) LogDelete(step *model.Step) error {
_, err := s.engine.Where("step_id = ?", step.ID).Delete(new(model.LogEntry))
return err
}

View File

@@ -15,64 +15,84 @@
package datastore package datastore
import ( import (
"bytes"
"io"
"testing" "testing"
"github.com/stretchr/testify/assert"
"github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/model"
) )
func TestLogCreateFind(t *testing.T) { func TestLogCreateFindDelete(t *testing.T) {
store, closer := newTestStore(t, new(model.Step), new(model.Logs)) store, closer := newTestStore(t, new(model.Step), new(model.LogEntry))
defer closer() defer closer()
step := model.Step{ step := model.Step{
ID: 1, ID: 1,
} }
buf := bytes.NewBufferString("echo hi")
err := store.LogSave(&step, buf) logEntries := []*model.LogEntry{
if err != nil { {
t.Errorf("Unexpected error: log create: %s", err) StepID: step.ID,
Data: []byte("hello"),
Line: 1,
Time: 0,
},
{
StepID: step.ID,
Data: []byte("world"),
Line: 2,
Time: 10,
},
} }
rc, err := store.LogFind(&step) // first insert should just work
if err != nil { assert.NoError(t, store.LogSave(&step, logEntries))
t.Errorf("Unexpected error: log create: %s", err)
}
defer rc.Close() // we want to find our inserted logs
out, _ := io.ReadAll(rc) _logEntries, err := store.LogFind(&step)
if got, want := string(out), "echo hi"; got != want { assert.NoError(t, err)
t.Errorf("Want log data %s, got %s", want, got) assert.Len(t, _logEntries, len(logEntries))
}
// delete and check
assert.NoError(t, store.LogDelete(&step))
_logEntries, err = store.LogFind(&step)
assert.NoError(t, err)
assert.Len(t, _logEntries, 0)
} }
func TestLogUpdate(t *testing.T) { func TestLogAppend(t *testing.T) {
store, closer := newTestStore(t, new(model.Step), new(model.Logs)) store, closer := newTestStore(t, new(model.Step), new(model.LogEntry))
defer closer() defer closer()
step := model.Step{ step := model.Step{
ID: 1, ID: 1,
} }
buf1 := bytes.NewBufferString("echo hi") logEntries := []*model.LogEntry{
buf2 := bytes.NewBufferString("echo allo?") {
err1 := store.LogSave(&step, buf1) StepID: step.ID,
err2 := store.LogSave(&step, buf2) Data: []byte("hello"),
if err1 != nil { Line: 1,
t.Errorf("Unexpected error: log create: %s", err1) Time: 0,
} },
if err2 != nil { {
t.Errorf("Unexpected error: log update: %s", err2) StepID: step.ID,
Data: []byte("world"),
Line: 2,
Time: 10,
},
} }
rc, err := store.LogFind(&step) assert.NoError(t, store.LogSave(&step, logEntries))
if err != nil {
t.Errorf("Unexpected error: log create: %s", err) logEntry := &model.LogEntry{
StepID: step.ID,
Data: []byte("allo?"),
Line: 3,
Time: 20,
} }
defer rc.Close() assert.NoError(t, store.LogAppend(logEntry))
out, _ := io.ReadAll(rc)
if got, want := string(out), "echo allo?"; got != want { _logEntries, err := store.LogFind(&step)
t.Errorf("Want log data %s, got %s", want, got) assert.NoError(t, err)
} assert.Len(t, _logEntries, len(logEntries)+1)
} }

View File

@@ -54,7 +54,7 @@ var allBeans = []interface{}{
new(model.Pipeline), new(model.Pipeline),
new(model.PipelineConfig), new(model.PipelineConfig),
new(model.Config), new(model.Config),
new(model.Logs), new(model.LogEntry),
new(model.Perm), new(model.Perm),
new(model.Step), new(model.Step),
new(model.Registry), new(model.Registry),

View File

@@ -299,7 +299,7 @@ func TestRepoCrud(t *testing.T) {
new(model.Perm), new(model.Perm),
new(model.Pipeline), new(model.Pipeline),
new(model.PipelineConfig), new(model.PipelineConfig),
new(model.Logs), new(model.LogEntry),
new(model.Step), new(model.Step),
new(model.Secret), new(model.Secret),
new(model.Registry), new(model.Registry),
@@ -334,6 +334,7 @@ func TestRepoCrud(t *testing.T) {
RepoID: repoUnrelated.ID, RepoID: repoUnrelated.ID,
} }
stepUnrelated := model.Step{ stepUnrelated := model.Step{
UUID: "44c0de71-a6be-41c9-b860-e3716d1dfcef",
Name: "a unrelated step", Name: "a unrelated step",
} }
assert.NoError(t, store.CreatePipeline(&pipelineUnrelated, &stepUnrelated)) assert.NoError(t, store.CreatePipeline(&pipelineUnrelated, &stepUnrelated))

View File

@@ -33,6 +33,11 @@ func (s storage) StepFind(pipeline *model.Pipeline, pid int) (*model.Step, error
return step, wrapGet(s.engine.Get(step)) return step, wrapGet(s.engine.Get(step))
} }
func (s storage) StepByUUID(uuid string) (*model.Step, error) {
step := new(model.Step)
return step, wrapGet(s.engine.Where("step_uuid = ?", uuid).Get(step))
}
func (s storage) StepChild(pipeline *model.Pipeline, ppid int, child string) (*model.Step, error) { func (s storage) StepChild(pipeline *model.Pipeline, ppid int, child string) (*model.Step, error) {
step := &model.Step{ step := &model.Step{
PipelineID: pipeline.ID, PipelineID: pipeline.ID,
@@ -87,7 +92,7 @@ func (s storage) StepClear(pipeline *model.Pipeline) error {
} }
func deleteStep(sess *xorm.Session, stepID int64) error { func deleteStep(sess *xorm.Session, stepID int64) error {
if _, err := sess.Where("log_step_id = ?", stepID).Delete(new(model.Logs)); err != nil { if _, err := sess.Where("step_id = ?", stepID).Delete(new(model.LogEntry)); err != nil {
return err return err
} }
_, err := sess.ID(stepID).Delete(new(model.Step)) _, err := sess.ID(stepID).Delete(new(model.Step))

View File

@@ -21,6 +21,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/model"
"github.com/woodpecker-ci/woodpecker/server/store/types"
) )
func TestStepFind(t *testing.T) { func TestStepFind(t *testing.T) {
@@ -29,6 +30,7 @@ func TestStepFind(t *testing.T) {
steps := []*model.Step{ steps := []*model.Step{
{ {
UUID: "8d89104f-d44e-4b45-b86e-17f8b5e74a0e",
PipelineID: 1000, PipelineID: 1000,
PID: 1, PID: 1,
PPID: 2, PPID: 2,
@@ -59,6 +61,7 @@ func TestStepChild(t *testing.T) {
err := store.StepCreate([]*model.Step{ err := store.StepCreate([]*model.Step{
{ {
UUID: "ea6d4008-8ace-4f8a-ad03-53f1756465d9",
PipelineID: 1, PipelineID: 1,
PID: 1, PID: 1,
PPID: 1, PPID: 1,
@@ -66,6 +69,7 @@ func TestStepChild(t *testing.T) {
State: "success", State: "success",
}, },
{ {
UUID: "2bf387f7-2913-4907-814c-c9ada88707c0",
PipelineID: 1, PipelineID: 1,
PID: 2, PID: 2,
PGID: 2, PGID: 2,
@@ -98,6 +102,7 @@ func TestStepList(t *testing.T) {
err := store.StepCreate([]*model.Step{ err := store.StepCreate([]*model.Step{
{ {
UUID: "2bf387f7-2913-4907-814c-c9ada88707c0",
PipelineID: 2, PipelineID: 2,
PID: 1, PID: 1,
PPID: 1, PPID: 1,
@@ -105,6 +110,7 @@ func TestStepList(t *testing.T) {
State: "success", State: "success",
}, },
{ {
UUID: "4b04073c-1827-4aa4-a5f5-c7b21c5e44a6",
PipelineID: 1, PipelineID: 1,
PID: 1, PID: 1,
PPID: 1, PPID: 1,
@@ -112,6 +118,7 @@ func TestStepList(t *testing.T) {
State: "success", State: "success",
}, },
{ {
UUID: "40aab045-970b-4892-b6df-6f825a7ec97a",
PipelineID: 1, PipelineID: 1,
PID: 2, PID: 2,
PGID: 2, PGID: 2,
@@ -139,6 +146,7 @@ func TestStepUpdate(t *testing.T) {
defer closer() defer closer()
step := &model.Step{ step := &model.Step{
UUID: "fc7c7fd6-553e-480b-8ed7-30d8563d0b79",
PipelineID: 1, PipelineID: 1,
PID: 1, PID: 1,
PPID: 2, PPID: 2,
@@ -176,6 +184,7 @@ func TestStepIndexes(t *testing.T) {
if err := store.StepCreate([]*model.Step{ if err := store.StepCreate([]*model.Step{
{ {
UUID: "4db7e5fc-5312-4d02-9e14-b51b9e3242cc",
PipelineID: 1, PipelineID: 1,
PID: 1, PID: 1,
PPID: 1, PPID: 1,
@@ -191,6 +200,7 @@ func TestStepIndexes(t *testing.T) {
// fail due to duplicate pid // fail due to duplicate pid
if err := store.StepCreate([]*model.Step{ if err := store.StepCreate([]*model.Step{
{ {
UUID: "c1f33a9e-2a02-4579-95ec-90255d785a12",
PipelineID: 1, PipelineID: 1,
PID: 1, PID: 1,
PPID: 1, PPID: 1,
@@ -201,6 +211,60 @@ func TestStepIndexes(t *testing.T) {
}); err == nil { }); err == nil {
t.Errorf("Unexpected error: duplicate pid") t.Errorf("Unexpected error: duplicate pid")
} }
// fail due to duplicate uuid
if err := store.StepCreate([]*model.Step{
{
UUID: "4db7e5fc-5312-4d02-9e14-b51b9e3242cc",
PipelineID: 5,
PID: 4,
PPID: 3,
PGID: 2,
State: "success",
Name: "clone",
},
}); err == nil {
t.Errorf("Unexpected error: duplicate pid")
}
}
func TestStepByUUID(t *testing.T) {
store, closer := newTestStore(t, new(model.Step), new(model.Pipeline))
defer closer()
assert.NoError(t, store.StepCreate([]*model.Step{
{
UUID: "4db7e5fc-5312-4d02-9e14-b51b9e3242cc",
PipelineID: 1,
PID: 1,
PPID: 1,
PGID: 1,
State: "running",
Name: "build",
},
{
UUID: "fc7c7fd6-553e-480b-8ed7-30d8563d0b79",
PipelineID: 4,
PID: 6,
PPID: 7,
PGID: 8,
Name: "build",
State: "pending",
Error: "pc load letter",
ExitCode: 255,
AgentID: 1,
Platform: "linux/amd64",
Environ: map[string]string{"GOLANG": "tip"},
},
}))
step, err := store.StepByUUID("4db7e5fc-5312-4d02-9e14-b51b9e3242cc")
assert.NoError(t, err)
assert.NotEmpty(t, step)
step, err = store.StepByUUID("52feb6f5-8ce2-40c0-9937-9d0e3349c98c")
assert.ErrorIs(t, err, types.RecordNotExist)
assert.Empty(t, step)
} }
// TODO: func TestStepCascade(t *testing.T) {} // TODO: func TestStepCascade(t *testing.T) {}

View File

@@ -3,8 +3,6 @@
package mocks package mocks
import ( import (
io "io"
mock "github.com/stretchr/testify/mock" mock "github.com/stretchr/testify/mock"
model "github.com/woodpecker-ci/woodpecker/server/model" model "github.com/woodpecker-ci/woodpecker/server/model"
) )
@@ -1089,20 +1087,48 @@ func (_m *Store) HasRedirectionForRepo(_a0 int64, _a1 string) (bool, error) {
return r0, r1 return r0, r1
} }
// LogFind provides a mock function with given fields: _a0 // LogAppend provides a mock function with given fields: logEntry
func (_m *Store) LogFind(_a0 *model.Step) (io.ReadCloser, error) { func (_m *Store) LogAppend(logEntry *model.LogEntry) error {
ret := _m.Called(logEntry)
var r0 error
if rf, ok := ret.Get(0).(func(*model.LogEntry) error); ok {
r0 = rf(logEntry)
} else {
r0 = ret.Error(0)
}
return r0
}
// LogDelete provides a mock function with given fields: _a0
func (_m *Store) LogDelete(_a0 *model.Step) error {
ret := _m.Called(_a0) ret := _m.Called(_a0)
var r0 io.ReadCloser var r0 error
if rf, ok := ret.Get(0).(func(*model.Step) error); ok {
r0 = rf(_a0)
} else {
r0 = ret.Error(0)
}
return r0
}
// LogFind provides a mock function with given fields: _a0
func (_m *Store) LogFind(_a0 *model.Step) ([]*model.LogEntry, error) {
ret := _m.Called(_a0)
var r0 []*model.LogEntry
var r1 error var r1 error
if rf, ok := ret.Get(0).(func(*model.Step) (io.ReadCloser, error)); ok { if rf, ok := ret.Get(0).(func(*model.Step) ([]*model.LogEntry, error)); ok {
return rf(_a0) return rf(_a0)
} }
if rf, ok := ret.Get(0).(func(*model.Step) io.ReadCloser); ok { if rf, ok := ret.Get(0).(func(*model.Step) []*model.LogEntry); ok {
r0 = rf(_a0) r0 = rf(_a0)
} else { } else {
if ret.Get(0) != nil { if ret.Get(0) != nil {
r0 = ret.Get(0).(io.ReadCloser) r0 = ret.Get(0).([]*model.LogEntry)
} }
} }
@@ -1116,11 +1142,11 @@ func (_m *Store) LogFind(_a0 *model.Step) (io.ReadCloser, error) {
} }
// LogSave provides a mock function with given fields: _a0, _a1 // LogSave provides a mock function with given fields: _a0, _a1
func (_m *Store) LogSave(_a0 *model.Step, _a1 io.Reader) error { func (_m *Store) LogSave(_a0 *model.Step, _a1 []*model.LogEntry) error {
ret := _m.Called(_a0, _a1) ret := _m.Called(_a0, _a1)
var r0 error var r0 error
if rf, ok := ret.Get(0).(func(*model.Step, io.Reader) error); ok { if rf, ok := ret.Get(0).(func(*model.Step, []*model.LogEntry) error); ok {
r0 = rf(_a0, _a1) r0 = rf(_a0, _a1)
} else { } else {
r0 = ret.Error(0) r0 = ret.Error(0)
@@ -1609,6 +1635,32 @@ func (_m *Store) ServerConfigSet(_a0 string, _a1 string) error {
return r0 return r0
} }
// StepByUUID provides a mock function with given fields: _a0
func (_m *Store) StepByUUID(_a0 string) (*model.Step, error) {
ret := _m.Called(_a0)
var r0 *model.Step
var r1 error
if rf, ok := ret.Get(0).(func(string) (*model.Step, error)); ok {
return rf(_a0)
}
if rf, ok := ret.Get(0).(func(string) *model.Step); ok {
r0 = rf(_a0)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*model.Step)
}
}
if rf, ok := ret.Get(1).(func(string) error); ok {
r1 = rf(_a0)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// StepChild provides a mock function with given fields: _a0, _a1, _a2 // StepChild provides a mock function with given fields: _a0, _a1, _a2
func (_m *Store) StepChild(_a0 *model.Pipeline, _a1 int, _a2 string) (*model.Step, error) { func (_m *Store) StepChild(_a0 *model.Pipeline, _a1 int, _a2 string) (*model.Step, error) {
ret := _m.Called(_a0, _a1, _a2) ret := _m.Called(_a0, _a1, _a2)

View File

@@ -18,8 +18,6 @@ package store
//go:generate mockery --name Store --output mocks --case underscore //go:generate mockery --name Store --output mocks --case underscore
import ( import (
"io"
"github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/model"
) )
@@ -138,6 +136,7 @@ type Store interface {
// Steps // Steps
StepLoad(int64) (*model.Step, error) StepLoad(int64) (*model.Step, error)
StepFind(*model.Pipeline, int) (*model.Step, error) StepFind(*model.Pipeline, int) (*model.Step, error)
StepByUUID(string) (*model.Step, error)
StepChild(*model.Pipeline, int, string) (*model.Step, error) StepChild(*model.Pipeline, int, string) (*model.Step, error)
StepList(*model.Pipeline) ([]*model.Step, error) StepList(*model.Pipeline) ([]*model.Step, error)
StepCreate([]*model.Step) error StepCreate([]*model.Step) error
@@ -145,10 +144,10 @@ type Store interface {
StepClear(*model.Pipeline) error StepClear(*model.Pipeline) error
// Logs // Logs
LogFind(*model.Step) (io.ReadCloser, error) LogFind(*model.Step) ([]*model.LogEntry, error)
// TODO: since we do ReadAll in any case a ioReader is not the best idea LogSave(*model.Step, []*model.LogEntry) error
// so either find a way to write log in chunks by xorm ... LogAppend(logEntry *model.LogEntry) error
LogSave(*model.Step, io.Reader) error LogDelete(*model.Step) error
// Tasks // Tasks
// TaskList TODO: paginate & opt filter // TaskList TODO: paginate & opt filter

View File

@@ -64,13 +64,13 @@
</div> </div>
</template> </template>
<script lang="ts"> <script lang="ts" setup>
import '~/style/console.css'; import '~/style/console.css';
import { useStorage } from '@vueuse/core'; import { useStorage } from '@vueuse/core';
import AnsiUp from 'ansi_up'; import AnsiUp from 'ansi_up';
import { debounce } from 'lodash'; import { debounce } from 'lodash';
import { computed, defineComponent, inject, nextTick, onMounted, PropType, Ref, ref, toRef, watch } from 'vue'; import { computed, inject, nextTick, onMounted, Ref, ref, toRef, watch } from 'vue';
import { useI18n } from 'vue-i18n'; import { useI18n } from 'vue-i18n';
import Button from '~/components/atomic/Button.vue'; import Button from '~/components/atomic/Button.vue';
@@ -86,227 +86,191 @@ type LogLine = {
time?: number; time?: number;
}; };
export default defineComponent({ const props = defineProps<{
name: 'PipelineLog', pipeline: Pipeline;
stepId: number;
}>();
components: { Icon, Button }, defineEmits<{
(event: 'update:step-id', stepId: number | null): true;
}>();
props: { const notifications = useNotifications();
pipeline: { const i18n = useI18n();
type: Object as PropType<Pipeline>, const pipeline = toRef(props, 'pipeline');
required: true, const stepId = toRef(props, 'stepId');
}, const repo = inject<Ref<Repo>>('repo');
const apiClient = useApiClient();
stepId: { const loadedStepSlug = ref<string>();
type: Number, const stepSlug = computed(() => `${repo?.value.owner} - ${repo?.value.name} - ${pipeline.value.id} - ${stepId.value}`);
required: true, const step = computed(() => pipeline.value && findStep(pipeline.value.steps || [], stepId.value));
}, const stream = ref<EventSource>();
}, const log = ref<LogLine[]>();
const consoleElement = ref<Element>();
emits: { const loadedLogs = computed(() => !!log.value);
// eslint-disable-next-line @typescript-eslint/no-unused-vars const hasLogs = computed(
'update:step-id': (stepId: number | null) => true, () =>
}, // we do not have logs for skipped steps
repo?.value && pipeline.value && step.value && step.value.state !== 'skipped' && step.value.state !== 'killed',
);
const autoScroll = useStorage('log-auto-scroll', false);
const showActions = ref(false);
const downloadInProgress = ref(false);
const ansiUp = ref(new AnsiUp());
ansiUp.value.use_classes = true;
const logBuffer = ref<LogLine[]>([]);
setup(props) { const maxLineCount = 500; // TODO: think about way to support lazy-loading more than last 300 logs (#776)
const notifications = useNotifications();
const i18n = useI18n();
const pipeline = toRef(props, 'pipeline');
const stepId = toRef(props, 'stepId');
const repo = inject<Ref<Repo>>('repo');
const apiClient = useApiClient();
const loadedStepSlug = ref<string>(); function formatTime(time?: number): string {
const stepSlug = computed( return time === undefined ? '' : `${time}s`;
() => `${repo?.value.owner} - ${repo?.value.name} - ${pipeline.value.id} - ${stepId.value}`, }
function writeLog(line: LogLine) {
logBuffer.value.push({
index: line.index ?? 0,
text: ansiUp.value.ansi_to_html(line.text),
time: line.time ?? 0,
});
}
function scrollDown() {
nextTick(() => {
if (!consoleElement.value) {
return;
}
consoleElement.value.scrollTop = consoleElement.value.scrollHeight;
});
}
const flushLogs = debounce((scroll: boolean) => {
let buffer = logBuffer.value.slice(-maxLineCount);
logBuffer.value = [];
if (buffer.length === 0) {
if (!log.value) {
log.value = [];
}
return;
}
// append old logs lines
if (buffer.length < maxLineCount && log.value) {
buffer = [...log.value.slice(-(maxLineCount - buffer.length)), ...buffer];
}
// deduplicate repeating times
buffer = buffer.reduce(
(acc, line) => ({
lastTime: line.time ?? 0,
lines: [
...acc.lines,
{
...line,
time: acc.lastTime === line.time ? undefined : line.time,
},
],
}),
{ lastTime: -1, lines: [] as LogLine[] },
).lines;
log.value = buffer;
if (scroll && autoScroll.value) {
scrollDown();
}
}, 500);
async function download() {
if (!repo?.value || !pipeline.value || !step.value) {
throw new Error('The repository, pipeline or step was undefined');
}
let logs;
try {
downloadInProgress.value = true;
logs = await apiClient.getLogs(repo.value.owner, repo.value.name, pipeline.value.number, step.value.id);
} catch (e) {
notifications.notifyError(e, i18n.t('repo.pipeline.log_download_error'));
return;
} finally {
downloadInProgress.value = false;
}
const fileURL = window.URL.createObjectURL(
new Blob([logs.map((line) => atob(line.data)).join('')], {
type: 'text/plain',
}),
);
const fileLink = document.createElement('a');
fileLink.href = fileURL;
fileLink.setAttribute(
'download',
`${repo.value.owner}-${repo.value.name}-${pipeline.value.number}-${step.value.name}.log`,
);
document.body.appendChild(fileLink);
fileLink.click();
document.body.removeChild(fileLink);
window.URL.revokeObjectURL(fileURL);
}
async function loadLogs() {
if (loadedStepSlug.value === stepSlug.value) {
return;
}
loadedStepSlug.value = stepSlug.value;
log.value = undefined;
logBuffer.value = [];
ansiUp.value = new AnsiUp();
ansiUp.value.use_classes = true;
if (!repo) {
throw new Error('Unexpected: "repo" should be provided at this place');
}
if (stream.value) {
stream.value.close();
}
if (!hasLogs.value || !step.value) {
return;
}
if (isStepFinished(step.value)) {
const logs = await apiClient.getLogs(repo.value.owner, repo.value.name, pipeline.value.number, step.value.id);
logs?.forEach((line) => writeLog({ index: line.line, text: atob(line.data), time: line.time }));
flushLogs(false);
}
if (isStepRunning(step.value)) {
stream.value = apiClient.streamLogs(
repo.value.owner,
repo.value.name,
pipeline.value.number,
step.value.id,
(line) => {
writeLog({ index: line.line, text: atob(line.data), time: line.time });
flushLogs(true);
},
); );
const step = computed(() => pipeline.value && findStep(pipeline.value.steps || [], stepId.value)); }
const stream = ref<EventSource>(); }
const log = ref<LogLine[]>();
const consoleElement = ref<Element>();
const loadedLogs = computed(() => !!log.value); onMounted(async () => {
const hasLogs = computed( loadLogs();
() => });
// we do not have logs for skipped steps
repo?.value && pipeline.value && step.value && step.value.state !== 'skipped' && step.value.state !== 'killed',
);
const autoScroll = useStorage('log-auto-scroll', false);
const showActions = ref(false);
const downloadInProgress = ref(false);
const ansiUp = ref(new AnsiUp());
ansiUp.value.use_classes = true;
const logBuffer = ref<LogLine[]>([]);
const maxLineCount = 500; // TODO: think about way to support lazy-loading more than last 300 logs (#776) watch(stepSlug, () => {
loadLogs();
});
function formatTime(time?: number): string { watch(step, (oldStep, newStep) => {
return time === undefined ? '' : `${time}s`; if (oldStep && oldStep.name === newStep?.name && oldStep?.end_time !== newStep?.end_time) {
if (autoScroll.value) {
scrollDown();
} }
}
function writeLog(line: LogLine) {
logBuffer.value.push({
index: line.index ?? 0,
text: ansiUp.value.ansi_to_html(line.text),
time: line.time ?? 0,
});
}
function scrollDown() {
nextTick(() => {
if (!consoleElement.value) {
return;
}
consoleElement.value.scrollTop = consoleElement.value.scrollHeight;
});
}
const flushLogs = debounce((scroll: boolean) => {
let buffer = logBuffer.value.slice(-maxLineCount);
logBuffer.value = [];
if (buffer.length === 0) {
if (!log.value) {
log.value = [];
}
return;
}
// append old logs lines
if (buffer.length < maxLineCount && log.value) {
buffer = [...log.value.slice(-(maxLineCount - buffer.length)), ...buffer];
}
// deduplicate repeating times
buffer = buffer.reduce(
(acc, line) => ({
lastTime: line.time ?? 0,
lines: [
...acc.lines,
{
...line,
time: acc.lastTime === line.time ? undefined : line.time,
},
],
}),
{ lastTime: -1, lines: [] as LogLine[] },
).lines;
log.value = buffer;
if (scroll && autoScroll.value) {
scrollDown();
}
}, 500);
async function download() {
if (!repo?.value || !pipeline.value || !step.value) {
throw new Error('The repository, pipeline or step was undefined');
}
let logs;
try {
downloadInProgress.value = true;
logs = await apiClient.getLogs(repo.value.owner, repo.value.name, pipeline.value.number, step.value.pid);
} catch (e) {
notifications.notifyError(e, i18n.t('repo.pipeline.log_download_error'));
return;
} finally {
downloadInProgress.value = false;
}
const fileURL = window.URL.createObjectURL(
new Blob([logs.map((line) => line.out).join('')], {
type: 'text/plain',
}),
);
const fileLink = document.createElement('a');
fileLink.href = fileURL;
fileLink.setAttribute(
'download',
`${repo.value.owner}-${repo.value.name}-${pipeline.value.number}-${step.value.name}.log`,
);
document.body.appendChild(fileLink);
fileLink.click();
document.body.removeChild(fileLink);
window.URL.revokeObjectURL(fileURL);
}
async function loadLogs() {
if (loadedStepSlug.value === stepSlug.value) {
return;
}
loadedStepSlug.value = stepSlug.value;
log.value = undefined;
logBuffer.value = [];
ansiUp.value = new AnsiUp();
ansiUp.value.use_classes = true;
if (!repo) {
throw new Error('Unexpected: "repo" should be provided at this place');
}
if (stream.value) {
stream.value.close();
}
if (!hasLogs.value || !step.value) {
return;
}
if (isStepFinished(step.value)) {
const logs = await apiClient.getLogs(repo.value.owner, repo.value.name, pipeline.value.number, step.value.pid);
logs?.forEach((line) => writeLog({ index: line.pos, text: line.out, time: line.time }));
flushLogs(false);
}
if (isStepRunning(step.value)) {
// load stream of parent process (which receives all child processes logs)
// TODO: change stream to only send data of single child process
stream.value = apiClient.streamLogs(
repo.value.owner,
repo.value.name,
pipeline.value.number,
step.value.ppid,
(line) => {
if (line?.step !== step.value?.name) {
return;
}
writeLog({ index: line.pos, text: line.out, time: line.time });
flushLogs(true);
},
);
}
}
onMounted(async () => {
loadLogs();
});
watch(stepSlug, () => {
loadLogs();
});
watch(step, (oldStep, newStep) => {
if (oldStep && oldStep.name === newStep?.name && oldStep?.end_time !== newStep?.end_time) {
if (autoScroll.value) {
scrollDown();
}
}
});
return {
consoleElement,
step,
log,
loadedLogs,
hasLogs,
formatTime,
showActions,
download,
downloadInProgress,
autoScroll,
};
},
}); });
</script> </script>

View File

@@ -129,8 +129,8 @@ export default class WoodpeckerClient extends ApiClient {
return this._post(`/api/repos/${owner}/${repo}/pipelines/${pipeline}?${query}`) as Promise<Pipeline>; return this._post(`/api/repos/${owner}/${repo}/pipelines/${pipeline}?${query}`) as Promise<Pipeline>;
} }
getLogs(owner: string, repo: string, pipeline: number, step: number): Promise<PipelineLog[]> { getLogs(owner: string, repo: string, pipeline: number, stepId: number): Promise<PipelineLog[]> {
return this._get(`/api/repos/${owner}/${repo}/logs/${pipeline}/${step}`) as Promise<PipelineLog[]>; return this._get(`/api/repos/${owner}/${repo}/logs/${pipeline}/${stepId}`) as Promise<PipelineLog[]>;
} }
getSecretList(owner: string, repo: string, page: number): Promise<Secret[] | null> { getSecretList(owner: string, repo: string, page: number): Promise<Secret[] | null> {
@@ -300,7 +300,7 @@ export default class WoodpeckerClient extends ApiClient {
// eslint-disable-next-line promise/prefer-await-to-callbacks // eslint-disable-next-line promise/prefer-await-to-callbacks
callback: (data: PipelineLog) => void, callback: (data: PipelineLog) => void,
): EventSource { ): EventSource {
return this._subscribe(`/stream/logs/${owner}/${repo}/${pipeline}/${step}`, callback, { return this._subscribe(`/api/stream/logs/${owner}/${repo}/${pipeline}/${step}`, callback, {
reconnect: true, reconnect: true,
}); });
} }

View File

@@ -102,6 +102,7 @@ export type PipelineStatus =
export type PipelineStep = { export type PipelineStep = {
id: number; id: number;
uuid: string;
pipeline_id: number; pipeline_id: number;
pid: number; pid: number;
ppid: number; ppid: number;
@@ -118,10 +119,12 @@ export type PipelineStep = {
}; };
export type PipelineLog = { export type PipelineLog = {
step: string; id: number;
pos: number; step_id: number;
out: string; time: number;
time?: number; line: number;
data: string; // base64 encoded
type: number;
}; };
export type PipelineFeed = Pipeline & { export type PipelineFeed = Pipeline & {

View File

@@ -286,9 +286,9 @@ func (c *client) PipelineKill(owner, name string, num int) error {
} }
// PipelineLogs returns the pipeline logs for the specified step. // PipelineLogs returns the pipeline logs for the specified step.
func (c *client) PipelineLogs(owner, name string, num, step int) ([]*Logs, error) { func (c *client) StepLogEntries(owner, name string, num, step int) ([]*LogEntry, error) {
uri := fmt.Sprintf(pathLogs, c.addr, owner, name, num, step) uri := fmt.Sprintf(pathLogs, c.addr, owner, name, num, step)
var out []*Logs var out []*LogEntry
err := c.get(uri, &out) err := c.get(uri, &out)
return out, err return out, err
} }

View File

@@ -33,3 +33,14 @@ const (
StatusKilled = "killed" StatusKilled = "killed"
StatusError = "error" StatusError = "error"
) )
// LogEntryType identifies the type of line in the logs.
type LogEntryType int
const (
LogEntryStdout LogEntryType = iota
LogEntryStderr
LogEntryExitCode
LogEntryMetadata
LogEntryProgress
)

View File

@@ -105,8 +105,8 @@ type Client interface {
// PipelineKill force kills the running pipeline. // PipelineKill force kills the running pipeline.
PipelineKill(string, string, int) error PipelineKill(string, string, int) error
// PipelineLogs returns the logs for the given pipeline // StepLogEntries returns the LogEntries for the given pipeline step
PipelineLogs(string, string, int, int) ([]*Logs, error) StepLogEntries(string, string, int, int) ([]*LogEntry, error)
// Deploy triggers a deployment for an existing pipeline using the specified // Deploy triggers a deployment for an existing pipeline using the specified
// target environment. // target environment.

View File

@@ -173,10 +173,14 @@ type (
Level string `json:"log-level"` Level string `json:"log-level"`
} }
// Logs is the JSON data for a logs response // LogEntry is a single log entry
Logs struct { LogEntry struct {
Step string `json:"step"` ID int64 `json:"id"`
Output string `json:"out"` StepID int64 `json:"step_id"`
Time int64 `json:"time"`
Line int `json:"line"`
Data []byte `json:"data"`
Type LogEntryType `json:"type"`
} }
// Cron is the JSON data of a cron job // Cron is the JSON data of a cron job