package taskfile

import (
	"context"
	"fmt"
	"os"
	"time"

	"gopkg.in/yaml.v3"

	"github.com/go-task/task/v3/errors"
	"github.com/go-task/task/v3/internal/filepathext"
	"github.com/go-task/task/v3/internal/logger"
	"github.com/go-task/task/v3/internal/templater"
	"github.com/go-task/task/v3/taskfile/ast"
)

const (
	taskfileUntrustedPrompt = `The task you are attempting to run depends on the remote Taskfile at %q.
--- Make sure you trust the source of this Taskfile before continuing ---
Continue?`
	taskfileChangedPrompt = `The Taskfile at %q has changed since you last used it!
--- Make sure you trust the source of this Taskfile before continuing ---
Continue?`
)

// Read reads a Taskfile for a given directory.
// Uses the current dir when dir is left empty. Uses Taskfile.yml
// or Taskfile.yaml when the entrypoint is left empty.
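//
// A minimal usage sketch (assumes a Node built elsewhere, e.g. with the NewNode
// helper used below; the concrete arguments are illustrative only):
//
//	node, err := NewNode(l, "Taskfile.yml", ".", false)
//	if err != nil {
//		return err
//	}
//	tf, err := Read(node, false, false, false, time.Second*10, os.TempDir(), l)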
func Read(
	node Node,
	insecure bool,
	download bool,
	offline bool,
	timeout time.Duration,
	tempDir string,
	l *logger.Logger,
) (*ast.Taskfile, error) {
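	// Recursively read the given node and all of its includes: _taskfile is
	// declared before it is defined so the closure can call itself for each
	// included Taskfile it encounters.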
	var _taskfile func(Node) (*ast.Taskfile, error)
	_taskfile = func(node Node) (*ast.Taskfile, error) {
		tf, err := readTaskfile(node, download, offline, timeout, tempDir, l)
		if err != nil {
			return nil, err
		}

		// Check that the Taskfile is set and has a schema version
		if tf == nil || tf.Version == nil {
			return nil, &errors.TaskfileVersionCheckError{URI: node.Location()}
		}

		err = tf.Includes.Range(func(namespace string, include ast.Include) error {
			cache := &templater.Cache{Vars: tf.Vars}
			include = ast.Include{
				Namespace:      include.Namespace,
				Taskfile:       templater.Replace(include.Taskfile, cache),
				Dir:            templater.Replace(include.Dir, cache),
				Optional:       include.Optional,
				Internal:       include.Internal,
				Aliases:        include.Aliases,
				AdvancedImport: include.AdvancedImport,
				Vars:           include.Vars,
			}
			if err := cache.Err(); err != nil {
				return err
			}

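			// Resolve the include's entrypoint and directory relative to the node being read.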
			entrypoint, err := node.ResolveEntrypoint(include.Taskfile)
			if err != nil {
				return err
			}

			dir, err := node.ResolveDir(include.Dir)
			if err != nil {
				return err
			}

			includeReaderNode, err := NewNode(l, entrypoint, dir, insecure,
				WithParent(node),
				WithOptional(include.Optional),
			)
			if err != nil {
				if include.Optional {
					return nil
				}
				return err
			}

			if err := checkCircularIncludes(includeReaderNode); err != nil {
				return err
			}

			includedTaskfile, err := _taskfile(includeReaderNode)
			if err != nil {
				if include.Optional {
					return nil
				}
				return err
			}

			if len(includedTaskfile.Dotenv) > 0 {
				return ErrIncludedTaskfilesCantHaveDotenvs
			}

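			// For advanced imports, rebase the included Taskfile's vars and env
			// on the include's directory and merge the include's vars into each task.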
			if include.AdvancedImport {
				// nolint: errcheck
				includedTaskfile.Vars.Range(func(k string, v ast.Var) error {
					o := v
					o.Dir = dir
					includedTaskfile.Vars.Set(k, o)
					return nil
				})
				// nolint: errcheck
				includedTaskfile.Env.Range(func(k string, v ast.Var) error {
					o := v
					o.Dir = dir
					includedTaskfile.Env.Set(k, o)
					return nil
				})

				for _, task := range includedTaskfile.Tasks.Values() {
					task.Dir = filepathext.SmartJoin(dir, task.Dir)
					if task.IncludeVars == nil {
						task.IncludeVars = &ast.Vars{}
					}
					task.IncludeVars.Merge(include.Vars)
					task.IncludedTaskfileVars = includedTaskfile.Vars
				}
			}

			if err = tf.Merge(includedTaskfile, &include); err != nil {
				return err
			}

			return nil
		})
		if err != nil {
			return nil, err
		}

		for _, task := range tf.Tasks.Values() {
			// If the task is not defined, create a new one
			if task == nil {
				task = &ast.Task{}
			}
			// Set the location of the taskfile for each task
			if task.Location.Taskfile == "" {
				task.Location.Taskfile = tf.Location
			}
		}

		return tf, nil
	}
	return _taskfile(node)
}

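// readTaskfile reads the contents of a single node (local or remote) and
// unmarshals it into an ast.Taskfile. Remote Taskfiles are cached in tempDir
// and verified against a stored checksum before they are trusted.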
func readTaskfile(
	node Node,
	download,
	offline bool,
	timeout time.Duration,
	tempDir string,
	l *logger.Logger,
) (*ast.Taskfile, error) {
	var b []byte
	var err error
	var cache *Cache

	if node.Remote() {
		cache, err = NewCache(tempDir)
		if err != nil {
			return nil, err
		}
	}

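	// Reading strategy: in offline mode a remote Taskfile must already be in the
	// cache; otherwise the node is read with a timeout and, if it is remote, its
	// checksum is compared against the cached one before the file is trusted.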
	// If the file is remote and we're in offline mode, check if we have a cached copy
	if node.Remote() && offline {
		if b, err = cache.read(node); errors.Is(err, os.ErrNotExist) {
			return nil, &errors.TaskfileCacheNotFoundError{URI: node.Location()}
		} else if err != nil {
			return nil, err
		}
		l.VerboseOutf(logger.Magenta, "task: [%s] Fetched cached copy\n", node.Location())

	} else {

		downloaded := false
		ctx, cf := context.WithTimeout(context.Background(), timeout)
		defer cf()

		// Read the file
		b, err = node.Read(ctx)
		// If we timed out then we likely have a network issue
		if node.Remote() && errors.Is(ctx.Err(), context.DeadlineExceeded) {
			// If a download was requested, then we can't use a cached copy
			if download {
				return nil, &errors.TaskfileNetworkTimeoutError{URI: node.Location(), Timeout: timeout}
			}
			// Search for any cached copies
			if b, err = cache.read(node); errors.Is(err, os.ErrNotExist) {
				return nil, &errors.TaskfileNetworkTimeoutError{URI: node.Location(), Timeout: timeout, CheckedCache: true}
			} else if err != nil {
				return nil, err
			}
			l.VerboseOutf(logger.Magenta, "task: [%s] Network timeout. Fetched cached copy\n", node.Location())
		} else if err != nil {
			return nil, err
		} else {
			downloaded = true
		}

		// If the node was remote, we need to check the checksum
		if node.Remote() && downloaded {
			l.VerboseOutf(logger.Magenta, "task: [%s] Fetched remote copy\n", node.Location())

			// Get the checksums
			checksum := checksum(b)
			cachedChecksum := cache.readChecksum(node)

			var prompt string
			if cachedChecksum == "" {
				// If the checksum doesn't exist, prompt the user to continue
				prompt = fmt.Sprintf(taskfileUntrustedPrompt, node.Location())
			} else if checksum != cachedChecksum {
				// If there is a cached hash, but it doesn't match the expected hash, prompt the user to continue
				prompt = fmt.Sprintf(taskfileChangedPrompt, node.Location())
			}
			if prompt != "" {
				if err := l.Prompt(logger.Yellow, prompt, "n", "y", "yes"); err != nil {
					return nil, &errors.TaskfileNotTrustedError{URI: node.Location()}
				}
			}

			// If the hash has changed (or is new)
			if checksum != cachedChecksum {
				// Store the checksum
				if err := cache.writeChecksum(node, checksum); err != nil {
					return nil, err
				}
				// Cache the file
				l.VerboseOutf(logger.Magenta, "task: [%s] Caching downloaded file\n", node.Location())
				if err = cache.write(node, b); err != nil {
					return nil, err
				}
			}
		}
	}

	var t ast.Taskfile
	if err := yaml.Unmarshal(b, &t); err != nil {
		return nil, &errors.TaskfileInvalidError{URI: filepathext.TryAbsToRel(node.Location()), Err: err}
	}
	t.Location = node.Location()

	return &t, nil
}

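// checkCircularIncludes walks up the chain of parent nodes and reports an
// error if any ancestor has the same location as the given node.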
func checkCircularIncludes(node Node) error {
	if node == nil {
		return errors.New("task: failed to check for include cycle: node was nil")
	}
	if node.Parent() == nil {
		return errors.New("task: failed to check for include cycle: node.Parent was nil")
	}
	curNode := node
	location := node.Location()
	for curNode.Parent() != nil {
		curNode = curNode.Parent()
		curLocation := curNode.Location()
		if curLocation == location {
			return fmt.Errorf("task: include cycle detected between %s <--> %s",
				curLocation,
				node.Parent().Location(),
			)
		}
	}
	return nil
}