mirror of
https://github.com/go-task/task.git
synced 2025-08-08 22:36:57 +02:00
update dependencies
This commit is contained in:
34
Gopkg.lock
generated
34
Gopkg.lock
generated
@@ -1,12 +1,6 @@
|
||||
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
|
||||
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "github.com/BurntSushi/toml"
|
||||
packages = ["."]
|
||||
revision = "8b58b6030fce084b58a61e2bc3fdf183d5881ab4"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/Masterminds/semver"
|
||||
packages = ["."]
|
||||
@@ -25,6 +19,12 @@
|
||||
revision = "3391d3790d23d03408670993e957e8f408993c34"
|
||||
version = "v1.0.1"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/davecgh/go-spew"
|
||||
packages = ["spew"]
|
||||
revision = "346938d642f2ec3594ed81d874461961cd0faa76"
|
||||
version = "v1.1.0"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "github.com/fsnotify/fsnotify"
|
||||
@@ -53,7 +53,13 @@
|
||||
branch = "master"
|
||||
name = "github.com/mvdan/sh"
|
||||
packages = ["interp","syntax"]
|
||||
revision = "7545ea3a7ad3eb62f4f879da2c07e9c13f53d0ef"
|
||||
revision = "6773c283e820450b8ab307ac68e502960d473470"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/pmezard/go-difflib"
|
||||
packages = ["difflib"]
|
||||
revision = "792786c7400a136282c1664665ae0a8db921c6c2"
|
||||
version = "v1.0.0"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/satori/go.uuid"
|
||||
@@ -67,17 +73,23 @@
|
||||
packages = ["."]
|
||||
revision = "e57e3eeb33f795204c1ca35f56c44f83227c6e66"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/stretchr/testify"
|
||||
packages = ["assert"]
|
||||
revision = "69483b4bd14f5845b5a1e55bca19e954e827f1d0"
|
||||
version = "v1.1.4"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "golang.org/x/crypto"
|
||||
packages = ["pbkdf2","scrypt"]
|
||||
revision = "adbae1b6b6fb4b02448a0fc0dbbc9ba2b95b294d"
|
||||
revision = "b286ef4198388fdb0e4ae62be12820df5da9b4c2"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "golang.org/x/net"
|
||||
packages = ["context"]
|
||||
revision = "5f8847ae0d0e90b6a9dc8148e7ad616874625171"
|
||||
revision = "570fa1c91359c1869590e9cedf3b53162a51a167"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
@@ -89,7 +101,7 @@
|
||||
branch = "master"
|
||||
name = "golang.org/x/sys"
|
||||
packages = ["unix"]
|
||||
revision = "c23410a886927bab8ca5e80b08af6a56faeb330d"
|
||||
revision = "6faef541c73732f438fb660a212750a9ba9f9362"
|
||||
|
||||
[[projects]]
|
||||
branch = "v2"
|
||||
@@ -100,6 +112,6 @@
|
||||
[solve-meta]
|
||||
analyzer-name = "dep"
|
||||
analyzer-version = 1
|
||||
inputs-digest = "2f626389e1dc4d5ac2e6b83c6b9a50cab0dd3187bbc4e03694810df03a08b3fc"
|
||||
inputs-digest = "93839de063626661a216a313ab71e2ad920afb2528f69ca6110c2155276e6dab"
|
||||
solver-name = "gps-cdcl"
|
||||
solver-version = 1
|
||||
|
@@ -66,10 +66,6 @@
|
||||
|
||||
|
||||
|
||||
[[constraint]]
|
||||
branch = "master"
|
||||
name = "github.com/BurntSushi/toml"
|
||||
|
||||
[[constraint]]
|
||||
branch = "master"
|
||||
name = "github.com/Masterminds/sprig"
|
||||
|
3
vendor/github.com/BurntSushi/toml/COMPATIBLE
generated
vendored
3
vendor/github.com/BurntSushi/toml/COMPATIBLE
generated
vendored
@@ -1,3 +0,0 @@
|
||||
Compatible with TOML version
|
||||
[v0.4.0](https://github.com/toml-lang/toml/blob/v0.4.0/versions/en/toml-v0.4.0.md)
|
||||
|
21
vendor/github.com/BurntSushi/toml/COPYING
generated
vendored
21
vendor/github.com/BurntSushi/toml/COPYING
generated
vendored
@@ -1,21 +0,0 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2013 TOML authors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
19
vendor/github.com/BurntSushi/toml/Makefile
generated
vendored
19
vendor/github.com/BurntSushi/toml/Makefile
generated
vendored
@@ -1,19 +0,0 @@
|
||||
install:
|
||||
go install ./...
|
||||
|
||||
test: install
|
||||
go test -v
|
||||
toml-test toml-test-decoder
|
||||
toml-test -encoder toml-test-encoder
|
||||
|
||||
fmt:
|
||||
gofmt -w *.go */*.go
|
||||
colcheck *.go */*.go
|
||||
|
||||
tags:
|
||||
find ./ -name '*.go' -print0 | xargs -0 gotags > TAGS
|
||||
|
||||
push:
|
||||
git push origin master
|
||||
git push github master
|
||||
|
218
vendor/github.com/BurntSushi/toml/README.md
generated
vendored
218
vendor/github.com/BurntSushi/toml/README.md
generated
vendored
@@ -1,218 +0,0 @@
|
||||
## TOML parser and encoder for Go with reflection
|
||||
|
||||
TOML stands for Tom's Obvious, Minimal Language. This Go package provides a
|
||||
reflection interface similar to Go's standard library `json` and `xml`
|
||||
packages. This package also supports the `encoding.TextUnmarshaler` and
|
||||
`encoding.TextMarshaler` interfaces so that you can define custom data
|
||||
representations. (There is an example of this below.)
|
||||
|
||||
Spec: https://github.com/toml-lang/toml
|
||||
|
||||
Compatible with TOML version
|
||||
[v0.4.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md)
|
||||
|
||||
Documentation: https://godoc.org/github.com/BurntSushi/toml
|
||||
|
||||
Installation:
|
||||
|
||||
```bash
|
||||
go get github.com/BurntSushi/toml
|
||||
```
|
||||
|
||||
Try the toml validator:
|
||||
|
||||
```bash
|
||||
go get github.com/BurntSushi/toml/cmd/tomlv
|
||||
tomlv some-toml-file.toml
|
||||
```
|
||||
|
||||
[](https://travis-ci.org/BurntSushi/toml) [](https://godoc.org/github.com/BurntSushi/toml)
|
||||
|
||||
### Testing
|
||||
|
||||
This package passes all tests in
|
||||
[toml-test](https://github.com/BurntSushi/toml-test) for both the decoder
|
||||
and the encoder.
|
||||
|
||||
### Examples
|
||||
|
||||
This package works similarly to how the Go standard library handles `XML`
|
||||
and `JSON`. Namely, data is loaded into Go values via reflection.
|
||||
|
||||
For the simplest example, consider some TOML file as just a list of keys
|
||||
and values:
|
||||
|
||||
```toml
|
||||
Age = 25
|
||||
Cats = [ "Cauchy", "Plato" ]
|
||||
Pi = 3.14
|
||||
Perfection = [ 6, 28, 496, 8128 ]
|
||||
DOB = 1987-07-05T05:45:00Z
|
||||
```
|
||||
|
||||
Which could be defined in Go as:
|
||||
|
||||
```go
|
||||
type Config struct {
|
||||
Age int
|
||||
Cats []string
|
||||
Pi float64
|
||||
Perfection []int
|
||||
DOB time.Time // requires `import time`
|
||||
}
|
||||
```
|
||||
|
||||
And then decoded with:
|
||||
|
||||
```go
|
||||
var conf Config
|
||||
if _, err := toml.Decode(tomlData, &conf); err != nil {
|
||||
// handle error
|
||||
}
|
||||
```
|
||||
|
||||
You can also use struct tags if your struct field name doesn't map to a TOML
|
||||
key value directly:
|
||||
|
||||
```toml
|
||||
some_key_NAME = "wat"
|
||||
```
|
||||
|
||||
```go
|
||||
type TOML struct {
|
||||
ObscureKey string `toml:"some_key_NAME"`
|
||||
}
|
||||
```
|
||||
|
||||
### Using the `encoding.TextUnmarshaler` interface
|
||||
|
||||
Here's an example that automatically parses duration strings into
|
||||
`time.Duration` values:
|
||||
|
||||
```toml
|
||||
[[song]]
|
||||
name = "Thunder Road"
|
||||
duration = "4m49s"
|
||||
|
||||
[[song]]
|
||||
name = "Stairway to Heaven"
|
||||
duration = "8m03s"
|
||||
```
|
||||
|
||||
Which can be decoded with:
|
||||
|
||||
```go
|
||||
type song struct {
|
||||
Name string
|
||||
Duration duration
|
||||
}
|
||||
type songs struct {
|
||||
Song []song
|
||||
}
|
||||
var favorites songs
|
||||
if _, err := toml.Decode(blob, &favorites); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
for _, s := range favorites.Song {
|
||||
fmt.Printf("%s (%s)\n", s.Name, s.Duration)
|
||||
}
|
||||
```
|
||||
|
||||
And you'll also need a `duration` type that satisfies the
|
||||
`encoding.TextUnmarshaler` interface:
|
||||
|
||||
```go
|
||||
type duration struct {
|
||||
time.Duration
|
||||
}
|
||||
|
||||
func (d *duration) UnmarshalText(text []byte) error {
|
||||
var err error
|
||||
d.Duration, err = time.ParseDuration(string(text))
|
||||
return err
|
||||
}
|
||||
```
|
||||
|
||||
### More complex usage
|
||||
|
||||
Here's an example of how to load the example from the official spec page:
|
||||
|
||||
```toml
|
||||
# This is a TOML document. Boom.
|
||||
|
||||
title = "TOML Example"
|
||||
|
||||
[owner]
|
||||
name = "Tom Preston-Werner"
|
||||
organization = "GitHub"
|
||||
bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
|
||||
dob = 1979-05-27T07:32:00Z # First class dates? Why not?
|
||||
|
||||
[database]
|
||||
server = "192.168.1.1"
|
||||
ports = [ 8001, 8001, 8002 ]
|
||||
connection_max = 5000
|
||||
enabled = true
|
||||
|
||||
[servers]
|
||||
|
||||
# You can indent as you please. Tabs or spaces. TOML don't care.
|
||||
[servers.alpha]
|
||||
ip = "10.0.0.1"
|
||||
dc = "eqdc10"
|
||||
|
||||
[servers.beta]
|
||||
ip = "10.0.0.2"
|
||||
dc = "eqdc10"
|
||||
|
||||
[clients]
|
||||
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
|
||||
|
||||
# Line breaks are OK when inside arrays
|
||||
hosts = [
|
||||
"alpha",
|
||||
"omega"
|
||||
]
|
||||
```
|
||||
|
||||
And the corresponding Go types are:
|
||||
|
||||
```go
|
||||
type tomlConfig struct {
|
||||
Title string
|
||||
Owner ownerInfo
|
||||
DB database `toml:"database"`
|
||||
Servers map[string]server
|
||||
Clients clients
|
||||
}
|
||||
|
||||
type ownerInfo struct {
|
||||
Name string
|
||||
Org string `toml:"organization"`
|
||||
Bio string
|
||||
DOB time.Time
|
||||
}
|
||||
|
||||
type database struct {
|
||||
Server string
|
||||
Ports []int
|
||||
ConnMax int `toml:"connection_max"`
|
||||
Enabled bool
|
||||
}
|
||||
|
||||
type server struct {
|
||||
IP string
|
||||
DC string
|
||||
}
|
||||
|
||||
type clients struct {
|
||||
Data [][]interface{}
|
||||
Hosts []string
|
||||
}
|
||||
```
|
||||
|
||||
Note that a case insensitive match will be tried if an exact match can't be
|
||||
found.
|
||||
|
||||
A working example of the above can be found in `_examples/example.{go,toml}`.
|
509
vendor/github.com/BurntSushi/toml/decode.go
generated
vendored
509
vendor/github.com/BurntSushi/toml/decode.go
generated
vendored
@@ -1,509 +0,0 @@
|
||||
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"math"
|
||||
"reflect"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
func e(format string, args ...interface{}) error {
|
||||
return fmt.Errorf("toml: "+format, args...)
|
||||
}
|
||||
|
||||
// Unmarshaler is the interface implemented by objects that can unmarshal a
|
||||
// TOML description of themselves.
|
||||
type Unmarshaler interface {
|
||||
UnmarshalTOML(interface{}) error
|
||||
}
|
||||
|
||||
// Unmarshal decodes the contents of `p` in TOML format into a pointer `v`.
|
||||
func Unmarshal(p []byte, v interface{}) error {
|
||||
_, err := Decode(string(p), v)
|
||||
return err
|
||||
}
|
||||
|
||||
// Primitive is a TOML value that hasn't been decoded into a Go value.
|
||||
// When using the various `Decode*` functions, the type `Primitive` may
|
||||
// be given to any value, and its decoding will be delayed.
|
||||
//
|
||||
// A `Primitive` value can be decoded using the `PrimitiveDecode` function.
|
||||
//
|
||||
// The underlying representation of a `Primitive` value is subject to change.
|
||||
// Do not rely on it.
|
||||
//
|
||||
// N.B. Primitive values are still parsed, so using them will only avoid
|
||||
// the overhead of reflection. They can be useful when you don't know the
|
||||
// exact type of TOML data until run time.
|
||||
type Primitive struct {
|
||||
undecoded interface{}
|
||||
context Key
|
||||
}
|
||||
|
||||
// DEPRECATED!
|
||||
//
|
||||
// Use MetaData.PrimitiveDecode instead.
|
||||
func PrimitiveDecode(primValue Primitive, v interface{}) error {
|
||||
md := MetaData{decoded: make(map[string]bool)}
|
||||
return md.unify(primValue.undecoded, rvalue(v))
|
||||
}
|
||||
|
||||
// PrimitiveDecode is just like the other `Decode*` functions, except it
|
||||
// decodes a TOML value that has already been parsed. Valid primitive values
|
||||
// can *only* be obtained from values filled by the decoder functions,
|
||||
// including this method. (i.e., `v` may contain more `Primitive`
|
||||
// values.)
|
||||
//
|
||||
// Meta data for primitive values is included in the meta data returned by
|
||||
// the `Decode*` functions with one exception: keys returned by the Undecoded
|
||||
// method will only reflect keys that were decoded. Namely, any keys hidden
|
||||
// behind a Primitive will be considered undecoded. Executing this method will
|
||||
// update the undecoded keys in the meta data. (See the example.)
|
||||
func (md *MetaData) PrimitiveDecode(primValue Primitive, v interface{}) error {
|
||||
md.context = primValue.context
|
||||
defer func() { md.context = nil }()
|
||||
return md.unify(primValue.undecoded, rvalue(v))
|
||||
}
|
||||
|
||||
// Decode will decode the contents of `data` in TOML format into a pointer
|
||||
// `v`.
|
||||
//
|
||||
// TOML hashes correspond to Go structs or maps. (Dealer's choice. They can be
|
||||
// used interchangeably.)
|
||||
//
|
||||
// TOML arrays of tables correspond to either a slice of structs or a slice
|
||||
// of maps.
|
||||
//
|
||||
// TOML datetimes correspond to Go `time.Time` values.
|
||||
//
|
||||
// All other TOML types (float, string, int, bool and array) correspond
|
||||
// to the obvious Go types.
|
||||
//
|
||||
// An exception to the above rules is if a type implements the
|
||||
// encoding.TextUnmarshaler interface. In this case, any primitive TOML value
|
||||
// (floats, strings, integers, booleans and datetimes) will be converted to
|
||||
// a byte string and given to the value's UnmarshalText method. See the
|
||||
// Unmarshaler example for a demonstration with time duration strings.
|
||||
//
|
||||
// Key mapping
|
||||
//
|
||||
// TOML keys can map to either keys in a Go map or field names in a Go
|
||||
// struct. The special `toml` struct tag may be used to map TOML keys to
|
||||
// struct fields that don't match the key name exactly. (See the example.)
|
||||
// A case insensitive match to struct names will be tried if an exact match
|
||||
// can't be found.
|
||||
//
|
||||
// The mapping between TOML values and Go values is loose. That is, there
|
||||
// may exist TOML values that cannot be placed into your representation, and
|
||||
// there may be parts of your representation that do not correspond to
|
||||
// TOML values. This loose mapping can be made stricter by using the IsDefined
|
||||
// and/or Undecoded methods on the MetaData returned.
|
||||
//
|
||||
// This decoder will not handle cyclic types. If a cyclic type is passed,
|
||||
// `Decode` will not terminate.
|
||||
func Decode(data string, v interface{}) (MetaData, error) {
|
||||
rv := reflect.ValueOf(v)
|
||||
if rv.Kind() != reflect.Ptr {
|
||||
return MetaData{}, e("Decode of non-pointer %s", reflect.TypeOf(v))
|
||||
}
|
||||
if rv.IsNil() {
|
||||
return MetaData{}, e("Decode of nil %s", reflect.TypeOf(v))
|
||||
}
|
||||
p, err := parse(data)
|
||||
if err != nil {
|
||||
return MetaData{}, err
|
||||
}
|
||||
md := MetaData{
|
||||
p.mapping, p.types, p.ordered,
|
||||
make(map[string]bool, len(p.ordered)), nil,
|
||||
}
|
||||
return md, md.unify(p.mapping, indirect(rv))
|
||||
}
|
||||
|
||||
// DecodeFile is just like Decode, except it will automatically read the
|
||||
// contents of the file at `fpath` and decode it for you.
|
||||
func DecodeFile(fpath string, v interface{}) (MetaData, error) {
|
||||
bs, err := ioutil.ReadFile(fpath)
|
||||
if err != nil {
|
||||
return MetaData{}, err
|
||||
}
|
||||
return Decode(string(bs), v)
|
||||
}
|
||||
|
||||
// DecodeReader is just like Decode, except it will consume all bytes
|
||||
// from the reader and decode it for you.
|
||||
func DecodeReader(r io.Reader, v interface{}) (MetaData, error) {
|
||||
bs, err := ioutil.ReadAll(r)
|
||||
if err != nil {
|
||||
return MetaData{}, err
|
||||
}
|
||||
return Decode(string(bs), v)
|
||||
}
|
||||
|
||||
// unify performs a sort of type unification based on the structure of `rv`,
|
||||
// which is the client representation.
|
||||
//
|
||||
// Any type mismatch produces an error. Finding a type that we don't know
|
||||
// how to handle produces an unsupported type error.
|
||||
func (md *MetaData) unify(data interface{}, rv reflect.Value) error {
|
||||
|
||||
// Special case. Look for a `Primitive` value.
|
||||
if rv.Type() == reflect.TypeOf((*Primitive)(nil)).Elem() {
|
||||
// Save the undecoded data and the key context into the primitive
|
||||
// value.
|
||||
context := make(Key, len(md.context))
|
||||
copy(context, md.context)
|
||||
rv.Set(reflect.ValueOf(Primitive{
|
||||
undecoded: data,
|
||||
context: context,
|
||||
}))
|
||||
return nil
|
||||
}
|
||||
|
||||
// Special case. Unmarshaler Interface support.
|
||||
if rv.CanAddr() {
|
||||
if v, ok := rv.Addr().Interface().(Unmarshaler); ok {
|
||||
return v.UnmarshalTOML(data)
|
||||
}
|
||||
}
|
||||
|
||||
// Special case. Handle time.Time values specifically.
|
||||
// TODO: Remove this code when we decide to drop support for Go 1.1.
|
||||
// This isn't necessary in Go 1.2 because time.Time satisfies the encoding
|
||||
// interfaces.
|
||||
if rv.Type().AssignableTo(rvalue(time.Time{}).Type()) {
|
||||
return md.unifyDatetime(data, rv)
|
||||
}
|
||||
|
||||
// Special case. Look for a value satisfying the TextUnmarshaler interface.
|
||||
if v, ok := rv.Interface().(TextUnmarshaler); ok {
|
||||
return md.unifyText(data, v)
|
||||
}
|
||||
// BUG(burntsushi)
|
||||
// The behavior here is incorrect whenever a Go type satisfies the
|
||||
// encoding.TextUnmarshaler interface but also corresponds to a TOML
|
||||
// hash or array. In particular, the unmarshaler should only be applied
|
||||
// to primitive TOML values. But at this point, it will be applied to
|
||||
// all kinds of values and produce an incorrect error whenever those values
|
||||
// are hashes or arrays (including arrays of tables).
|
||||
|
||||
k := rv.Kind()
|
||||
|
||||
// laziness
|
||||
if k >= reflect.Int && k <= reflect.Uint64 {
|
||||
return md.unifyInt(data, rv)
|
||||
}
|
||||
switch k {
|
||||
case reflect.Ptr:
|
||||
elem := reflect.New(rv.Type().Elem())
|
||||
err := md.unify(data, reflect.Indirect(elem))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rv.Set(elem)
|
||||
return nil
|
||||
case reflect.Struct:
|
||||
return md.unifyStruct(data, rv)
|
||||
case reflect.Map:
|
||||
return md.unifyMap(data, rv)
|
||||
case reflect.Array:
|
||||
return md.unifyArray(data, rv)
|
||||
case reflect.Slice:
|
||||
return md.unifySlice(data, rv)
|
||||
case reflect.String:
|
||||
return md.unifyString(data, rv)
|
||||
case reflect.Bool:
|
||||
return md.unifyBool(data, rv)
|
||||
case reflect.Interface:
|
||||
// we only support empty interfaces.
|
||||
if rv.NumMethod() > 0 {
|
||||
return e("unsupported type %s", rv.Type())
|
||||
}
|
||||
return md.unifyAnything(data, rv)
|
||||
case reflect.Float32:
|
||||
fallthrough
|
||||
case reflect.Float64:
|
||||
return md.unifyFloat64(data, rv)
|
||||
}
|
||||
return e("unsupported type %s", rv.Kind())
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyStruct(mapping interface{}, rv reflect.Value) error {
|
||||
tmap, ok := mapping.(map[string]interface{})
|
||||
if !ok {
|
||||
if mapping == nil {
|
||||
return nil
|
||||
}
|
||||
return e("type mismatch for %s: expected table but found %T",
|
||||
rv.Type().String(), mapping)
|
||||
}
|
||||
|
||||
for key, datum := range tmap {
|
||||
var f *field
|
||||
fields := cachedTypeFields(rv.Type())
|
||||
for i := range fields {
|
||||
ff := &fields[i]
|
||||
if ff.name == key {
|
||||
f = ff
|
||||
break
|
||||
}
|
||||
if f == nil && strings.EqualFold(ff.name, key) {
|
||||
f = ff
|
||||
}
|
||||
}
|
||||
if f != nil {
|
||||
subv := rv
|
||||
for _, i := range f.index {
|
||||
subv = indirect(subv.Field(i))
|
||||
}
|
||||
if isUnifiable(subv) {
|
||||
md.decoded[md.context.add(key).String()] = true
|
||||
md.context = append(md.context, key)
|
||||
if err := md.unify(datum, subv); err != nil {
|
||||
return err
|
||||
}
|
||||
md.context = md.context[0 : len(md.context)-1]
|
||||
} else if f.name != "" {
|
||||
// Bad user! No soup for you!
|
||||
return e("cannot write unexported field %s.%s",
|
||||
rv.Type().String(), f.name)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyMap(mapping interface{}, rv reflect.Value) error {
|
||||
tmap, ok := mapping.(map[string]interface{})
|
||||
if !ok {
|
||||
if tmap == nil {
|
||||
return nil
|
||||
}
|
||||
return badtype("map", mapping)
|
||||
}
|
||||
if rv.IsNil() {
|
||||
rv.Set(reflect.MakeMap(rv.Type()))
|
||||
}
|
||||
for k, v := range tmap {
|
||||
md.decoded[md.context.add(k).String()] = true
|
||||
md.context = append(md.context, k)
|
||||
|
||||
rvkey := indirect(reflect.New(rv.Type().Key()))
|
||||
rvval := reflect.Indirect(reflect.New(rv.Type().Elem()))
|
||||
if err := md.unify(v, rvval); err != nil {
|
||||
return err
|
||||
}
|
||||
md.context = md.context[0 : len(md.context)-1]
|
||||
|
||||
rvkey.SetString(k)
|
||||
rv.SetMapIndex(rvkey, rvval)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyArray(data interface{}, rv reflect.Value) error {
|
||||
datav := reflect.ValueOf(data)
|
||||
if datav.Kind() != reflect.Slice {
|
||||
if !datav.IsValid() {
|
||||
return nil
|
||||
}
|
||||
return badtype("slice", data)
|
||||
}
|
||||
sliceLen := datav.Len()
|
||||
if sliceLen != rv.Len() {
|
||||
return e("expected array length %d; got TOML array of length %d",
|
||||
rv.Len(), sliceLen)
|
||||
}
|
||||
return md.unifySliceArray(datav, rv)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifySlice(data interface{}, rv reflect.Value) error {
|
||||
datav := reflect.ValueOf(data)
|
||||
if datav.Kind() != reflect.Slice {
|
||||
if !datav.IsValid() {
|
||||
return nil
|
||||
}
|
||||
return badtype("slice", data)
|
||||
}
|
||||
n := datav.Len()
|
||||
if rv.IsNil() || rv.Cap() < n {
|
||||
rv.Set(reflect.MakeSlice(rv.Type(), n, n))
|
||||
}
|
||||
rv.SetLen(n)
|
||||
return md.unifySliceArray(datav, rv)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifySliceArray(data, rv reflect.Value) error {
|
||||
sliceLen := data.Len()
|
||||
for i := 0; i < sliceLen; i++ {
|
||||
v := data.Index(i).Interface()
|
||||
sliceval := indirect(rv.Index(i))
|
||||
if err := md.unify(v, sliceval); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyDatetime(data interface{}, rv reflect.Value) error {
|
||||
if _, ok := data.(time.Time); ok {
|
||||
rv.Set(reflect.ValueOf(data))
|
||||
return nil
|
||||
}
|
||||
return badtype("time.Time", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyString(data interface{}, rv reflect.Value) error {
|
||||
if s, ok := data.(string); ok {
|
||||
rv.SetString(s)
|
||||
return nil
|
||||
}
|
||||
return badtype("string", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyFloat64(data interface{}, rv reflect.Value) error {
|
||||
if num, ok := data.(float64); ok {
|
||||
switch rv.Kind() {
|
||||
case reflect.Float32:
|
||||
fallthrough
|
||||
case reflect.Float64:
|
||||
rv.SetFloat(num)
|
||||
default:
|
||||
panic("bug")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return badtype("float", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyInt(data interface{}, rv reflect.Value) error {
|
||||
if num, ok := data.(int64); ok {
|
||||
if rv.Kind() >= reflect.Int && rv.Kind() <= reflect.Int64 {
|
||||
switch rv.Kind() {
|
||||
case reflect.Int, reflect.Int64:
|
||||
// No bounds checking necessary.
|
||||
case reflect.Int8:
|
||||
if num < math.MinInt8 || num > math.MaxInt8 {
|
||||
return e("value %d is out of range for int8", num)
|
||||
}
|
||||
case reflect.Int16:
|
||||
if num < math.MinInt16 || num > math.MaxInt16 {
|
||||
return e("value %d is out of range for int16", num)
|
||||
}
|
||||
case reflect.Int32:
|
||||
if num < math.MinInt32 || num > math.MaxInt32 {
|
||||
return e("value %d is out of range for int32", num)
|
||||
}
|
||||
}
|
||||
rv.SetInt(num)
|
||||
} else if rv.Kind() >= reflect.Uint && rv.Kind() <= reflect.Uint64 {
|
||||
unum := uint64(num)
|
||||
switch rv.Kind() {
|
||||
case reflect.Uint, reflect.Uint64:
|
||||
// No bounds checking necessary.
|
||||
case reflect.Uint8:
|
||||
if num < 0 || unum > math.MaxUint8 {
|
||||
return e("value %d is out of range for uint8", num)
|
||||
}
|
||||
case reflect.Uint16:
|
||||
if num < 0 || unum > math.MaxUint16 {
|
||||
return e("value %d is out of range for uint16", num)
|
||||
}
|
||||
case reflect.Uint32:
|
||||
if num < 0 || unum > math.MaxUint32 {
|
||||
return e("value %d is out of range for uint32", num)
|
||||
}
|
||||
}
|
||||
rv.SetUint(unum)
|
||||
} else {
|
||||
panic("unreachable")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return badtype("integer", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyBool(data interface{}, rv reflect.Value) error {
|
||||
if b, ok := data.(bool); ok {
|
||||
rv.SetBool(b)
|
||||
return nil
|
||||
}
|
||||
return badtype("boolean", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyAnything(data interface{}, rv reflect.Value) error {
|
||||
rv.Set(reflect.ValueOf(data))
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyText(data interface{}, v TextUnmarshaler) error {
|
||||
var s string
|
||||
switch sdata := data.(type) {
|
||||
case TextMarshaler:
|
||||
text, err := sdata.MarshalText()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
s = string(text)
|
||||
case fmt.Stringer:
|
||||
s = sdata.String()
|
||||
case string:
|
||||
s = sdata
|
||||
case bool:
|
||||
s = fmt.Sprintf("%v", sdata)
|
||||
case int64:
|
||||
s = fmt.Sprintf("%d", sdata)
|
||||
case float64:
|
||||
s = fmt.Sprintf("%f", sdata)
|
||||
default:
|
||||
return badtype("primitive (string-like)", data)
|
||||
}
|
||||
if err := v.UnmarshalText([]byte(s)); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// rvalue returns a reflect.Value of `v`. All pointers are resolved.
|
||||
func rvalue(v interface{}) reflect.Value {
|
||||
return indirect(reflect.ValueOf(v))
|
||||
}
|
||||
|
||||
// indirect returns the value pointed to by a pointer.
|
||||
// Pointers are followed until the value is not a pointer.
|
||||
// New values are allocated for each nil pointer.
|
||||
//
|
||||
// An exception to this rule is if the value satisfies an interface of
|
||||
// interest to us (like encoding.TextUnmarshaler).
|
||||
func indirect(v reflect.Value) reflect.Value {
|
||||
if v.Kind() != reflect.Ptr {
|
||||
if v.CanSet() {
|
||||
pv := v.Addr()
|
||||
if _, ok := pv.Interface().(TextUnmarshaler); ok {
|
||||
return pv
|
||||
}
|
||||
}
|
||||
return v
|
||||
}
|
||||
if v.IsNil() {
|
||||
v.Set(reflect.New(v.Type().Elem()))
|
||||
}
|
||||
return indirect(reflect.Indirect(v))
|
||||
}
|
||||
|
||||
func isUnifiable(rv reflect.Value) bool {
|
||||
if rv.CanSet() {
|
||||
return true
|
||||
}
|
||||
if _, ok := rv.Interface().(TextUnmarshaler); ok {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func badtype(expected string, data interface{}) error {
|
||||
return e("cannot load TOML value of type %T into a Go %s", data, expected)
|
||||
}
|
121
vendor/github.com/BurntSushi/toml/decode_meta.go
generated
vendored
121
vendor/github.com/BurntSushi/toml/decode_meta.go
generated
vendored
@@ -1,121 +0,0 @@
|
||||
package toml
|
||||
|
||||
import "strings"
|
||||
|
||||
// MetaData allows access to meta information about TOML data that may not
|
||||
// be inferrable via reflection. In particular, whether a key has been defined
|
||||
// and the TOML type of a key.
|
||||
type MetaData struct {
|
||||
mapping map[string]interface{}
|
||||
types map[string]tomlType
|
||||
keys []Key
|
||||
decoded map[string]bool
|
||||
context Key // Used only during decoding.
|
||||
}
|
||||
|
||||
// IsDefined returns true if the key given exists in the TOML data. The key
|
||||
// should be specified hierarchially. e.g.,
|
||||
//
|
||||
// // access the TOML key 'a.b.c'
|
||||
// IsDefined("a", "b", "c")
|
||||
//
|
||||
// IsDefined will return false if an empty key given. Keys are case sensitive.
|
||||
func (md *MetaData) IsDefined(key ...string) bool {
|
||||
if len(key) == 0 {
|
||||
return false
|
||||
}
|
||||
|
||||
var hash map[string]interface{}
|
||||
var ok bool
|
||||
var hashOrVal interface{} = md.mapping
|
||||
for _, k := range key {
|
||||
if hash, ok = hashOrVal.(map[string]interface{}); !ok {
|
||||
return false
|
||||
}
|
||||
if hashOrVal, ok = hash[k]; !ok {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Type returns a string representation of the type of the key specified.
|
||||
//
|
||||
// Type will return the empty string if given an empty key or a key that
|
||||
// does not exist. Keys are case sensitive.
|
||||
func (md *MetaData) Type(key ...string) string {
|
||||
fullkey := strings.Join(key, ".")
|
||||
if typ, ok := md.types[fullkey]; ok {
|
||||
return typ.typeString()
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// Key is the type of any TOML key, including key groups. Use (MetaData).Keys
|
||||
// to get values of this type.
|
||||
type Key []string
|
||||
|
||||
func (k Key) String() string {
|
||||
return strings.Join(k, ".")
|
||||
}
|
||||
|
||||
func (k Key) maybeQuotedAll() string {
|
||||
var ss []string
|
||||
for i := range k {
|
||||
ss = append(ss, k.maybeQuoted(i))
|
||||
}
|
||||
return strings.Join(ss, ".")
|
||||
}
|
||||
|
||||
func (k Key) maybeQuoted(i int) string {
|
||||
quote := false
|
||||
for _, c := range k[i] {
|
||||
if !isBareKeyChar(c) {
|
||||
quote = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if quote {
|
||||
return "\"" + strings.Replace(k[i], "\"", "\\\"", -1) + "\""
|
||||
}
|
||||
return k[i]
|
||||
}
|
||||
|
||||
func (k Key) add(piece string) Key {
|
||||
newKey := make(Key, len(k)+1)
|
||||
copy(newKey, k)
|
||||
newKey[len(k)] = piece
|
||||
return newKey
|
||||
}
|
||||
|
||||
// Keys returns a slice of every key in the TOML data, including key groups.
|
||||
// Each key is itself a slice, where the first element is the top of the
|
||||
// hierarchy and the last is the most specific.
|
||||
//
|
||||
// The list will have the same order as the keys appeared in the TOML data.
|
||||
//
|
||||
// All keys returned are non-empty.
|
||||
func (md *MetaData) Keys() []Key {
|
||||
return md.keys
|
||||
}
|
||||
|
||||
// Undecoded returns all keys that have not been decoded in the order in which
|
||||
// they appear in the original TOML document.
|
||||
//
|
||||
// This includes keys that haven't been decoded because of a Primitive value.
|
||||
// Once the Primitive value is decoded, the keys will be considered decoded.
|
||||
//
|
||||
// Also note that decoding into an empty interface will result in no decoding,
|
||||
// and so no keys will be considered decoded.
|
||||
//
|
||||
// In this sense, the Undecoded keys correspond to keys in the TOML document
|
||||
// that do not have a concrete type in your representation.
|
||||
func (md *MetaData) Undecoded() []Key {
|
||||
undecoded := make([]Key, 0, len(md.keys))
|
||||
for _, key := range md.keys {
|
||||
if !md.decoded[key.String()] {
|
||||
undecoded = append(undecoded, key)
|
||||
}
|
||||
}
|
||||
return undecoded
|
||||
}
|
27
vendor/github.com/BurntSushi/toml/doc.go
generated
vendored
27
vendor/github.com/BurntSushi/toml/doc.go
generated
vendored
@@ -1,27 +0,0 @@
|
||||
/*
|
||||
Package toml provides facilities for decoding and encoding TOML configuration
|
||||
files via reflection. There is also support for delaying decoding with
|
||||
the Primitive type, and querying the set of keys in a TOML document with the
|
||||
MetaData type.
|
||||
|
||||
The specification implemented: https://github.com/toml-lang/toml
|
||||
|
||||
The sub-command github.com/BurntSushi/toml/cmd/tomlv can be used to verify
|
||||
whether a file is a valid TOML document. It can also be used to print the
|
||||
type of each key in a TOML document.
|
||||
|
||||
Testing
|
||||
|
||||
There are two important types of tests used for this package. The first is
|
||||
contained inside '*_test.go' files and uses the standard Go unit testing
|
||||
framework. These tests are primarily devoted to holistically testing the
|
||||
decoder and encoder.
|
||||
|
||||
The second type of testing is used to verify the implementation's adherence
|
||||
to the TOML specification. These tests have been factored into their own
|
||||
project: https://github.com/BurntSushi/toml-test
|
||||
|
||||
The reason the tests are in a separate project is so that they can be used by
|
||||
any implementation of TOML. Namely, it is language agnostic.
|
||||
*/
|
||||
package toml
|
568
vendor/github.com/BurntSushi/toml/encode.go
generated
vendored
568
vendor/github.com/BurntSushi/toml/encode.go
generated
vendored
@@ -1,568 +0,0 @@
|
||||
package toml
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type tomlEncodeError struct{ error }
|
||||
|
||||
var (
|
||||
errArrayMixedElementTypes = errors.New(
|
||||
"toml: cannot encode array with mixed element types")
|
||||
errArrayNilElement = errors.New(
|
||||
"toml: cannot encode array with nil element")
|
||||
errNonString = errors.New(
|
||||
"toml: cannot encode a map with non-string key type")
|
||||
errAnonNonStruct = errors.New(
|
||||
"toml: cannot encode an anonymous field that is not a struct")
|
||||
errArrayNoTable = errors.New(
|
||||
"toml: TOML array element cannot contain a table")
|
||||
errNoKey = errors.New(
|
||||
"toml: top-level values must be Go maps or structs")
|
||||
errAnything = errors.New("") // used in testing
|
||||
)
|
||||
|
||||
var quotedReplacer = strings.NewReplacer(
|
||||
"\t", "\\t",
|
||||
"\n", "\\n",
|
||||
"\r", "\\r",
|
||||
"\"", "\\\"",
|
||||
"\\", "\\\\",
|
||||
)
|
||||
|
||||
// Encoder controls the encoding of Go values to a TOML document to some
|
||||
// io.Writer.
|
||||
//
|
||||
// The indentation level can be controlled with the Indent field.
|
||||
type Encoder struct {
|
||||
// A single indentation level. By default it is two spaces.
|
||||
Indent string
|
||||
|
||||
// hasWritten is whether we have written any output to w yet.
|
||||
hasWritten bool
|
||||
w *bufio.Writer
|
||||
}
|
||||
|
||||
// NewEncoder returns a TOML encoder that encodes Go values to the io.Writer
|
||||
// given. By default, a single indentation level is 2 spaces.
|
||||
func NewEncoder(w io.Writer) *Encoder {
|
||||
return &Encoder{
|
||||
w: bufio.NewWriter(w),
|
||||
Indent: " ",
|
||||
}
|
||||
}
|
||||
|
||||
// Encode writes a TOML representation of the Go value to the underlying
|
||||
// io.Writer. If the value given cannot be encoded to a valid TOML document,
|
||||
// then an error is returned.
|
||||
//
|
||||
// The mapping between Go values and TOML values should be precisely the same
|
||||
// as for the Decode* functions. Similarly, the TextMarshaler interface is
|
||||
// supported by encoding the resulting bytes as strings. (If you want to write
|
||||
// arbitrary binary data then you will need to use something like base64 since
|
||||
// TOML does not have any binary types.)
|
||||
//
|
||||
// When encoding TOML hashes (i.e., Go maps or structs), keys without any
|
||||
// sub-hashes are encoded first.
|
||||
//
|
||||
// If a Go map is encoded, then its keys are sorted alphabetically for
|
||||
// deterministic output. More control over this behavior may be provided if
|
||||
// there is demand for it.
|
||||
//
|
||||
// Encoding Go values without a corresponding TOML representation---like map
|
||||
// types with non-string keys---will cause an error to be returned. Similarly
|
||||
// for mixed arrays/slices, arrays/slices with nil elements, embedded
|
||||
// non-struct types and nested slices containing maps or structs.
|
||||
// (e.g., [][]map[string]string is not allowed but []map[string]string is OK
|
||||
// and so is []map[string][]string.)
|
||||
func (enc *Encoder) Encode(v interface{}) error {
|
||||
rv := eindirect(reflect.ValueOf(v))
|
||||
if err := enc.safeEncode(Key([]string{}), rv); err != nil {
|
||||
return err
|
||||
}
|
||||
return enc.w.Flush()
|
||||
}
|
||||
|
||||
func (enc *Encoder) safeEncode(key Key, rv reflect.Value) (err error) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
if terr, ok := r.(tomlEncodeError); ok {
|
||||
err = terr.error
|
||||
return
|
||||
}
|
||||
panic(r)
|
||||
}
|
||||
}()
|
||||
enc.encode(key, rv)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (enc *Encoder) encode(key Key, rv reflect.Value) {
|
||||
// Special case. Time needs to be in ISO8601 format.
|
||||
// Special case. If we can marshal the type to text, then we used that.
|
||||
// Basically, this prevents the encoder for handling these types as
|
||||
// generic structs (or whatever the underlying type of a TextMarshaler is).
|
||||
switch rv.Interface().(type) {
|
||||
case time.Time, TextMarshaler:
|
||||
enc.keyEqElement(key, rv)
|
||||
return
|
||||
}
|
||||
|
||||
k := rv.Kind()
|
||||
switch k {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
|
||||
reflect.Int64,
|
||||
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
|
||||
reflect.Uint64,
|
||||
reflect.Float32, reflect.Float64, reflect.String, reflect.Bool:
|
||||
enc.keyEqElement(key, rv)
|
||||
case reflect.Array, reflect.Slice:
|
||||
if typeEqual(tomlArrayHash, tomlTypeOfGo(rv)) {
|
||||
enc.eArrayOfTables(key, rv)
|
||||
} else {
|
||||
enc.keyEqElement(key, rv)
|
||||
}
|
||||
case reflect.Interface:
|
||||
if rv.IsNil() {
|
||||
return
|
||||
}
|
||||
enc.encode(key, rv.Elem())
|
||||
case reflect.Map:
|
||||
if rv.IsNil() {
|
||||
return
|
||||
}
|
||||
enc.eTable(key, rv)
|
||||
case reflect.Ptr:
|
||||
if rv.IsNil() {
|
||||
return
|
||||
}
|
||||
enc.encode(key, rv.Elem())
|
||||
case reflect.Struct:
|
||||
enc.eTable(key, rv)
|
||||
default:
|
||||
panic(e("unsupported type for key '%s': %s", key, k))
|
||||
}
|
||||
}
|
||||
|
||||
// eElement encodes any value that can be an array element (primitives and
|
||||
// arrays).
|
||||
func (enc *Encoder) eElement(rv reflect.Value) {
|
||||
switch v := rv.Interface().(type) {
|
||||
case time.Time:
|
||||
// Special case time.Time as a primitive. Has to come before
|
||||
// TextMarshaler below because time.Time implements
|
||||
// encoding.TextMarshaler, but we need to always use UTC.
|
||||
enc.wf(v.UTC().Format("2006-01-02T15:04:05Z"))
|
||||
return
|
||||
case TextMarshaler:
|
||||
// Special case. Use text marshaler if it's available for this value.
|
||||
if s, err := v.MarshalText(); err != nil {
|
||||
encPanic(err)
|
||||
} else {
|
||||
enc.writeQuoted(string(s))
|
||||
}
|
||||
return
|
||||
}
|
||||
switch rv.Kind() {
|
||||
case reflect.Bool:
|
||||
enc.wf(strconv.FormatBool(rv.Bool()))
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
|
||||
reflect.Int64:
|
||||
enc.wf(strconv.FormatInt(rv.Int(), 10))
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16,
|
||||
reflect.Uint32, reflect.Uint64:
|
||||
enc.wf(strconv.FormatUint(rv.Uint(), 10))
|
||||
case reflect.Float32:
|
||||
enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 32)))
|
||||
case reflect.Float64:
|
||||
enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 64)))
|
||||
case reflect.Array, reflect.Slice:
|
||||
enc.eArrayOrSliceElement(rv)
|
||||
case reflect.Interface:
|
||||
enc.eElement(rv.Elem())
|
||||
case reflect.String:
|
||||
enc.writeQuoted(rv.String())
|
||||
default:
|
||||
panic(e("unexpected primitive type: %s", rv.Kind()))
|
||||
}
|
||||
}
|
||||
|
||||
// By the TOML spec, all floats must have a decimal with at least one
|
||||
// number on either side.
|
||||
func floatAddDecimal(fstr string) string {
|
||||
if !strings.Contains(fstr, ".") {
|
||||
return fstr + ".0"
|
||||
}
|
||||
return fstr
|
||||
}
|
||||
|
||||
func (enc *Encoder) writeQuoted(s string) {
|
||||
enc.wf("\"%s\"", quotedReplacer.Replace(s))
|
||||
}
|
||||
|
||||
func (enc *Encoder) eArrayOrSliceElement(rv reflect.Value) {
|
||||
length := rv.Len()
|
||||
enc.wf("[")
|
||||
for i := 0; i < length; i++ {
|
||||
elem := rv.Index(i)
|
||||
enc.eElement(elem)
|
||||
if i != length-1 {
|
||||
enc.wf(", ")
|
||||
}
|
||||
}
|
||||
enc.wf("]")
|
||||
}
|
||||
|
||||
func (enc *Encoder) eArrayOfTables(key Key, rv reflect.Value) {
|
||||
if len(key) == 0 {
|
||||
encPanic(errNoKey)
|
||||
}
|
||||
for i := 0; i < rv.Len(); i++ {
|
||||
trv := rv.Index(i)
|
||||
if isNil(trv) {
|
||||
continue
|
||||
}
|
||||
panicIfInvalidKey(key)
|
||||
enc.newline()
|
||||
enc.wf("%s[[%s]]", enc.indentStr(key), key.maybeQuotedAll())
|
||||
enc.newline()
|
||||
enc.eMapOrStruct(key, trv)
|
||||
}
|
||||
}
|
||||
|
||||
func (enc *Encoder) eTable(key Key, rv reflect.Value) {
|
||||
panicIfInvalidKey(key)
|
||||
if len(key) == 1 {
|
||||
// Output an extra newline between top-level tables.
|
||||
// (The newline isn't written if nothing else has been written though.)
|
||||
enc.newline()
|
||||
}
|
||||
if len(key) > 0 {
|
||||
enc.wf("%s[%s]", enc.indentStr(key), key.maybeQuotedAll())
|
||||
enc.newline()
|
||||
}
|
||||
enc.eMapOrStruct(key, rv)
|
||||
}
|
||||
|
||||
func (enc *Encoder) eMapOrStruct(key Key, rv reflect.Value) {
|
||||
switch rv := eindirect(rv); rv.Kind() {
|
||||
case reflect.Map:
|
||||
enc.eMap(key, rv)
|
||||
case reflect.Struct:
|
||||
enc.eStruct(key, rv)
|
||||
default:
|
||||
panic("eTable: unhandled reflect.Value Kind: " + rv.Kind().String())
|
||||
}
|
||||
}
|
||||
|
||||
func (enc *Encoder) eMap(key Key, rv reflect.Value) {
|
||||
rt := rv.Type()
|
||||
if rt.Key().Kind() != reflect.String {
|
||||
encPanic(errNonString)
|
||||
}
|
||||
|
||||
// Sort keys so that we have deterministic output. And write keys directly
|
||||
// underneath this key first, before writing sub-structs or sub-maps.
|
||||
var mapKeysDirect, mapKeysSub []string
|
||||
for _, mapKey := range rv.MapKeys() {
|
||||
k := mapKey.String()
|
||||
if typeIsHash(tomlTypeOfGo(rv.MapIndex(mapKey))) {
|
||||
mapKeysSub = append(mapKeysSub, k)
|
||||
} else {
|
||||
mapKeysDirect = append(mapKeysDirect, k)
|
||||
}
|
||||
}
|
||||
|
||||
var writeMapKeys = func(mapKeys []string) {
|
||||
sort.Strings(mapKeys)
|
||||
for _, mapKey := range mapKeys {
|
||||
mrv := rv.MapIndex(reflect.ValueOf(mapKey))
|
||||
if isNil(mrv) {
|
||||
// Don't write anything for nil fields.
|
||||
continue
|
||||
}
|
||||
enc.encode(key.add(mapKey), mrv)
|
||||
}
|
||||
}
|
||||
writeMapKeys(mapKeysDirect)
|
||||
writeMapKeys(mapKeysSub)
|
||||
}
|
||||
|
||||
func (enc *Encoder) eStruct(key Key, rv reflect.Value) {
|
||||
// Write keys for fields directly under this key first, because if we write
|
||||
// a field that creates a new table, then all keys under it will be in that
|
||||
// table (not the one we're writing here).
|
||||
rt := rv.Type()
|
||||
var fieldsDirect, fieldsSub [][]int
|
||||
var addFields func(rt reflect.Type, rv reflect.Value, start []int)
|
||||
addFields = func(rt reflect.Type, rv reflect.Value, start []int) {
|
||||
for i := 0; i < rt.NumField(); i++ {
|
||||
f := rt.Field(i)
|
||||
// skip unexported fields
|
||||
if f.PkgPath != "" && !f.Anonymous {
|
||||
continue
|
||||
}
|
||||
frv := rv.Field(i)
|
||||
if f.Anonymous {
|
||||
t := f.Type
|
||||
switch t.Kind() {
|
||||
case reflect.Struct:
|
||||
// Treat anonymous struct fields with
|
||||
// tag names as though they are not
|
||||
// anonymous, like encoding/json does.
|
||||
if getOptions(f.Tag).name == "" {
|
||||
addFields(t, frv, f.Index)
|
||||
continue
|
||||
}
|
||||
case reflect.Ptr:
|
||||
if t.Elem().Kind() == reflect.Struct &&
|
||||
getOptions(f.Tag).name == "" {
|
||||
if !frv.IsNil() {
|
||||
addFields(t.Elem(), frv.Elem(), f.Index)
|
||||
}
|
||||
continue
|
||||
}
|
||||
// Fall through to the normal field encoding logic below
|
||||
// for non-struct anonymous fields.
|
||||
}
|
||||
}
|
||||
|
||||
if typeIsHash(tomlTypeOfGo(frv)) {
|
||||
fieldsSub = append(fieldsSub, append(start, f.Index...))
|
||||
} else {
|
||||
fieldsDirect = append(fieldsDirect, append(start, f.Index...))
|
||||
}
|
||||
}
|
||||
}
|
||||
addFields(rt, rv, nil)
|
||||
|
||||
var writeFields = func(fields [][]int) {
|
||||
for _, fieldIndex := range fields {
|
||||
sft := rt.FieldByIndex(fieldIndex)
|
||||
sf := rv.FieldByIndex(fieldIndex)
|
||||
if isNil(sf) {
|
||||
// Don't write anything for nil fields.
|
||||
continue
|
||||
}
|
||||
|
||||
opts := getOptions(sft.Tag)
|
||||
if opts.skip {
|
||||
continue
|
||||
}
|
||||
keyName := sft.Name
|
||||
if opts.name != "" {
|
||||
keyName = opts.name
|
||||
}
|
||||
if opts.omitempty && isEmpty(sf) {
|
||||
continue
|
||||
}
|
||||
if opts.omitzero && isZero(sf) {
|
||||
continue
|
||||
}
|
||||
|
||||
enc.encode(key.add(keyName), sf)
|
||||
}
|
||||
}
|
||||
writeFields(fieldsDirect)
|
||||
writeFields(fieldsSub)
|
||||
}
|
||||
|
||||
// tomlTypeName returns the TOML type name of the Go value's type. It is
|
||||
// used to determine whether the types of array elements are mixed (which is
|
||||
// forbidden). If the Go value is nil, then it is illegal for it to be an array
|
||||
// element, and valueIsNil is returned as true.
|
||||
|
||||
// Returns the TOML type of a Go value. The type may be `nil`, which means
|
||||
// no concrete TOML type could be found.
|
||||
func tomlTypeOfGo(rv reflect.Value) tomlType {
|
||||
if isNil(rv) || !rv.IsValid() {
|
||||
return nil
|
||||
}
|
||||
switch rv.Kind() {
|
||||
case reflect.Bool:
|
||||
return tomlBool
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
|
||||
reflect.Int64,
|
||||
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
|
||||
reflect.Uint64:
|
||||
return tomlInteger
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return tomlFloat
|
||||
case reflect.Array, reflect.Slice:
|
||||
if typeEqual(tomlHash, tomlArrayType(rv)) {
|
||||
return tomlArrayHash
|
||||
}
|
||||
return tomlArray
|
||||
case reflect.Ptr, reflect.Interface:
|
||||
return tomlTypeOfGo(rv.Elem())
|
||||
case reflect.String:
|
||||
return tomlString
|
||||
case reflect.Map:
|
||||
return tomlHash
|
||||
case reflect.Struct:
|
||||
switch rv.Interface().(type) {
|
||||
case time.Time:
|
||||
return tomlDatetime
|
||||
case TextMarshaler:
|
||||
return tomlString
|
||||
default:
|
||||
return tomlHash
|
||||
}
|
||||
default:
|
||||
panic("unexpected reflect.Kind: " + rv.Kind().String())
|
||||
}
|
||||
}
|
||||
|
||||
// tomlArrayType returns the element type of a TOML array. The type returned
|
||||
// may be nil if it cannot be determined (e.g., a nil slice or a zero length
|
||||
// slize). This function may also panic if it finds a type that cannot be
|
||||
// expressed in TOML (such as nil elements, heterogeneous arrays or directly
|
||||
// nested arrays of tables).
|
||||
func tomlArrayType(rv reflect.Value) tomlType {
|
||||
if isNil(rv) || !rv.IsValid() || rv.Len() == 0 {
|
||||
return nil
|
||||
}
|
||||
firstType := tomlTypeOfGo(rv.Index(0))
|
||||
if firstType == nil {
|
||||
encPanic(errArrayNilElement)
|
||||
}
|
||||
|
||||
rvlen := rv.Len()
|
||||
for i := 1; i < rvlen; i++ {
|
||||
elem := rv.Index(i)
|
||||
switch elemType := tomlTypeOfGo(elem); {
|
||||
case elemType == nil:
|
||||
encPanic(errArrayNilElement)
|
||||
case !typeEqual(firstType, elemType):
|
||||
encPanic(errArrayMixedElementTypes)
|
||||
}
|
||||
}
|
||||
// If we have a nested array, then we must make sure that the nested
|
||||
// array contains ONLY primitives.
|
||||
// This checks arbitrarily nested arrays.
|
||||
if typeEqual(firstType, tomlArray) || typeEqual(firstType, tomlArrayHash) {
|
||||
nest := tomlArrayType(eindirect(rv.Index(0)))
|
||||
if typeEqual(nest, tomlHash) || typeEqual(nest, tomlArrayHash) {
|
||||
encPanic(errArrayNoTable)
|
||||
}
|
||||
}
|
||||
return firstType
|
||||
}
|
||||
|
||||
type tagOptions struct {
|
||||
skip bool // "-"
|
||||
name string
|
||||
omitempty bool
|
||||
omitzero bool
|
||||
}
|
||||
|
||||
func getOptions(tag reflect.StructTag) tagOptions {
|
||||
t := tag.Get("toml")
|
||||
if t == "-" {
|
||||
return tagOptions{skip: true}
|
||||
}
|
||||
var opts tagOptions
|
||||
parts := strings.Split(t, ",")
|
||||
opts.name = parts[0]
|
||||
for _, s := range parts[1:] {
|
||||
switch s {
|
||||
case "omitempty":
|
||||
opts.omitempty = true
|
||||
case "omitzero":
|
||||
opts.omitzero = true
|
||||
}
|
||||
}
|
||||
return opts
|
||||
}
|
||||
|
||||
func isZero(rv reflect.Value) bool {
|
||||
switch rv.Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return rv.Int() == 0
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||
return rv.Uint() == 0
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return rv.Float() == 0.0
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func isEmpty(rv reflect.Value) bool {
|
||||
switch rv.Kind() {
|
||||
case reflect.Array, reflect.Slice, reflect.Map, reflect.String:
|
||||
return rv.Len() == 0
|
||||
case reflect.Bool:
|
||||
return !rv.Bool()
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (enc *Encoder) newline() {
|
||||
if enc.hasWritten {
|
||||
enc.wf("\n")
|
||||
}
|
||||
}
|
||||
|
||||
func (enc *Encoder) keyEqElement(key Key, val reflect.Value) {
|
||||
if len(key) == 0 {
|
||||
encPanic(errNoKey)
|
||||
}
|
||||
panicIfInvalidKey(key)
|
||||
enc.wf("%s%s = ", enc.indentStr(key), key.maybeQuoted(len(key)-1))
|
||||
enc.eElement(val)
|
||||
enc.newline()
|
||||
}
|
||||
|
||||
func (enc *Encoder) wf(format string, v ...interface{}) {
|
||||
if _, err := fmt.Fprintf(enc.w, format, v...); err != nil {
|
||||
encPanic(err)
|
||||
}
|
||||
enc.hasWritten = true
|
||||
}
|
||||
|
||||
func (enc *Encoder) indentStr(key Key) string {
|
||||
return strings.Repeat(enc.Indent, len(key)-1)
|
||||
}
|
||||
|
||||
func encPanic(err error) {
|
||||
panic(tomlEncodeError{err})
|
||||
}
|
||||
|
||||
func eindirect(v reflect.Value) reflect.Value {
|
||||
switch v.Kind() {
|
||||
case reflect.Ptr, reflect.Interface:
|
||||
return eindirect(v.Elem())
|
||||
default:
|
||||
return v
|
||||
}
|
||||
}
|
||||
|
||||
func isNil(rv reflect.Value) bool {
|
||||
switch rv.Kind() {
|
||||
case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
|
||||
return rv.IsNil()
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func panicIfInvalidKey(key Key) {
|
||||
for _, k := range key {
|
||||
if len(k) == 0 {
|
||||
encPanic(e("Key '%s' is not a valid table name. Key names "+
|
||||
"cannot be empty.", key.maybeQuotedAll()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func isValidKeyName(s string) bool {
|
||||
return len(s) != 0
|
||||
}
|
19
vendor/github.com/BurntSushi/toml/encoding_types.go
generated
vendored
19
vendor/github.com/BurntSushi/toml/encoding_types.go
generated
vendored
@@ -1,19 +0,0 @@
|
||||
// +build go1.2
|
||||
|
||||
package toml
|
||||
|
||||
// In order to support Go 1.1, we define our own TextMarshaler and
|
||||
// TextUnmarshaler types. For Go 1.2+, we just alias them with the
|
||||
// standard library interfaces.
|
||||
|
||||
import (
|
||||
"encoding"
|
||||
)
|
||||
|
||||
// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here
|
||||
// so that Go 1.1 can be supported.
|
||||
type TextMarshaler encoding.TextMarshaler
|
||||
|
||||
// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined
|
||||
// here so that Go 1.1 can be supported.
|
||||
type TextUnmarshaler encoding.TextUnmarshaler
|
18
vendor/github.com/BurntSushi/toml/encoding_types_1.1.go
generated
vendored
18
vendor/github.com/BurntSushi/toml/encoding_types_1.1.go
generated
vendored
@@ -1,18 +0,0 @@
|
||||
// +build !go1.2
|
||||
|
||||
package toml
|
||||
|
||||
// These interfaces were introduced in Go 1.2, so we add them manually when
|
||||
// compiling for Go 1.1.
|
||||
|
||||
// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here
|
||||
// so that Go 1.1 can be supported.
|
||||
type TextMarshaler interface {
|
||||
MarshalText() (text []byte, err error)
|
||||
}
|
||||
|
||||
// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined
|
||||
// here so that Go 1.1 can be supported.
|
||||
type TextUnmarshaler interface {
|
||||
UnmarshalText(text []byte) error
|
||||
}
|
953
vendor/github.com/BurntSushi/toml/lex.go
generated
vendored
953
vendor/github.com/BurntSushi/toml/lex.go
generated
vendored
@@ -1,953 +0,0 @@
|
||||
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type itemType int
|
||||
|
||||
const (
|
||||
itemError itemType = iota
|
||||
itemNIL // used in the parser to indicate no type
|
||||
itemEOF
|
||||
itemText
|
||||
itemString
|
||||
itemRawString
|
||||
itemMultilineString
|
||||
itemRawMultilineString
|
||||
itemBool
|
||||
itemInteger
|
||||
itemFloat
|
||||
itemDatetime
|
||||
itemArray // the start of an array
|
||||
itemArrayEnd
|
||||
itemTableStart
|
||||
itemTableEnd
|
||||
itemArrayTableStart
|
||||
itemArrayTableEnd
|
||||
itemKeyStart
|
||||
itemCommentStart
|
||||
itemInlineTableStart
|
||||
itemInlineTableEnd
|
||||
)
|
||||
|
||||
const (
|
||||
eof = 0
|
||||
comma = ','
|
||||
tableStart = '['
|
||||
tableEnd = ']'
|
||||
arrayTableStart = '['
|
||||
arrayTableEnd = ']'
|
||||
tableSep = '.'
|
||||
keySep = '='
|
||||
arrayStart = '['
|
||||
arrayEnd = ']'
|
||||
commentStart = '#'
|
||||
stringStart = '"'
|
||||
stringEnd = '"'
|
||||
rawStringStart = '\''
|
||||
rawStringEnd = '\''
|
||||
inlineTableStart = '{'
|
||||
inlineTableEnd = '}'
|
||||
)
|
||||
|
||||
type stateFn func(lx *lexer) stateFn
|
||||
|
||||
type lexer struct {
|
||||
input string
|
||||
start int
|
||||
pos int
|
||||
line int
|
||||
state stateFn
|
||||
items chan item
|
||||
|
||||
// Allow for backing up up to three runes.
|
||||
// This is necessary because TOML contains 3-rune tokens (""" and ''').
|
||||
prevWidths [3]int
|
||||
nprev int // how many of prevWidths are in use
|
||||
// If we emit an eof, we can still back up, but it is not OK to call
|
||||
// next again.
|
||||
atEOF bool
|
||||
|
||||
// A stack of state functions used to maintain context.
|
||||
// The idea is to reuse parts of the state machine in various places.
|
||||
// For example, values can appear at the top level or within arbitrarily
|
||||
// nested arrays. The last state on the stack is used after a value has
|
||||
// been lexed. Similarly for comments.
|
||||
stack []stateFn
|
||||
}
|
||||
|
||||
type item struct {
|
||||
typ itemType
|
||||
val string
|
||||
line int
|
||||
}
|
||||
|
||||
func (lx *lexer) nextItem() item {
|
||||
for {
|
||||
select {
|
||||
case item := <-lx.items:
|
||||
return item
|
||||
default:
|
||||
lx.state = lx.state(lx)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func lex(input string) *lexer {
|
||||
lx := &lexer{
|
||||
input: input,
|
||||
state: lexTop,
|
||||
line: 1,
|
||||
items: make(chan item, 10),
|
||||
stack: make([]stateFn, 0, 10),
|
||||
}
|
||||
return lx
|
||||
}
|
||||
|
||||
func (lx *lexer) push(state stateFn) {
|
||||
lx.stack = append(lx.stack, state)
|
||||
}
|
||||
|
||||
func (lx *lexer) pop() stateFn {
|
||||
if len(lx.stack) == 0 {
|
||||
return lx.errorf("BUG in lexer: no states to pop")
|
||||
}
|
||||
last := lx.stack[len(lx.stack)-1]
|
||||
lx.stack = lx.stack[0 : len(lx.stack)-1]
|
||||
return last
|
||||
}
|
||||
|
||||
func (lx *lexer) current() string {
|
||||
return lx.input[lx.start:lx.pos]
|
||||
}
|
||||
|
||||
func (lx *lexer) emit(typ itemType) {
|
||||
lx.items <- item{typ, lx.current(), lx.line}
|
||||
lx.start = lx.pos
|
||||
}
|
||||
|
||||
func (lx *lexer) emitTrim(typ itemType) {
|
||||
lx.items <- item{typ, strings.TrimSpace(lx.current()), lx.line}
|
||||
lx.start = lx.pos
|
||||
}
|
||||
|
||||
func (lx *lexer) next() (r rune) {
|
||||
if lx.atEOF {
|
||||
panic("next called after EOF")
|
||||
}
|
||||
if lx.pos >= len(lx.input) {
|
||||
lx.atEOF = true
|
||||
return eof
|
||||
}
|
||||
|
||||
if lx.input[lx.pos] == '\n' {
|
||||
lx.line++
|
||||
}
|
||||
lx.prevWidths[2] = lx.prevWidths[1]
|
||||
lx.prevWidths[1] = lx.prevWidths[0]
|
||||
if lx.nprev < 3 {
|
||||
lx.nprev++
|
||||
}
|
||||
r, w := utf8.DecodeRuneInString(lx.input[lx.pos:])
|
||||
lx.prevWidths[0] = w
|
||||
lx.pos += w
|
||||
return r
|
||||
}
|
||||
|
||||
// ignore skips over the pending input before this point.
|
||||
func (lx *lexer) ignore() {
|
||||
lx.start = lx.pos
|
||||
}
|
||||
|
||||
// backup steps back one rune. Can be called only twice between calls to next.
|
||||
func (lx *lexer) backup() {
|
||||
if lx.atEOF {
|
||||
lx.atEOF = false
|
||||
return
|
||||
}
|
||||
if lx.nprev < 1 {
|
||||
panic("backed up too far")
|
||||
}
|
||||
w := lx.prevWidths[0]
|
||||
lx.prevWidths[0] = lx.prevWidths[1]
|
||||
lx.prevWidths[1] = lx.prevWidths[2]
|
||||
lx.nprev--
|
||||
lx.pos -= w
|
||||
if lx.pos < len(lx.input) && lx.input[lx.pos] == '\n' {
|
||||
lx.line--
|
||||
}
|
||||
}
|
||||
|
||||
// accept consumes the next rune if it's equal to `valid`.
|
||||
func (lx *lexer) accept(valid rune) bool {
|
||||
if lx.next() == valid {
|
||||
return true
|
||||
}
|
||||
lx.backup()
|
||||
return false
|
||||
}
|
||||
|
||||
// peek returns but does not consume the next rune in the input.
|
||||
func (lx *lexer) peek() rune {
|
||||
r := lx.next()
|
||||
lx.backup()
|
||||
return r
|
||||
}
|
||||
|
||||
// skip ignores all input that matches the given predicate.
|
||||
func (lx *lexer) skip(pred func(rune) bool) {
|
||||
for {
|
||||
r := lx.next()
|
||||
if pred(r) {
|
||||
continue
|
||||
}
|
||||
lx.backup()
|
||||
lx.ignore()
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// errorf stops all lexing by emitting an error and returning `nil`.
|
||||
// Note that any value that is a character is escaped if it's a special
|
||||
// character (newlines, tabs, etc.).
|
||||
func (lx *lexer) errorf(format string, values ...interface{}) stateFn {
|
||||
lx.items <- item{
|
||||
itemError,
|
||||
fmt.Sprintf(format, values...),
|
||||
lx.line,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// lexTop consumes elements at the top level of TOML data.
|
||||
func lexTop(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if isWhitespace(r) || isNL(r) {
|
||||
return lexSkip(lx, lexTop)
|
||||
}
|
||||
switch r {
|
||||
case commentStart:
|
||||
lx.push(lexTop)
|
||||
return lexCommentStart
|
||||
case tableStart:
|
||||
return lexTableStart
|
||||
case eof:
|
||||
if lx.pos > lx.start {
|
||||
return lx.errorf("unexpected EOF")
|
||||
}
|
||||
lx.emit(itemEOF)
|
||||
return nil
|
||||
}
|
||||
|
||||
// At this point, the only valid item can be a key, so we back up
|
||||
// and let the key lexer do the rest.
|
||||
lx.backup()
|
||||
lx.push(lexTopEnd)
|
||||
return lexKeyStart
|
||||
}
|
||||
|
||||
// lexTopEnd is entered whenever a top-level item has been consumed. (A value
|
||||
// or a table.) It must see only whitespace, and will turn back to lexTop
|
||||
// upon a newline. If it sees EOF, it will quit the lexer successfully.
|
||||
func lexTopEnd(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case r == commentStart:
|
||||
// a comment will read to a newline for us.
|
||||
lx.push(lexTop)
|
||||
return lexCommentStart
|
||||
case isWhitespace(r):
|
||||
return lexTopEnd
|
||||
case isNL(r):
|
||||
lx.ignore()
|
||||
return lexTop
|
||||
case r == eof:
|
||||
lx.emit(itemEOF)
|
||||
return nil
|
||||
}
|
||||
return lx.errorf("expected a top-level item to end with a newline, "+
|
||||
"comment, or EOF, but got %q instead", r)
|
||||
}
|
||||
|
||||
// lexTable lexes the beginning of a table. Namely, it makes sure that
|
||||
// it starts with a character other than '.' and ']'.
|
||||
// It assumes that '[' has already been consumed.
|
||||
// It also handles the case that this is an item in an array of tables.
|
||||
// e.g., '[[name]]'.
|
||||
func lexTableStart(lx *lexer) stateFn {
|
||||
if lx.peek() == arrayTableStart {
|
||||
lx.next()
|
||||
lx.emit(itemArrayTableStart)
|
||||
lx.push(lexArrayTableEnd)
|
||||
} else {
|
||||
lx.emit(itemTableStart)
|
||||
lx.push(lexTableEnd)
|
||||
}
|
||||
return lexTableNameStart
|
||||
}
|
||||
|
||||
func lexTableEnd(lx *lexer) stateFn {
|
||||
lx.emit(itemTableEnd)
|
||||
return lexTopEnd
|
||||
}
|
||||
|
||||
func lexArrayTableEnd(lx *lexer) stateFn {
|
||||
if r := lx.next(); r != arrayTableEnd {
|
||||
return lx.errorf("expected end of table array name delimiter %q, "+
|
||||
"but got %q instead", arrayTableEnd, r)
|
||||
}
|
||||
lx.emit(itemArrayTableEnd)
|
||||
return lexTopEnd
|
||||
}
|
||||
|
||||
func lexTableNameStart(lx *lexer) stateFn {
|
||||
lx.skip(isWhitespace)
|
||||
switch r := lx.peek(); {
|
||||
case r == tableEnd || r == eof:
|
||||
return lx.errorf("unexpected end of table name " +
|
||||
"(table names cannot be empty)")
|
||||
case r == tableSep:
|
||||
return lx.errorf("unexpected table separator " +
|
||||
"(table names cannot be empty)")
|
||||
case r == stringStart || r == rawStringStart:
|
||||
lx.ignore()
|
||||
lx.push(lexTableNameEnd)
|
||||
return lexValue // reuse string lexing
|
||||
default:
|
||||
return lexBareTableName
|
||||
}
|
||||
}
|
||||
|
||||
// lexBareTableName lexes the name of a table. It assumes that at least one
|
||||
// valid character for the table has already been read.
|
||||
func lexBareTableName(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if isBareKeyChar(r) {
|
||||
return lexBareTableName
|
||||
}
|
||||
lx.backup()
|
||||
lx.emit(itemText)
|
||||
return lexTableNameEnd
|
||||
}
|
||||
|
||||
// lexTableNameEnd reads the end of a piece of a table name, optionally
|
||||
// consuming whitespace.
|
||||
func lexTableNameEnd(lx *lexer) stateFn {
|
||||
lx.skip(isWhitespace)
|
||||
switch r := lx.next(); {
|
||||
case isWhitespace(r):
|
||||
return lexTableNameEnd
|
||||
case r == tableSep:
|
||||
lx.ignore()
|
||||
return lexTableNameStart
|
||||
case r == tableEnd:
|
||||
return lx.pop()
|
||||
default:
|
||||
return lx.errorf("expected '.' or ']' to end table name, "+
|
||||
"but got %q instead", r)
|
||||
}
|
||||
}
|
||||
|
||||
// lexKeyStart consumes a key name up until the first non-whitespace character.
// lexKeyStart will ignore whitespace. Quoted keys are delegated to the
// string lexers (with lexKeyEnd pushed as the continuation); anything
// else is lexed as a bare key.
func lexKeyStart(lx *lexer) stateFn {
	r := lx.peek()
	switch {
	case r == keySep:
		return lx.errorf("unexpected key separator %q", keySep)
	case isWhitespace(r) || isNL(r):
		lx.next()
		return lexSkip(lx, lexKeyStart)
	case r == stringStart || r == rawStringStart:
		lx.ignore()
		lx.emit(itemKeyStart)
		lx.push(lexKeyEnd)
		return lexValue // reuse string lexing
	default:
		lx.ignore()
		lx.emit(itemKeyStart)
		return lexBareKey
	}
}
|
||||
|
||||
// lexBareKey consumes the text of a bare key. Assumes that the first character
// (which is not whitespace) has not yet been consumed. The key text ends at
// whitespace or the key separator ('='); any other rune is an error.
func lexBareKey(lx *lexer) stateFn {
	switch r := lx.next(); {
	case isBareKeyChar(r):
		return lexBareKey
	case isWhitespace(r):
		lx.backup()
		lx.emit(itemText)
		return lexKeyEnd
	case r == keySep:
		lx.backup()
		lx.emit(itemText)
		return lexKeyEnd
	default:
		return lx.errorf("bare keys cannot contain %q", r)
	}
}
|
||||
|
||||
// lexKeyEnd consumes the end of a key and trims whitespace (up to the key
// separator). After the '=' is seen, control passes to lexValue.
func lexKeyEnd(lx *lexer) stateFn {
	switch r := lx.next(); {
	case r == keySep:
		return lexSkip(lx, lexValue)
	case isWhitespace(r):
		return lexSkip(lx, lexKeyEnd)
	default:
		return lx.errorf("expected key separator %q, but got %q instead",
			keySep, r)
	}
}
|
||||
|
||||
// lexValue starts the consumption of a value anywhere a value is expected.
// lexValue will ignore whitespace.
// After a value is lexed, the last state on the stack is popped and returned.
func lexValue(lx *lexer) stateFn {
	// We allow whitespace to precede a value, but NOT newlines.
	// In array syntax, the array states are responsible for ignoring newlines.
	r := lx.next()
	switch {
	case isWhitespace(r):
		return lexSkip(lx, lexValue)
	case isDigit(r):
		lx.backup() // avoid an extra state and use the same as above
		return lexNumberOrDateStart
	}
	switch r {
	case arrayStart:
		lx.ignore()
		lx.emit(itemArray)
		return lexArrayValue
	case inlineTableStart:
		lx.ignore()
		lx.emit(itemInlineTableStart)
		return lexInlineTableValue
	case stringStart:
		// One '"' seen; two more means a multiline basic string.
		if lx.accept(stringStart) {
			if lx.accept(stringStart) {
				lx.ignore() // Ignore """
				return lexMultilineString
			}
			lx.backup()
		}
		lx.ignore() // ignore the '"'
		return lexString
	case rawStringStart:
		// One "'" seen; two more means a multiline literal string.
		if lx.accept(rawStringStart) {
			if lx.accept(rawStringStart) {
				lx.ignore() // Ignore '''
				return lexMultilineRawString
			}
			lx.backup()
		}
		lx.ignore() // ignore the "'"
		return lexRawString
	case '+', '-':
		return lexNumberStart
	case '.': // special error case, be kind to users
		return lx.errorf("floats must start with a digit, not '.'")
	}
	if unicode.IsLetter(r) {
		// Be permissive here; lexBool will give a nice error if the
		// user wrote something like
		//     x = foo
		// (i.e. not 'true' or 'false' but is something else word-like.)
		lx.backup()
		return lexBool
	}
	return lx.errorf("expected value but found %q instead", r)
}
|
||||
|
||||
// lexArrayValue consumes one value in an array. It assumes that '[' or ','
// have already been consumed. All whitespace and newlines are ignored.
func lexArrayValue(lx *lexer) stateFn {
	r := lx.next()
	switch {
	case isWhitespace(r) || isNL(r):
		return lexSkip(lx, lexArrayValue)
	case r == commentStart:
		lx.push(lexArrayValue)
		return lexCommentStart
	case r == comma:
		return lx.errorf("unexpected comma")
	case r == arrayEnd:
		// NOTE(caleb): The spec isn't clear about whether you can have
		// a trailing comma or not, so we'll allow it.
		return lexArrayEnd
	}

	// Anything else starts a value; resume at lexArrayValueEnd afterwards.
	lx.backup()
	lx.push(lexArrayValueEnd)
	return lexValue
}
|
||||
|
||||
// lexArrayValueEnd consumes everything between the end of an array value and
// the next value (or the end of the array): it ignores whitespace and newlines
// and expects either a ',' or a ']'.
func lexArrayValueEnd(lx *lexer) stateFn {
	r := lx.next()
	switch {
	case isWhitespace(r) || isNL(r):
		return lexSkip(lx, lexArrayValueEnd)
	case r == commentStart:
		lx.push(lexArrayValueEnd)
		return lexCommentStart
	case r == comma:
		lx.ignore()
		return lexArrayValue // move on to the next value
	case r == arrayEnd:
		return lexArrayEnd
	}
	return lx.errorf(
		"expected a comma or array terminator %q, but got %q instead",
		arrayEnd, r,
	)
}
|
||||
|
||||
// lexArrayEnd finishes the lexing of an array.
// It assumes that a ']' has just been consumed.
func lexArrayEnd(lx *lexer) stateFn {
	lx.ignore()
	lx.emit(itemArrayEnd)
	return lx.pop()
}
|
||||
|
||||
// lexInlineTableValue consumes one key/value pair in an inline table.
// It assumes that '{' or ',' have already been consumed. Whitespace is ignored.
func lexInlineTableValue(lx *lexer) stateFn {
	r := lx.next()
	switch {
	case isWhitespace(r):
		return lexSkip(lx, lexInlineTableValue)
	case isNL(r):
		return lx.errorf("newlines not allowed within inline tables")
	case r == commentStart:
		lx.push(lexInlineTableValue)
		return lexCommentStart
	case r == comma:
		return lx.errorf("unexpected comma")
	case r == inlineTableEnd:
		return lexInlineTableEnd
	}
	// Anything else starts a key; resume at lexInlineTableValueEnd afterwards.
	lx.backup()
	lx.push(lexInlineTableValueEnd)
	return lexKeyStart
}
|
||||
|
||||
// lexInlineTableValueEnd consumes everything between the end of an inline table
// key/value pair and the next pair (or the end of the table):
// it ignores whitespace and expects either a ',' or a '}'.
func lexInlineTableValueEnd(lx *lexer) stateFn {
	r := lx.next()
	switch {
	case isWhitespace(r):
		return lexSkip(lx, lexInlineTableValueEnd)
	case isNL(r):
		return lx.errorf("newlines not allowed within inline tables")
	case r == commentStart:
		lx.push(lexInlineTableValueEnd)
		return lexCommentStart
	case r == comma:
		lx.ignore()
		return lexInlineTableValue
	case r == inlineTableEnd:
		return lexInlineTableEnd
	}
	return lx.errorf("expected a comma or an inline table terminator %q, "+
		"but got %q instead", inlineTableEnd, r)
}
|
||||
|
||||
// lexInlineTableEnd finishes the lexing of an inline table.
// It assumes that a '}' has just been consumed.
func lexInlineTableEnd(lx *lexer) stateFn {
	lx.ignore()
	lx.emit(itemInlineTableEnd)
	return lx.pop()
}
|
||||
|
||||
// lexString consumes the inner contents of a string. It assumes that the
// beginning '"' has already been consumed and ignored. Escapes are handled
// by lexStringEscape with this state pushed as the continuation.
func lexString(lx *lexer) stateFn {
	r := lx.next()
	switch {
	case r == eof:
		return lx.errorf("unexpected EOF")
	case isNL(r):
		return lx.errorf("strings cannot contain newlines")
	case r == '\\':
		lx.push(lexString)
		return lexStringEscape
	case r == stringEnd:
		// Emit everything before the closing quote, then drop the quote.
		lx.backup()
		lx.emit(itemString)
		lx.next()
		lx.ignore()
		return lx.pop()
	}
	return lexString
}
|
||||
|
||||
// lexMultilineString consumes the inner contents of a string. It assumes that
// the beginning '"""' has already been consumed and ignored. The string ends
// only at three consecutive '"' runes.
func lexMultilineString(lx *lexer) stateFn {
	switch lx.next() {
	case eof:
		return lx.errorf("unexpected EOF")
	case '\\':
		return lexMultilineStringEscape
	case stringEnd:
		if lx.accept(stringEnd) {
			if lx.accept(stringEnd) {
				// Three quotes consumed: back up over them, emit the
				// body, then re-consume and discard the delimiter.
				lx.backup()
				lx.backup()
				lx.backup()
				lx.emit(itemMultilineString)
				lx.next()
				lx.next()
				lx.next()
				lx.ignore()
				return lx.pop()
			}
			lx.backup()
		}
	}
	return lexMultilineString
}
|
||||
|
||||
// lexRawString consumes a raw string. Nothing can be escaped in such a string.
// It assumes that the beginning "'" has already been consumed and ignored.
func lexRawString(lx *lexer) stateFn {
	r := lx.next()
	switch {
	case r == eof:
		return lx.errorf("unexpected EOF")
	case isNL(r):
		return lx.errorf("strings cannot contain newlines")
	case r == rawStringEnd:
		// Emit everything before the closing quote, then drop the quote.
		lx.backup()
		lx.emit(itemRawString)
		lx.next()
		lx.ignore()
		return lx.pop()
	}
	return lexRawString
}
|
||||
|
||||
// lexMultilineRawString consumes a raw string. Nothing can be escaped in such
// a string. It assumes that the beginning "'''" has already been consumed and
// ignored. The string ends only at three consecutive "'" runes.
func lexMultilineRawString(lx *lexer) stateFn {
	switch lx.next() {
	case eof:
		return lx.errorf("unexpected EOF")
	case rawStringEnd:
		if lx.accept(rawStringEnd) {
			if lx.accept(rawStringEnd) {
				// Three quotes consumed: back up over them, emit the
				// body, then re-consume and discard the delimiter.
				lx.backup()
				lx.backup()
				lx.backup()
				lx.emit(itemRawMultilineString)
				lx.next()
				lx.next()
				lx.next()
				lx.ignore()
				return lx.pop()
			}
			lx.backup()
		}
	}
	return lexMultilineRawString
}
|
||||
|
||||
// lexMultilineStringEscape consumes an escaped character. It assumes that the
// preceding '\\' has already been consumed. A backslash-newline (line
// continuation) is legal only in multiline strings, so it is handled here
// before deferring to the shared escape logic.
func lexMultilineStringEscape(lx *lexer) stateFn {
	// Handle the special case first:
	if isNL(lx.next()) {
		return lexMultilineString
	}
	lx.backup()
	lx.push(lexMultilineString)
	return lexStringEscape(lx)
}
|
||||
|
||||
func lexStringEscape(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch r {
|
||||
case 'b':
|
||||
fallthrough
|
||||
case 't':
|
||||
fallthrough
|
||||
case 'n':
|
||||
fallthrough
|
||||
case 'f':
|
||||
fallthrough
|
||||
case 'r':
|
||||
fallthrough
|
||||
case '"':
|
||||
fallthrough
|
||||
case '\\':
|
||||
return lx.pop()
|
||||
case 'u':
|
||||
return lexShortUnicodeEscape
|
||||
case 'U':
|
||||
return lexLongUnicodeEscape
|
||||
}
|
||||
return lx.errorf("invalid escape character %q; only the following "+
|
||||
"escape characters are allowed: "+
|
||||
`\b, \t, \n, \f, \r, \", \\, \uXXXX, and \UXXXXXXXX`, r)
|
||||
}
|
||||
|
||||
// lexShortUnicodeEscape consumes exactly four hexadecimal digits following
// a '\u' escape, then pops back to the pushed string state.
func lexShortUnicodeEscape(lx *lexer) stateFn {
	var r rune
	for i := 0; i < 4; i++ {
		r = lx.next()
		if !isHexadecimal(r) {
			return lx.errorf(`expected four hexadecimal digits after '\u', `+
				"but got %q instead", lx.current())
		}
	}
	return lx.pop()
}
|
||||
|
||||
// lexLongUnicodeEscape consumes exactly eight hexadecimal digits following
// a '\U' escape, then pops back to the pushed string state.
func lexLongUnicodeEscape(lx *lexer) stateFn {
	var r rune
	for i := 0; i < 8; i++ {
		r = lx.next()
		if !isHexadecimal(r) {
			return lx.errorf(`expected eight hexadecimal digits after '\U', `+
				"but got %q instead", lx.current())
		}
	}
	return lx.pop()
}
|
||||
|
||||
// lexNumberOrDateStart consumes either an integer, a float, or datetime.
// It is entered after lexValue has backed up over the first digit, so the
// first rune here is normally a digit; the remaining cases only shape the
// error message.
func lexNumberOrDateStart(lx *lexer) stateFn {
	r := lx.next()
	if isDigit(r) {
		return lexNumberOrDate
	}
	switch r {
	case '_':
		return lexNumber
	case 'e', 'E':
		return lexFloat
	case '.':
		return lx.errorf("floats must start with a digit, not '.'")
	}
	return lx.errorf("expected a digit but got %q", r)
}
|
||||
|
||||
// lexNumberOrDate consumes either an integer, float or datetime.
// The first '-' commits to a datetime, '_' to an integer with separators,
// and '.', 'e', or 'E' to a float; otherwise the run of digits is an integer.
func lexNumberOrDate(lx *lexer) stateFn {
	r := lx.next()
	if isDigit(r) {
		return lexNumberOrDate
	}
	switch r {
	case '-':
		return lexDatetime
	case '_':
		return lexNumber
	case '.', 'e', 'E':
		return lexFloat
	}

	lx.backup()
	lx.emit(itemInteger)
	return lx.pop()
}
|
||||
|
||||
// lexDatetime consumes a Datetime, to a first approximation.
// The parser validates that it matches one of the accepted formats.
func lexDatetime(lx *lexer) stateFn {
	r := lx.next()
	if isDigit(r) {
		return lexDatetime
	}
	switch r {
	// Date/time punctuation and the UTC/offset markers.
	case '-', 'T', ':', '.', 'Z', '+':
		return lexDatetime
	}

	lx.backup()
	lx.emit(itemDatetime)
	return lx.pop()
}
|
||||
|
||||
// lexNumberStart consumes either an integer or a float. It assumes that a sign
// has already been read, but that *no* digits have been consumed.
// lexNumberStart will move to the appropriate integer or float states.
func lexNumberStart(lx *lexer) stateFn {
	// We MUST see a digit. Even floats have to start with a digit.
	r := lx.next()
	if !isDigit(r) {
		if r == '.' {
			return lx.errorf("floats must start with a digit, not '.'")
		}
		return lx.errorf("expected a digit but got %q", r)
	}
	return lexNumber
}
|
||||
|
||||
// lexNumber consumes an integer or a float after seeing the first digit.
// A '.', 'e', or 'E' switches to float lexing; '_' separators stay in the
// integer state (validity is checked later by the parser).
func lexNumber(lx *lexer) stateFn {
	r := lx.next()
	if isDigit(r) {
		return lexNumber
	}
	switch r {
	case '_':
		return lexNumber
	case '.', 'e', 'E':
		return lexFloat
	}

	lx.backup()
	lx.emit(itemInteger)
	return lx.pop()
}
|
||||
|
||||
// lexFloat consumes the elements of a float. It allows any sequence of
// float-like characters, so floats emitted by the lexer are only a first
// approximation and must be validated by the parser.
func lexFloat(lx *lexer) stateFn {
	r := lx.next()
	if isDigit(r) {
		return lexFloat
	}
	switch r {
	case '_', '.', '-', '+', 'e', 'E':
		return lexFloat
	}

	lx.backup()
	lx.emit(itemFloat)
	return lx.pop()
}
|
||||
|
||||
// lexBool consumes a bool string: 'true' or 'false'. Any other run of
// letters produces an error, which also covers unquoted word-like values.
func lexBool(lx *lexer) stateFn {
	var rs []rune
	for {
		r := lx.next()
		if !unicode.IsLetter(r) {
			lx.backup()
			break
		}
		rs = append(rs, r)
	}
	s := string(rs)
	switch s {
	case "true", "false":
		lx.emit(itemBool)
		return lx.pop()
	}
	return lx.errorf("expected value but found %q instead", s)
}
|
||||
|
||||
// lexCommentStart begins the lexing of a comment. It will emit
// itemCommentStart and consume no characters, passing control to lexComment.
func lexCommentStart(lx *lexer) stateFn {
	lx.ignore()
	lx.emit(itemCommentStart)
	return lexComment
}
|
||||
|
||||
// lexComment lexes an entire comment. It assumes that '#' has been consumed.
// It will consume *up to* the first newline character, and pass control
// back to the last state on the stack.
func lexComment(lx *lexer) stateFn {
	r := lx.peek()
	if isNL(r) || r == eof {
		lx.emit(itemText)
		return lx.pop()
	}
	lx.next()
	return lexComment
}
|
||||
|
||||
// lexSkip ignores all slurped input and moves on to the next state.
// The discard is deferred into the returned stateFn so it runs as part of
// the normal state-machine step.
func lexSkip(lx *lexer, nextState stateFn) stateFn {
	return func(lx *lexer) stateFn {
		lx.ignore()
		return nextState
	}
}
|
||||
|
||||
// isWhitespace reports whether r is a whitespace character according to
// the TOML spec (only space and horizontal tab qualify).
func isWhitespace(r rune) bool {
	switch r {
	case ' ', '\t':
		return true
	default:
		return false
	}
}
|
||||
|
||||
// isNL reports whether r is a newline character (LF or CR).
func isNL(r rune) bool {
	switch r {
	case '\n', '\r':
		return true
	default:
		return false
	}
}
|
||||
|
||||
// isDigit reports whether r is an ASCII decimal digit.
func isDigit(r rune) bool {
	if r < '0' || r > '9' {
		return false
	}
	return true
}
|
||||
|
||||
// isHexadecimal reports whether r is an ASCII hexadecimal digit
// (0-9, a-f, or A-F).
func isHexadecimal(r rune) bool {
	switch {
	case '0' <= r && r <= '9':
		return true
	case 'a' <= r && r <= 'f':
		return true
	case 'A' <= r && r <= 'F':
		return true
	default:
		return false
	}
}
|
||||
|
||||
// isBareKeyChar reports whether r may appear in a bare (unquoted) TOML
// key: ASCII letters, digits, underscore, and dash.
func isBareKeyChar(r rune) bool {
	switch {
	case 'A' <= r && r <= 'Z', 'a' <= r && r <= 'z':
		return true
	case '0' <= r && r <= '9':
		return true
	case r == '_', r == '-':
		return true
	default:
		return false
	}
}
|
||||
|
||||
func (itype itemType) String() string {
|
||||
switch itype {
|
||||
case itemError:
|
||||
return "Error"
|
||||
case itemNIL:
|
||||
return "NIL"
|
||||
case itemEOF:
|
||||
return "EOF"
|
||||
case itemText:
|
||||
return "Text"
|
||||
case itemString, itemRawString, itemMultilineString, itemRawMultilineString:
|
||||
return "String"
|
||||
case itemBool:
|
||||
return "Bool"
|
||||
case itemInteger:
|
||||
return "Integer"
|
||||
case itemFloat:
|
||||
return "Float"
|
||||
case itemDatetime:
|
||||
return "DateTime"
|
||||
case itemTableStart:
|
||||
return "TableStart"
|
||||
case itemTableEnd:
|
||||
return "TableEnd"
|
||||
case itemKeyStart:
|
||||
return "KeyStart"
|
||||
case itemArray:
|
||||
return "Array"
|
||||
case itemArrayEnd:
|
||||
return "ArrayEnd"
|
||||
case itemCommentStart:
|
||||
return "CommentStart"
|
||||
}
|
||||
panic(fmt.Sprintf("BUG: Unknown type '%d'.", int(itype)))
|
||||
}
|
||||
|
||||
// String renders the item as "(Type, value)" for debugging output.
func (item item) String() string {
	return fmt.Sprintf("(%s, %s)", item.typ.String(), item.val)
}
|
592
vendor/github.com/BurntSushi/toml/parse.go
generated
vendored
592
vendor/github.com/BurntSushi/toml/parse.go
generated
vendored
@@ -1,592 +0,0 @@
|
||||
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// parser consumes items from the lexer and builds the decoded TOML
// document as nested Go maps, tracking type and ordering metadata.
type parser struct {
	// The decoded document: nested map[string]interface{} values.
	mapping map[string]interface{}
	// TOML type recorded for each fully-qualified key string.
	types map[string]tomlType
	// Source of lexed items.
	lx *lexer

	// A list of keys in the order that they appear in the TOML data.
	ordered []Key

	// the full key for the current hash in scope
	context Key

	// the base key name for everything except hashes
	currentKey string

	// rough approximation of line number
	approxLine int

	// A map of 'key.group.names' to whether they were created implicitly.
	implicits map[string]bool
}
|
||||
|
||||
// parseError is the error value raised (via panic) by the parser and
// recovered in parse; its text already carries line/key context.
type parseError string

// Error implements the error interface.
func (pe parseError) Error() string {
	return string(pe)
}
|
||||
|
||||
// parse lexes and parses the TOML document in data, returning the
// populated parser. Internal errors are raised as parseError panics and
// converted back into an ordinary error here; any other panic is a bug
// and is re-raised.
func parse(data string) (p *parser, err error) {
	defer func() {
		if r := recover(); r != nil {
			var ok bool
			if err, ok = r.(parseError); ok {
				return
			}
			panic(r)
		}
	}()

	p = &parser{
		mapping:   make(map[string]interface{}),
		types:     make(map[string]tomlType),
		lx:        lex(data),
		ordered:   make([]Key, 0),
		implicits: make(map[string]bool),
	}
	// Drive the lexer to EOF, handling one top-level item at a time.
	for {
		item := p.next()
		if item.typ == itemEOF {
			break
		}
		p.topLevel(item)
	}

	return p, nil
}
|
||||
|
||||
// panicf aborts parsing with a user-facing error, annotated with the
// approximate line and the key being parsed. It is recovered in parse.
func (p *parser) panicf(format string, v ...interface{}) {
	msg := fmt.Sprintf("Near line %d (last key parsed '%s'): %s",
		p.approxLine, p.current(), fmt.Sprintf(format, v...))
	panic(parseError(msg))
}
|
||||
|
||||
// next returns the next item from the lexer, converting lexer errors
// into parser panics.
func (p *parser) next() item {
	it := p.lx.nextItem()
	if it.typ == itemError {
		p.panicf("%s", it.val)
	}
	return it
}
|
||||
|
||||
// bug panics with an internal-error message; unlike panicf, this is not
// recovered as a parseError and indicates a defect in this package.
func (p *parser) bug(format string, v ...interface{}) {
	panic(fmt.Sprintf("BUG: "+format+"\n\n", v...))
}
|
||||
|
||||
// expect consumes the next item and asserts (via bug) that it has the
// given type, returning it.
func (p *parser) expect(typ itemType) item {
	it := p.next()
	p.assertEqual(typ, it.typ)
	return it
}
|
||||
|
||||
// assertEqual panics with an internal-error message if the two item
// types differ.
func (p *parser) assertEqual(expected, got itemType) {
	if expected != got {
		p.bug("Expected '%s' but got '%s'.", expected, got)
	}
}
|
||||
|
||||
// topLevel handles one top-level item: a comment, a [table] header, a
// [[array-table]] header, or a key = value pair.
func (p *parser) topLevel(item item) {
	switch item.typ {
	case itemCommentStart:
		p.approxLine = item.line
		p.expect(itemText)
	case itemTableStart:
		// Collect the dotted name components until the table-end marker.
		kg := p.next()
		p.approxLine = kg.line

		var key Key
		for ; kg.typ != itemTableEnd && kg.typ != itemEOF; kg = p.next() {
			key = append(key, p.keyString(kg))
		}
		p.assertEqual(itemTableEnd, kg.typ)

		p.establishContext(key, false)
		p.setType("", tomlHash)
		p.ordered = append(p.ordered, key)
	case itemArrayTableStart:
		// Same as above, but establishes an array-of-tables context.
		kg := p.next()
		p.approxLine = kg.line

		var key Key
		for ; kg.typ != itemArrayTableEnd && kg.typ != itemEOF; kg = p.next() {
			key = append(key, p.keyString(kg))
		}
		p.assertEqual(itemArrayTableEnd, kg.typ)

		p.establishContext(key, true)
		p.setType("", tomlArrayHash)
		p.ordered = append(p.ordered, key)
	case itemKeyStart:
		kname := p.next()
		p.approxLine = kname.line
		p.currentKey = p.keyString(kname)

		val, typ := p.value(p.next())
		p.setValue(p.currentKey, val)
		p.setType(p.currentKey, typ)
		p.ordered = append(p.ordered, p.context.add(p.currentKey))
		p.currentKey = ""
	default:
		p.bug("Unexpected type at top level: %s", item.typ)
	}
}
|
||||
|
||||
// keyString gets a string for a key (or part of a key in a table name).
// Bare keys are used verbatim; quoted keys go through value so escapes
// and multiline trimming are applied.
func (p *parser) keyString(it item) string {
	switch it.typ {
	case itemText:
		return it.val
	case itemString, itemMultilineString,
		itemRawString, itemRawMultilineString:
		s, _ := p.value(it)
		return s.(string)
	default:
		p.bug("Unexpected key type: %s", it.typ)
		panic("unreachable")
	}
}
|
||||
|
||||
// value translates an expected value from the lexer into a Go value wrapped
// as an empty interface. It returns the value together with its TOML type.
// Strings get escape/trim processing; numbers are validated (underscore and
// period placement) before conversion; arrays and inline tables recurse.
func (p *parser) value(it item) (interface{}, tomlType) {
	switch it.typ {
	case itemString:
		return p.replaceEscapes(it.val), p.typeOfPrimitive(it)
	case itemMultilineString:
		// Drop an initial newline and backslash-escaped line breaks
		// before resolving escapes.
		trimmed := stripFirstNewline(stripEscapedWhitespace(it.val))
		return p.replaceEscapes(trimmed), p.typeOfPrimitive(it)
	case itemRawString:
		return it.val, p.typeOfPrimitive(it)
	case itemRawMultilineString:
		return stripFirstNewline(it.val), p.typeOfPrimitive(it)
	case itemBool:
		switch it.val {
		case "true":
			return true, p.typeOfPrimitive(it)
		case "false":
			return false, p.typeOfPrimitive(it)
		}
		p.bug("Expected boolean value, but got '%s'.", it.val)
	case itemInteger:
		if !numUnderscoresOK(it.val) {
			p.panicf("Invalid integer %q: underscores must be surrounded by digits",
				it.val)
		}
		val := strings.Replace(it.val, "_", "", -1)
		num, err := strconv.ParseInt(val, 10, 64)
		if err != nil {
			// Distinguish integer values. Normally, it'd be a bug if the lexer
			// provides an invalid integer, but it's possible that the number is
			// out of range of valid values (which the lexer cannot determine).
			// So mark the former as a bug but the latter as a legitimate user
			// error.
			if e, ok := err.(*strconv.NumError); ok &&
				e.Err == strconv.ErrRange {

				p.panicf("Integer '%s' is out of the range of 64-bit "+
					"signed integers.", it.val)
			} else {
				p.bug("Expected integer value, but got '%s'.", it.val)
			}
		}
		return num, p.typeOfPrimitive(it)
	case itemFloat:
		// Validate '_' placement in each digit run between '.', 'e', 'E'.
		parts := strings.FieldsFunc(it.val, func(r rune) bool {
			switch r {
			case '.', 'e', 'E':
				return true
			}
			return false
		})
		for _, part := range parts {
			if !numUnderscoresOK(part) {
				p.panicf("Invalid float %q: underscores must be "+
					"surrounded by digits", it.val)
			}
		}
		if !numPeriodsOK(it.val) {
			// As a special case, numbers like '123.' or '1.e2',
			// which are valid as far as Go/strconv are concerned,
			// must be rejected because TOML says that a fractional
			// part consists of '.' followed by 1+ digits.
			p.panicf("Invalid float %q: '.' must be followed "+
				"by one or more digits", it.val)
		}
		val := strings.Replace(it.val, "_", "", -1)
		num, err := strconv.ParseFloat(val, 64)
		if err != nil {
			if e, ok := err.(*strconv.NumError); ok &&
				e.Err == strconv.ErrRange {

				p.panicf("Float '%s' is out of the range of 64-bit "+
					"IEEE-754 floating-point numbers.", it.val)
			} else {
				p.panicf("Invalid float value: %q", it.val)
			}
		}
		return num, p.typeOfPrimitive(it)
	case itemDatetime:
		// Try each accepted layout in turn; first match wins.
		var t time.Time
		var ok bool
		var err error
		for _, format := range []string{
			"2006-01-02T15:04:05Z07:00",
			"2006-01-02T15:04:05",
			"2006-01-02",
		} {
			t, err = time.ParseInLocation(format, it.val, time.Local)
			if err == nil {
				ok = true
				break
			}
		}
		if !ok {
			p.panicf("Invalid TOML Datetime: %q.", it.val)
		}
		return t, p.typeOfPrimitive(it)
	case itemArray:
		array := make([]interface{}, 0)
		types := make([]tomlType, 0)

		for it = p.next(); it.typ != itemArrayEnd; it = p.next() {
			if it.typ == itemCommentStart {
				p.expect(itemText)
				continue
			}

			val, typ := p.value(it)
			array = append(array, val)
			types = append(types, typ)
		}
		return array, p.typeOfArray(types)
	case itemInlineTableStart:
		var (
			hash         = make(map[string]interface{})
			outerContext = p.context
			outerKey     = p.currentKey
		)

		// Descend into the inline table's key context; restored below.
		p.context = append(p.context, p.currentKey)
		p.currentKey = ""
		for it := p.next(); it.typ != itemInlineTableEnd; it = p.next() {
			if it.typ != itemKeyStart {
				p.bug("Expected key start but instead found %q, around line %d",
					it.val, p.approxLine)
			}
			// NOTE(review): this comment branch is unreachable — any
			// itemCommentStart already fails the itemKeyStart check above.
			if it.typ == itemCommentStart {
				p.expect(itemText)
				continue
			}

			// retrieve key
			k := p.next()
			p.approxLine = k.line
			kname := p.keyString(k)

			// retrieve value
			p.currentKey = kname
			val, typ := p.value(p.next())
			// make sure we keep metadata up to date
			p.setType(kname, typ)
			p.ordered = append(p.ordered, p.context.add(p.currentKey))
			hash[kname] = val
		}
		p.context = outerContext
		p.currentKey = outerKey
		return hash, tomlHash
	}
	p.bug("Unexpected value type: %s", it.typ)
	panic("unreachable")
}
|
||||
|
||||
// numUnderscoresOK checks whether each underscore in s is surrounded by
// characters that are not underscores. It rejects leading, trailing, and
// doubled underscores (and the empty string, which contains no digit).
func numUnderscoresOK(s string) bool {
	prevWasDigitish := false
	for _, c := range s {
		if c != '_' {
			prevWasDigitish = true
			continue
		}
		// An underscore must follow a non-underscore character...
		if !prevWasDigitish {
			return false
		}
		// ...and must itself be followed by one.
		prevWasDigitish = false
	}
	return prevWasDigitish
}
|
||||
|
||||
// numPeriodsOK checks whether every period in s is followed by a digit
// (TOML requires a fractional part to be '.' plus one or more digits).
func numPeriodsOK(s string) bool {
	afterDot := false
	for _, c := range s {
		if afterDot && (c < '0' || c > '9') {
			return false
		}
		afterDot = c == '.'
	}
	// A trailing '.' has no digit after it.
	return !afterDot
}
|
||||
|
||||
// establishContext sets the current context of the parser,
// where the context is either a hash or an array of hashes. Which one is
// set depends on the value of the `array` parameter.
//
// Establishing the context also makes sure that the key isn't a duplicate, and
// will create implicit hashes automatically.
func (p *parser) establishContext(key Key, array bool) {
	var ok bool

	// Always start at the top level and drill down for our context.
	hashContext := p.mapping
	keyContext := make(Key, 0)

	// We only need implicit hashes for key[0:-1]
	for _, k := range key[0 : len(key)-1] {
		_, ok = hashContext[k]
		keyContext = append(keyContext, k)

		// No key? Make an implicit hash and move on.
		if !ok {
			p.addImplicit(keyContext)
			hashContext[k] = make(map[string]interface{})
		}

		// If the hash context is actually an array of tables, then set
		// the hash context to the last element in that array.
		//
		// Otherwise, it better be a table, since this MUST be a key group (by
		// virtue of it not being the last element in a key).
		switch t := hashContext[k].(type) {
		case []map[string]interface{}:
			hashContext = t[len(t)-1]
		case map[string]interface{}:
			hashContext = t
		default:
			p.panicf("Key '%s' was already created as a hash.", keyContext)
		}
	}

	p.context = keyContext
	if array {
		// If this is the first element for this array, then allocate a new
		// list of tables for it.
		k := key[len(key)-1]
		if _, ok := hashContext[k]; !ok {
			hashContext[k] = make([]map[string]interface{}, 0, 5)
		}

		// Add a new table. But make sure the key hasn't already been used
		// for something else.
		if hash, ok := hashContext[k].([]map[string]interface{}); ok {
			hashContext[k] = append(hash, make(map[string]interface{}))
		} else {
			p.panicf("Key '%s' was already created and cannot be used as "+
				"an array.", keyContext)
		}
	} else {
		// Plain table: setValue also enforces duplicate-key rules.
		p.setValue(key[len(key)-1], make(map[string]interface{}))
	}
	p.context = append(p.context, key[len(key)-1])
}
|
||||
|
||||
// setValue sets the given key to the given value in the current context.
// It will make sure that the key hasn't already been defined, account for
// implicit key groups.
func (p *parser) setValue(key string, value interface{}) {
	var tmpHash interface{}
	var ok bool

	// Walk from the root down to the hash named by p.context.
	hash := p.mapping
	keyContext := make(Key, 0)
	for _, k := range p.context {
		keyContext = append(keyContext, k)
		if tmpHash, ok = hash[k]; !ok {
			p.bug("Context for key '%s' has not been established.", keyContext)
		}
		switch t := tmpHash.(type) {
		case []map[string]interface{}:
			// The context is a table of hashes. Pick the most recent table
			// defined as the current hash.
			hash = t[len(t)-1]
		case map[string]interface{}:
			hash = t
		default:
			p.bug("Expected hash to have type 'map[string]interface{}', but "+
				"it has '%T' instead.", tmpHash)
		}
	}
	keyContext = append(keyContext, key)

	if _, ok := hash[key]; ok {
		// Typically, if the given key has already been set, then we have
		// to raise an error since duplicate keys are disallowed. However,
		// it's possible that a key was previously defined implicitly. In this
		// case, it is allowed to be redefined concretely. (See the
		// `tests/valid/implicit-and-explicit-after.toml` test in `toml-test`.)
		//
		// But we have to make sure to stop marking it as an implicit. (So that
		// another redefinition provokes an error.)
		//
		// Note that since it has already been defined (as a hash), we don't
		// want to overwrite it. So our business is done.
		if p.isImplicit(keyContext) {
			p.removeImplicit(keyContext)
			return
		}

		// Otherwise, we have a concrete key trying to override a previous
		// key, which is *always* wrong.
		p.panicf("Key '%s' has already been defined.", keyContext)
	}
	hash[key] = value
}
|
||||
|
||||
// setType sets the type of a particular value at a given key.
|
||||
// It should be called immediately AFTER setValue.
|
||||
//
|
||||
// Note that if `key` is empty, then the type given will be applied to the
|
||||
// current context (which is either a table or an array of tables).
|
||||
func (p *parser) setType(key string, typ tomlType) {
|
||||
keyContext := make(Key, 0, len(p.context)+1)
|
||||
for _, k := range p.context {
|
||||
keyContext = append(keyContext, k)
|
||||
}
|
||||
if len(key) > 0 { // allow type setting for hashes
|
||||
keyContext = append(keyContext, key)
|
||||
}
|
||||
p.types[keyContext.String()] = typ
|
||||
}
|
||||
|
||||
// addImplicit sets the given Key as having been created implicitly.
|
||||
func (p *parser) addImplicit(key Key) {
|
||||
p.implicits[key.String()] = true
|
||||
}
|
||||
|
||||
// removeImplicit stops tagging the given key as having been implicitly
|
||||
// created.
|
||||
func (p *parser) removeImplicit(key Key) {
|
||||
p.implicits[key.String()] = false
|
||||
}
|
||||
|
||||
// isImplicit returns true if the key group pointed to by the key was created
|
||||
// implicitly.
|
||||
func (p *parser) isImplicit(key Key) bool {
|
||||
return p.implicits[key.String()]
|
||||
}
|
||||
|
||||
// current returns the full key name of the current context.
|
||||
func (p *parser) current() string {
|
||||
if len(p.currentKey) == 0 {
|
||||
return p.context.String()
|
||||
}
|
||||
if len(p.context) == 0 {
|
||||
return p.currentKey
|
||||
}
|
||||
return fmt.Sprintf("%s.%s", p.context, p.currentKey)
|
||||
}
|
||||
|
||||
func stripFirstNewline(s string) string {
|
||||
if len(s) == 0 || s[0] != '\n' {
|
||||
return s
|
||||
}
|
||||
return s[1:]
|
||||
}
|
||||
|
||||
func stripEscapedWhitespace(s string) string {
|
||||
esc := strings.Split(s, "\\\n")
|
||||
if len(esc) > 1 {
|
||||
for i := 1; i < len(esc); i++ {
|
||||
esc[i] = strings.TrimLeftFunc(esc[i], unicode.IsSpace)
|
||||
}
|
||||
}
|
||||
return strings.Join(esc, "")
|
||||
}
|
||||
|
||||
func (p *parser) replaceEscapes(str string) string {
|
||||
var replaced []rune
|
||||
s := []byte(str)
|
||||
r := 0
|
||||
for r < len(s) {
|
||||
if s[r] != '\\' {
|
||||
c, size := utf8.DecodeRune(s[r:])
|
||||
r += size
|
||||
replaced = append(replaced, c)
|
||||
continue
|
||||
}
|
||||
r += 1
|
||||
if r >= len(s) {
|
||||
p.bug("Escape sequence at end of string.")
|
||||
return ""
|
||||
}
|
||||
switch s[r] {
|
||||
default:
|
||||
p.bug("Expected valid escape code after \\, but got %q.", s[r])
|
||||
return ""
|
||||
case 'b':
|
||||
replaced = append(replaced, rune(0x0008))
|
||||
r += 1
|
||||
case 't':
|
||||
replaced = append(replaced, rune(0x0009))
|
||||
r += 1
|
||||
case 'n':
|
||||
replaced = append(replaced, rune(0x000A))
|
||||
r += 1
|
||||
case 'f':
|
||||
replaced = append(replaced, rune(0x000C))
|
||||
r += 1
|
||||
case 'r':
|
||||
replaced = append(replaced, rune(0x000D))
|
||||
r += 1
|
||||
case '"':
|
||||
replaced = append(replaced, rune(0x0022))
|
||||
r += 1
|
||||
case '\\':
|
||||
replaced = append(replaced, rune(0x005C))
|
||||
r += 1
|
||||
case 'u':
|
||||
// At this point, we know we have a Unicode escape of the form
|
||||
// `uXXXX` at [r, r+5). (Because the lexer guarantees this
|
||||
// for us.)
|
||||
escaped := p.asciiEscapeToUnicode(s[r+1 : r+5])
|
||||
replaced = append(replaced, escaped)
|
||||
r += 5
|
||||
case 'U':
|
||||
// At this point, we know we have a Unicode escape of the form
|
||||
// `uXXXX` at [r, r+9). (Because the lexer guarantees this
|
||||
// for us.)
|
||||
escaped := p.asciiEscapeToUnicode(s[r+1 : r+9])
|
||||
replaced = append(replaced, escaped)
|
||||
r += 9
|
||||
}
|
||||
}
|
||||
return string(replaced)
|
||||
}
|
||||
|
||||
func (p *parser) asciiEscapeToUnicode(bs []byte) rune {
|
||||
s := string(bs)
|
||||
hex, err := strconv.ParseUint(strings.ToLower(s), 16, 32)
|
||||
if err != nil {
|
||||
p.bug("Could not parse '%s' as a hexadecimal number, but the "+
|
||||
"lexer claims it's OK: %s", s, err)
|
||||
}
|
||||
if !utf8.ValidRune(rune(hex)) {
|
||||
p.panicf("Escaped character '\\u%s' is not valid UTF-8.", s)
|
||||
}
|
||||
return rune(hex)
|
||||
}
|
||||
|
||||
func isStringType(ty itemType) bool {
|
||||
return ty == itemString || ty == itemMultilineString ||
|
||||
ty == itemRawString || ty == itemRawMultilineString
|
||||
}
|
1
vendor/github.com/BurntSushi/toml/session.vim
generated
vendored
1
vendor/github.com/BurntSushi/toml/session.vim
generated
vendored
@@ -1 +0,0 @@
|
||||
au BufWritePost *.go silent!make tags > /dev/null 2>&1
|
91
vendor/github.com/BurntSushi/toml/type_check.go
generated
vendored
91
vendor/github.com/BurntSushi/toml/type_check.go
generated
vendored
@@ -1,91 +0,0 @@
|
||||
package toml
|
||||
|
||||
// tomlType represents any Go type that corresponds to a TOML type.
|
||||
// While the first draft of the TOML spec has a simplistic type system that
|
||||
// probably doesn't need this level of sophistication, we seem to be militating
|
||||
// toward adding real composite types.
|
||||
type tomlType interface {
|
||||
typeString() string
|
||||
}
|
||||
|
||||
// typeEqual accepts any two types and returns true if they are equal.
|
||||
func typeEqual(t1, t2 tomlType) bool {
|
||||
if t1 == nil || t2 == nil {
|
||||
return false
|
||||
}
|
||||
return t1.typeString() == t2.typeString()
|
||||
}
|
||||
|
||||
func typeIsHash(t tomlType) bool {
|
||||
return typeEqual(t, tomlHash) || typeEqual(t, tomlArrayHash)
|
||||
}
|
||||
|
||||
type tomlBaseType string
|
||||
|
||||
func (btype tomlBaseType) typeString() string {
|
||||
return string(btype)
|
||||
}
|
||||
|
||||
func (btype tomlBaseType) String() string {
|
||||
return btype.typeString()
|
||||
}
|
||||
|
||||
var (
|
||||
tomlInteger tomlBaseType = "Integer"
|
||||
tomlFloat tomlBaseType = "Float"
|
||||
tomlDatetime tomlBaseType = "Datetime"
|
||||
tomlString tomlBaseType = "String"
|
||||
tomlBool tomlBaseType = "Bool"
|
||||
tomlArray tomlBaseType = "Array"
|
||||
tomlHash tomlBaseType = "Hash"
|
||||
tomlArrayHash tomlBaseType = "ArrayHash"
|
||||
)
|
||||
|
||||
// typeOfPrimitive returns a tomlType of any primitive value in TOML.
|
||||
// Primitive values are: Integer, Float, Datetime, String and Bool.
|
||||
//
|
||||
// Passing a lexer item other than the following will cause a BUG message
|
||||
// to occur: itemString, itemBool, itemInteger, itemFloat, itemDatetime.
|
||||
func (p *parser) typeOfPrimitive(lexItem item) tomlType {
|
||||
switch lexItem.typ {
|
||||
case itemInteger:
|
||||
return tomlInteger
|
||||
case itemFloat:
|
||||
return tomlFloat
|
||||
case itemDatetime:
|
||||
return tomlDatetime
|
||||
case itemString:
|
||||
return tomlString
|
||||
case itemMultilineString:
|
||||
return tomlString
|
||||
case itemRawString:
|
||||
return tomlString
|
||||
case itemRawMultilineString:
|
||||
return tomlString
|
||||
case itemBool:
|
||||
return tomlBool
|
||||
}
|
||||
p.bug("Cannot infer primitive type of lex item '%s'.", lexItem)
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// typeOfArray returns a tomlType for an array given a list of types of its
|
||||
// values.
|
||||
//
|
||||
// In the current spec, if an array is homogeneous, then its type is always
|
||||
// "Array". If the array is not homogeneous, an error is generated.
|
||||
func (p *parser) typeOfArray(types []tomlType) tomlType {
|
||||
// Empty arrays are cool.
|
||||
if len(types) == 0 {
|
||||
return tomlArray
|
||||
}
|
||||
|
||||
theType := types[0]
|
||||
for _, t := range types[1:] {
|
||||
if !typeEqual(theType, t) {
|
||||
p.panicf("Array contains values of type '%s' and '%s', but "+
|
||||
"arrays must be homogeneous.", theType, t)
|
||||
}
|
||||
}
|
||||
return tomlArray
|
||||
}
|
242
vendor/github.com/BurntSushi/toml/type_fields.go
generated
vendored
242
vendor/github.com/BurntSushi/toml/type_fields.go
generated
vendored
@@ -1,242 +0,0 @@
|
||||
package toml
|
||||
|
||||
// Struct field handling is adapted from code in encoding/json:
|
||||
//
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the Go distribution.
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"sort"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// A field represents a single field found in a struct.
|
||||
type field struct {
|
||||
name string // the name of the field (`toml` tag included)
|
||||
tag bool // whether field has a `toml` tag
|
||||
index []int // represents the depth of an anonymous field
|
||||
typ reflect.Type // the type of the field
|
||||
}
|
||||
|
||||
// byName sorts field by name, breaking ties with depth,
|
||||
// then breaking ties with "name came from toml tag", then
|
||||
// breaking ties with index sequence.
|
||||
type byName []field
|
||||
|
||||
func (x byName) Len() int { return len(x) }
|
||||
|
||||
func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
||||
|
||||
func (x byName) Less(i, j int) bool {
|
||||
if x[i].name != x[j].name {
|
||||
return x[i].name < x[j].name
|
||||
}
|
||||
if len(x[i].index) != len(x[j].index) {
|
||||
return len(x[i].index) < len(x[j].index)
|
||||
}
|
||||
if x[i].tag != x[j].tag {
|
||||
return x[i].tag
|
||||
}
|
||||
return byIndex(x).Less(i, j)
|
||||
}
|
||||
|
||||
// byIndex sorts field by index sequence.
|
||||
type byIndex []field
|
||||
|
||||
func (x byIndex) Len() int { return len(x) }
|
||||
|
||||
func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
||||
|
||||
func (x byIndex) Less(i, j int) bool {
|
||||
for k, xik := range x[i].index {
|
||||
if k >= len(x[j].index) {
|
||||
return false
|
||||
}
|
||||
if xik != x[j].index[k] {
|
||||
return xik < x[j].index[k]
|
||||
}
|
||||
}
|
||||
return len(x[i].index) < len(x[j].index)
|
||||
}
|
||||
|
||||
// typeFields returns a list of fields that TOML should recognize for the given
|
||||
// type. The algorithm is breadth-first search over the set of structs to
|
||||
// include - the top struct and then any reachable anonymous structs.
|
||||
func typeFields(t reflect.Type) []field {
|
||||
// Anonymous fields to explore at the current level and the next.
|
||||
current := []field{}
|
||||
next := []field{{typ: t}}
|
||||
|
||||
// Count of queued names for current level and the next.
|
||||
count := map[reflect.Type]int{}
|
||||
nextCount := map[reflect.Type]int{}
|
||||
|
||||
// Types already visited at an earlier level.
|
||||
visited := map[reflect.Type]bool{}
|
||||
|
||||
// Fields found.
|
||||
var fields []field
|
||||
|
||||
for len(next) > 0 {
|
||||
current, next = next, current[:0]
|
||||
count, nextCount = nextCount, map[reflect.Type]int{}
|
||||
|
||||
for _, f := range current {
|
||||
if visited[f.typ] {
|
||||
continue
|
||||
}
|
||||
visited[f.typ] = true
|
||||
|
||||
// Scan f.typ for fields to include.
|
||||
for i := 0; i < f.typ.NumField(); i++ {
|
||||
sf := f.typ.Field(i)
|
||||
if sf.PkgPath != "" && !sf.Anonymous { // unexported
|
||||
continue
|
||||
}
|
||||
opts := getOptions(sf.Tag)
|
||||
if opts.skip {
|
||||
continue
|
||||
}
|
||||
index := make([]int, len(f.index)+1)
|
||||
copy(index, f.index)
|
||||
index[len(f.index)] = i
|
||||
|
||||
ft := sf.Type
|
||||
if ft.Name() == "" && ft.Kind() == reflect.Ptr {
|
||||
// Follow pointer.
|
||||
ft = ft.Elem()
|
||||
}
|
||||
|
||||
// Record found field and index sequence.
|
||||
if opts.name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct {
|
||||
tagged := opts.name != ""
|
||||
name := opts.name
|
||||
if name == "" {
|
||||
name = sf.Name
|
||||
}
|
||||
fields = append(fields, field{name, tagged, index, ft})
|
||||
if count[f.typ] > 1 {
|
||||
// If there were multiple instances, add a second,
|
||||
// so that the annihilation code will see a duplicate.
|
||||
// It only cares about the distinction between 1 or 2,
|
||||
// so don't bother generating any more copies.
|
||||
fields = append(fields, fields[len(fields)-1])
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// Record new anonymous struct to explore in next round.
|
||||
nextCount[ft]++
|
||||
if nextCount[ft] == 1 {
|
||||
f := field{name: ft.Name(), index: index, typ: ft}
|
||||
next = append(next, f)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sort.Sort(byName(fields))
|
||||
|
||||
// Delete all fields that are hidden by the Go rules for embedded fields,
|
||||
// except that fields with TOML tags are promoted.
|
||||
|
||||
// The fields are sorted in primary order of name, secondary order
|
||||
// of field index length. Loop over names; for each name, delete
|
||||
// hidden fields by choosing the one dominant field that survives.
|
||||
out := fields[:0]
|
||||
for advance, i := 0, 0; i < len(fields); i += advance {
|
||||
// One iteration per name.
|
||||
// Find the sequence of fields with the name of this first field.
|
||||
fi := fields[i]
|
||||
name := fi.name
|
||||
for advance = 1; i+advance < len(fields); advance++ {
|
||||
fj := fields[i+advance]
|
||||
if fj.name != name {
|
||||
break
|
||||
}
|
||||
}
|
||||
if advance == 1 { // Only one field with this name
|
||||
out = append(out, fi)
|
||||
continue
|
||||
}
|
||||
dominant, ok := dominantField(fields[i : i+advance])
|
||||
if ok {
|
||||
out = append(out, dominant)
|
||||
}
|
||||
}
|
||||
|
||||
fields = out
|
||||
sort.Sort(byIndex(fields))
|
||||
|
||||
return fields
|
||||
}
|
||||
|
||||
// dominantField looks through the fields, all of which are known to
|
||||
// have the same name, to find the single field that dominates the
|
||||
// others using Go's embedding rules, modified by the presence of
|
||||
// TOML tags. If there are multiple top-level fields, the boolean
|
||||
// will be false: This condition is an error in Go and we skip all
|
||||
// the fields.
|
||||
func dominantField(fields []field) (field, bool) {
|
||||
// The fields are sorted in increasing index-length order. The winner
|
||||
// must therefore be one with the shortest index length. Drop all
|
||||
// longer entries, which is easy: just truncate the slice.
|
||||
length := len(fields[0].index)
|
||||
tagged := -1 // Index of first tagged field.
|
||||
for i, f := range fields {
|
||||
if len(f.index) > length {
|
||||
fields = fields[:i]
|
||||
break
|
||||
}
|
||||
if f.tag {
|
||||
if tagged >= 0 {
|
||||
// Multiple tagged fields at the same level: conflict.
|
||||
// Return no field.
|
||||
return field{}, false
|
||||
}
|
||||
tagged = i
|
||||
}
|
||||
}
|
||||
if tagged >= 0 {
|
||||
return fields[tagged], true
|
||||
}
|
||||
// All remaining fields have the same length. If there's more than one,
|
||||
// we have a conflict (two fields named "X" at the same level) and we
|
||||
// return no field.
|
||||
if len(fields) > 1 {
|
||||
return field{}, false
|
||||
}
|
||||
return fields[0], true
|
||||
}
|
||||
|
||||
var fieldCache struct {
|
||||
sync.RWMutex
|
||||
m map[reflect.Type][]field
|
||||
}
|
||||
|
||||
// cachedTypeFields is like typeFields but uses a cache to avoid repeated work.
|
||||
func cachedTypeFields(t reflect.Type) []field {
|
||||
fieldCache.RLock()
|
||||
f := fieldCache.m[t]
|
||||
fieldCache.RUnlock()
|
||||
if f != nil {
|
||||
return f
|
||||
}
|
||||
|
||||
// Compute fields without lock.
|
||||
// Might duplicate effort but won't hold other computations back.
|
||||
f = typeFields(t)
|
||||
if f == nil {
|
||||
f = []field{}
|
||||
}
|
||||
|
||||
fieldCache.Lock()
|
||||
if fieldCache.m == nil {
|
||||
fieldCache.m = map[reflect.Type][]field{}
|
||||
}
|
||||
fieldCache.m[t] = f
|
||||
fieldCache.Unlock()
|
||||
return f
|
||||
}
|
13
vendor/github.com/mvdan/sh/interp/builtin.go
generated
vendored
13
vendor/github.com/mvdan/sh/interp/builtin.go
generated
vendored
@@ -158,9 +158,7 @@ func (r *Runner) builtinCode(pos syntax.Pos, name string, args []string) int {
|
||||
r.errf("usage: cd [dir]\n")
|
||||
return 2
|
||||
}
|
||||
if !filepath.IsAbs(dir) {
|
||||
dir = filepath.Join(r.Dir, dir)
|
||||
}
|
||||
dir = r.relPath(dir)
|
||||
if _, err := os.Stat(dir); err != nil {
|
||||
return 1
|
||||
}
|
||||
@@ -216,7 +214,7 @@ func (r *Runner) builtinCode(pos syntax.Pos, name string, args []string) int {
|
||||
if len(args) < 1 {
|
||||
r.runErr(pos, "source: need filename")
|
||||
}
|
||||
f, err := os.Open(args[0])
|
||||
f, err := os.Open(r.relPath(args[0]))
|
||||
if err != nil {
|
||||
r.errf("eval: %v\n", err)
|
||||
return 1
|
||||
@@ -256,3 +254,10 @@ func (r *Runner) builtinCode(pos syntax.Pos, name string, args []string) int {
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (r *Runner) relPath(path string) string {
|
||||
if filepath.IsAbs(path) {
|
||||
return path
|
||||
}
|
||||
return filepath.Join(r.Dir, path)
|
||||
}
|
||||
|
57
vendor/github.com/mvdan/sh/interp/interp.go
generated
vendored
57
vendor/github.com/mvdan/sh/interp/interp.go
generated
vendored
@@ -123,19 +123,23 @@ type ExitCode uint8
|
||||
func (e ExitCode) Error() string { return fmt.Sprintf("exit status %d", e) }
|
||||
|
||||
type RunError struct {
|
||||
syntax.Position
|
||||
Filename string
|
||||
syntax.Pos
|
||||
Text string
|
||||
}
|
||||
|
||||
func (e RunError) Error() string {
|
||||
return fmt.Sprintf("%s: %s", e.Position.String(), e.Text)
|
||||
if e.Filename == "" {
|
||||
return fmt.Sprintf("%s: %s", e.Pos.String(), e.Text)
|
||||
}
|
||||
return fmt.Sprintf("%s:%s: %s", e.Filename, e.Pos.String(), e.Text)
|
||||
}
|
||||
|
||||
func (r *Runner) runErr(pos syntax.Pos, format string, a ...interface{}) {
|
||||
if r.err == nil {
|
||||
r.err = RunError{
|
||||
Position: r.File.Position(pos),
|
||||
Text: fmt.Sprintf(format, a...),
|
||||
Pos: pos,
|
||||
Text: fmt.Sprintf(format, a...),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -239,7 +243,7 @@ func (r *Runner) Run() error {
|
||||
}
|
||||
r.Dir = dir
|
||||
}
|
||||
r.stmts(r.File.Stmts)
|
||||
r.stmts(r.File.StmtList)
|
||||
r.lastExit()
|
||||
if r.err == ExitCode(0) {
|
||||
r.err = nil
|
||||
@@ -438,10 +442,10 @@ func (r *Runner) cmd(cm syntax.Command) {
|
||||
}
|
||||
switch x := cm.(type) {
|
||||
case *syntax.Block:
|
||||
r.stmts(x.Stmts)
|
||||
r.stmts(x.StmtList)
|
||||
case *syntax.Subshell:
|
||||
r2 := *r
|
||||
r2.stmts(x.Stmts)
|
||||
r2.stmts(x.StmtList)
|
||||
r.exit = r2.exit
|
||||
case *syntax.CallExpr:
|
||||
fields := r.fields(x.Args)
|
||||
@@ -477,26 +481,19 @@ func (r *Runner) cmd(cm syntax.Command) {
|
||||
pr.Close()
|
||||
}
|
||||
case *syntax.IfClause:
|
||||
r.stmts(x.CondStmts)
|
||||
r.stmts(x.Cond)
|
||||
if r.exit == 0 {
|
||||
r.stmts(x.ThenStmts)
|
||||
r.stmts(x.Then)
|
||||
return
|
||||
}
|
||||
r.exit = 0
|
||||
for _, el := range x.Elifs {
|
||||
r.stmts(el.CondStmts)
|
||||
if r.exit == 0 {
|
||||
r.stmts(el.ThenStmts)
|
||||
return
|
||||
}
|
||||
}
|
||||
r.stmts(x.ElseStmts)
|
||||
r.stmts(x.Else)
|
||||
case *syntax.WhileClause:
|
||||
for r.err == nil {
|
||||
r.stmts(x.CondStmts)
|
||||
r.stmts(x.Cond)
|
||||
stop := (r.exit == 0) == x.Until
|
||||
r.exit = 0
|
||||
if stop || r.loopStmtsBroken(x.DoStmts) {
|
||||
if stop || r.loopStmtsBroken(x.Do) {
|
||||
break
|
||||
}
|
||||
}
|
||||
@@ -506,14 +503,14 @@ func (r *Runner) cmd(cm syntax.Command) {
|
||||
name := y.Name.Value
|
||||
for _, field := range r.fields(y.Items) {
|
||||
r.setVar(name, field)
|
||||
if r.loopStmtsBroken(x.DoStmts) {
|
||||
if r.loopStmtsBroken(x.Do) {
|
||||
break
|
||||
}
|
||||
}
|
||||
case *syntax.CStyleLoop:
|
||||
r.arithm(y.Init)
|
||||
for r.arithm(y.Cond) != 0 {
|
||||
if r.loopStmtsBroken(x.DoStmts) {
|
||||
if r.loopStmtsBroken(x.Do) {
|
||||
break
|
||||
}
|
||||
r.arithm(y.Post)
|
||||
@@ -543,7 +540,7 @@ func (r *Runner) cmd(cm syntax.Command) {
|
||||
buf.WriteString(escaped)
|
||||
}
|
||||
if match(buf.String(), str) {
|
||||
r.stmts(ci.Stmts)
|
||||
r.stmts(ci.StmtList)
|
||||
return
|
||||
}
|
||||
}
|
||||
@@ -567,8 +564,8 @@ func (r *Runner) cmd(cm syntax.Command) {
|
||||
}
|
||||
}
|
||||
|
||||
func (r *Runner) stmts(stmts []*syntax.Stmt) {
|
||||
for _, stmt := range stmts {
|
||||
func (r *Runner) stmts(sl syntax.StmtList) {
|
||||
for _, stmt := range sl.Stmts {
|
||||
r.stmt(stmt)
|
||||
}
|
||||
}
|
||||
@@ -615,11 +612,7 @@ func (r *Runner) redir(rd *syntax.Redirect) (io.Closer, error) {
|
||||
case syntax.RdrOut, syntax.RdrAll:
|
||||
mode = os.O_RDWR | os.O_CREATE | os.O_TRUNC
|
||||
}
|
||||
path := arg
|
||||
if !filepath.IsAbs(path) {
|
||||
path = filepath.Join(r.Dir, path)
|
||||
}
|
||||
f, err := os.OpenFile(path, mode, 0644)
|
||||
f, err := os.OpenFile(r.relPath(arg), mode, 0644)
|
||||
if err != nil {
|
||||
// TODO: print to stderr?
|
||||
return nil, err
|
||||
@@ -638,10 +631,10 @@ func (r *Runner) redir(rd *syntax.Redirect) (io.Closer, error) {
|
||||
return f, nil
|
||||
}
|
||||
|
||||
func (r *Runner) loopStmtsBroken(stmts []*syntax.Stmt) bool {
|
||||
func (r *Runner) loopStmtsBroken(sl syntax.StmtList) bool {
|
||||
r.inLoop = true
|
||||
defer func() { r.inLoop = false }()
|
||||
for _, stmt := range stmts {
|
||||
for _, stmt := range sl.Stmts {
|
||||
r.stmt(stmt)
|
||||
if r.contnEnclosing > 0 {
|
||||
r.contnEnclosing--
|
||||
@@ -732,7 +725,7 @@ func (r *Runner) wordFields(wps []syntax.WordPart, quoted bool) [][]fieldPart {
|
||||
r2 := *r
|
||||
var buf bytes.Buffer
|
||||
r2.Stdout = &buf
|
||||
r2.stmts(x.Stmts)
|
||||
r2.stmts(x.StmtList)
|
||||
val := strings.TrimRight(buf.String(), "\n")
|
||||
if quoted {
|
||||
curField = append(curField, fieldPart{val: val})
|
||||
|
44
vendor/github.com/mvdan/sh/interp/test.go
generated
vendored
44
vendor/github.com/mvdan/sh/interp/test.go
generated
vendored
@@ -7,7 +7,6 @@ import (
|
||||
"bytes"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
|
||||
"github.com/mvdan/sh/syntax"
|
||||
@@ -58,19 +57,19 @@ func (r *Runner) binTest(op syntax.BinTestOperator, x, y string) bool {
|
||||
}
|
||||
return re.MatchString(x)
|
||||
case syntax.TsNewer:
|
||||
i1, i2 := stat(r.Dir, x), stat(r.Dir, y)
|
||||
i1, i2 := r.stat(x), r.stat(y)
|
||||
if i1 == nil || i2 == nil {
|
||||
return false
|
||||
}
|
||||
return i1.ModTime().After(i2.ModTime())
|
||||
case syntax.TsOlder:
|
||||
i1, i2 := stat(r.Dir, x), stat(r.Dir, y)
|
||||
i1, i2 := r.stat(x), r.stat(y)
|
||||
if i1 == nil || i2 == nil {
|
||||
return false
|
||||
}
|
||||
return i1.ModTime().Before(i2.ModTime())
|
||||
case syntax.TsDevIno:
|
||||
i1, i2 := stat(r.Dir, x), stat(r.Dir, y)
|
||||
i1, i2 := r.stat(x), r.stat(y)
|
||||
return os.SameFile(i1, i2)
|
||||
case syntax.TsEql:
|
||||
return atoi(x) == atoi(y)
|
||||
@@ -95,61 +94,60 @@ func (r *Runner) binTest(op syntax.BinTestOperator, x, y string) bool {
|
||||
}
|
||||
}
|
||||
|
||||
func stat(dir, name string) os.FileInfo {
|
||||
info, _ := os.Stat(filepath.Join(dir, name))
|
||||
func (r *Runner) stat(name string) os.FileInfo {
|
||||
info, _ := os.Stat(r.relPath(name))
|
||||
return info
|
||||
}
|
||||
|
||||
func statMode(dir, name string, mode os.FileMode) bool {
|
||||
info := stat(dir, name)
|
||||
func (r *Runner) statMode(name string, mode os.FileMode) bool {
|
||||
info := r.stat(name)
|
||||
return info != nil && info.Mode()&mode != 0
|
||||
}
|
||||
|
||||
func (r *Runner) unTest(op syntax.UnTestOperator, x string) bool {
|
||||
switch op {
|
||||
case syntax.TsExists:
|
||||
return stat(r.Dir, x) != nil
|
||||
return r.stat(x) != nil
|
||||
case syntax.TsRegFile:
|
||||
info := stat(r.Dir, x)
|
||||
info := r.stat(x)
|
||||
return info != nil && info.Mode().IsRegular()
|
||||
case syntax.TsDirect:
|
||||
return statMode(r.Dir, x, os.ModeDir)
|
||||
return r.statMode(x, os.ModeDir)
|
||||
//case syntax.TsCharSp:
|
||||
//case syntax.TsBlckSp:
|
||||
case syntax.TsNmPipe:
|
||||
return statMode(r.Dir, x, os.ModeNamedPipe)
|
||||
return r.statMode(x, os.ModeNamedPipe)
|
||||
case syntax.TsSocket:
|
||||
return statMode(r.Dir, x, os.ModeSocket)
|
||||
return r.statMode(x, os.ModeSocket)
|
||||
case syntax.TsSmbLink:
|
||||
info, _ := os.Lstat(x)
|
||||
info, _ := os.Lstat(r.relPath(x))
|
||||
return info != nil && info.Mode()&os.ModeSymlink != 0
|
||||
case syntax.TsSticky:
|
||||
return statMode(r.Dir, x, os.ModeSticky)
|
||||
return r.statMode(x, os.ModeSticky)
|
||||
case syntax.TsUIDSet:
|
||||
return statMode(r.Dir, x, os.ModeSetuid)
|
||||
return r.statMode(x, os.ModeSetuid)
|
||||
case syntax.TsGIDSet:
|
||||
return statMode(r.Dir, x, os.ModeSetgid)
|
||||
return r.statMode(x, os.ModeSetgid)
|
||||
//case syntax.TsGrpOwn:
|
||||
//case syntax.TsUsrOwn:
|
||||
//case syntax.TsModif:
|
||||
case syntax.TsRead:
|
||||
f, err := os.OpenFile(x, os.O_RDONLY, 0)
|
||||
f, err := os.OpenFile(r.relPath(x), os.O_RDONLY, 0)
|
||||
if err == nil {
|
||||
f.Close()
|
||||
}
|
||||
return err == nil
|
||||
case syntax.TsWrite:
|
||||
f, err := os.OpenFile(x, os.O_WRONLY, 0)
|
||||
f, err := os.OpenFile(r.relPath(x), os.O_WRONLY, 0)
|
||||
if err == nil {
|
||||
f.Close()
|
||||
}
|
||||
return err == nil
|
||||
case syntax.TsExec:
|
||||
// use an absolute path to not use $PATH
|
||||
_, err := exec.LookPath(filepath.Join(r.Dir, x))
|
||||
_, err := exec.LookPath(r.relPath(x))
|
||||
return err == nil
|
||||
case syntax.TsNoEmpty:
|
||||
info := stat(r.Dir, x)
|
||||
info := r.stat(x)
|
||||
return info != nil && info.Size() > 0
|
||||
//case syntax.TsFdTerm:
|
||||
case syntax.TsEmpStr:
|
||||
@@ -164,7 +162,7 @@ func (r *Runner) unTest(op syntax.UnTestOperator, x string) bool {
|
||||
case syntax.TsNot:
|
||||
return x == ""
|
||||
default:
|
||||
r.runErr(0, "unhandled unary test op: %v", op)
|
||||
r.runErr(syntax.Pos{}, "unhandled unary test op: %v", op)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
48
vendor/github.com/mvdan/sh/syntax/lexer.go
generated
vendored
48
vendor/github.com/mvdan/sh/syntax/lexer.go
generated
vendored
@@ -57,11 +57,16 @@ func wordBreak(r rune) bool {
|
||||
|
||||
func (p *Parser) rune() rune {
|
||||
retry:
|
||||
if p.npos < len(p.bs) {
|
||||
if b := p.bs[p.npos]; b < utf8.RuneSelf {
|
||||
if p.npos++; b == '\n' {
|
||||
p.f.lines = append(p.f.lines, p.getPos())
|
||||
if p.bsp < len(p.bs) {
|
||||
if b := p.bs[p.bsp]; b < utf8.RuneSelf {
|
||||
p.bsp++
|
||||
if p.r == '\n' {
|
||||
// p.r instead of b so that newline
|
||||
// character positions don't have col 0.
|
||||
p.npos.line++
|
||||
p.npos.col = 0
|
||||
}
|
||||
p.npos.col++
|
||||
if p.litBs != nil {
|
||||
p.litBs = append(p.litBs, b)
|
||||
}
|
||||
@@ -69,24 +74,25 @@ retry:
|
||||
p.r = r
|
||||
return r
|
||||
}
|
||||
if p.npos+utf8.UTFMax >= len(p.bs) {
|
||||
if p.bsp+utf8.UTFMax >= len(p.bs) {
|
||||
// we might need up to 4 bytes to read a full
|
||||
// non-ascii rune
|
||||
p.fill()
|
||||
}
|
||||
var w int
|
||||
p.r, w = utf8.DecodeRune(p.bs[p.npos:])
|
||||
p.r, w = utf8.DecodeRune(p.bs[p.bsp:])
|
||||
if p.litBs != nil {
|
||||
p.litBs = append(p.litBs, p.bs[p.npos:p.npos+w]...)
|
||||
p.litBs = append(p.litBs, p.bs[p.bsp:p.bsp+w]...)
|
||||
}
|
||||
p.npos += w
|
||||
p.bsp += w
|
||||
p.npos.col += uint16(w)
|
||||
if p.r == utf8.RuneError && w == 1 {
|
||||
p.posErr(p.getPos(), "invalid UTF-8 encoding")
|
||||
p.posErr(p.npos, "invalid UTF-8 encoding")
|
||||
}
|
||||
} else {
|
||||
if p.r == utf8.RuneSelf {
|
||||
} else if p.fill(); p.bs == nil {
|
||||
p.npos++
|
||||
p.bsp++
|
||||
p.r = utf8.RuneSelf
|
||||
} else {
|
||||
goto retry
|
||||
@@ -99,9 +105,9 @@ retry:
|
||||
// had not yet been used at the end of the buffer are slid into the
|
||||
// beginning of the buffer.
|
||||
func (p *Parser) fill() {
|
||||
left := len(p.bs) - p.npos
|
||||
p.offs += p.npos
|
||||
copy(p.readBuf[:left], p.readBuf[p.npos:])
|
||||
p.offs += p.bsp
|
||||
left := len(p.bs) - p.bsp
|
||||
copy(p.readBuf[:left], p.readBuf[p.bsp:])
|
||||
var n int
|
||||
var err error
|
||||
if p.readErr == nil {
|
||||
@@ -123,13 +129,13 @@ func (p *Parser) fill() {
|
||||
} else {
|
||||
p.bs = p.readBuf[:left+n]
|
||||
}
|
||||
p.npos = 0
|
||||
p.bsp = 0
|
||||
}
|
||||
|
||||
func (p *Parser) nextKeepSpaces() {
|
||||
r := p.r
|
||||
if p.pos = p.getPos(); r > utf8.RuneSelf {
|
||||
p.pos -= Pos(utf8.RuneLen(r) - 1)
|
||||
p.pos = posAddCol(p.pos, -1) // TODO
|
||||
}
|
||||
switch p.quote {
|
||||
case paramExpRepl:
|
||||
@@ -220,7 +226,7 @@ skipSpace:
|
||||
}
|
||||
}
|
||||
if p.pos = p.getPos(); r > utf8.RuneSelf {
|
||||
p.pos -= Pos(utf8.RuneLen(r) - 1)
|
||||
p.pos = posAddCol(p.pos, -1) // TODO
|
||||
}
|
||||
switch {
|
||||
case p.quote&allRegTokens != 0:
|
||||
@@ -234,7 +240,7 @@ skipSpace:
|
||||
r = p.rune()
|
||||
}
|
||||
if p.keepComments {
|
||||
p.f.Comments = append(p.f.Comments, &Comment{
|
||||
*p.curComs = append(*p.curComs, Comment{
|
||||
Hash: p.pos,
|
||||
Text: p.endLit(),
|
||||
})
|
||||
@@ -292,10 +298,10 @@ skipSpace:
|
||||
}
|
||||
|
||||
func (p *Parser) peekByte(b byte) bool {
|
||||
if p.npos == len(p.bs) && p.readErr == nil {
|
||||
if p.bsp == len(p.bs) && p.readErr == nil {
|
||||
p.fill()
|
||||
}
|
||||
return p.npos < len(p.bs) && p.bs[p.npos] == b
|
||||
return p.bsp < len(p.bs) && p.bs[p.bsp] == b
|
||||
}
|
||||
|
||||
func (p *Parser) regToken(r rune) token {
|
||||
@@ -693,9 +699,9 @@ func (p *Parser) newLit(r rune) {
|
||||
if r <= utf8.RuneSelf {
|
||||
p.litBs = p.litBuf[:1]
|
||||
p.litBs[0] = byte(r)
|
||||
} else if p.npos <= len(p.bs) {
|
||||
} else if p.bsp <= len(p.bs) {
|
||||
w := utf8.RuneLen(r)
|
||||
p.litBs = append(p.litBuf[:0], p.bs[p.npos-w:p.npos]...)
|
||||
p.litBs = append(p.litBuf[:0], p.bs[p.bsp-w:p.bsp]...)
|
||||
}
|
||||
}
|
||||
|
||||
|
331
vendor/github.com/mvdan/sh/syntax/nodes.go
generated
vendored
331
vendor/github.com/mvdan/sh/syntax/nodes.go
generated
vendored
@@ -7,9 +7,12 @@ import "fmt"
|
||||
|
||||
// Node represents an AST node.
|
||||
type Node interface {
|
||||
// Pos returns the first character of the node
|
||||
// Pos returns the position of the first character of the node.
|
||||
// Comments are ignored.
|
||||
Pos() Pos
|
||||
// End returns the character immediately after the node
|
||||
// End returns the position of the character immediately after
|
||||
// the node. If the character is a newline, the line number
|
||||
// won't cross into the next line. Comments are ignored.
|
||||
End() Pos
|
||||
}
|
||||
|
||||
@@ -17,83 +20,85 @@ type Node interface {
|
||||
type File struct {
|
||||
Name string
|
||||
|
||||
Stmts []*Stmt
|
||||
Comments []*Comment
|
||||
|
||||
lines []Pos
|
||||
StmtList
|
||||
}
|
||||
|
||||
// Pos is the internal representation of a position within a source
|
||||
// file.
|
||||
type Pos uint32
|
||||
|
||||
// IsValid reports whether the position is valid. All positions in nodes
|
||||
// returned by Parse are valid.
|
||||
func (p Pos) IsValid() bool { return p > 0 }
|
||||
|
||||
const maxPos = Pos(^uint32(0))
|
||||
|
||||
// Position describes a position within a source file including the line
|
||||
// and column location. A Position is valid if the line number is > 0.
|
||||
type Position struct {
|
||||
Filename string // if any
|
||||
Offset int // byte offset, starting at 0
|
||||
Line int // line number, starting at 1
|
||||
Column int // column number, starting at 1 (in bytes)
|
||||
// StmtList is a list of statements with any number of trailing
|
||||
// comments. Both lists can be empty.
|
||||
type StmtList struct {
|
||||
Stmts []*Stmt
|
||||
Last []Comment
|
||||
}
|
||||
|
||||
// IsValid reports whether the position is valid. All positions in nodes
|
||||
// returned by Parse are valid.
|
||||
func (p Position) IsValid() bool { return p.Line > 0 }
|
||||
|
||||
// String returns the position in the "file:line:column" form, or
|
||||
// "line:column" if there is no filename available.
|
||||
func (p Position) String() string {
|
||||
prefix := ""
|
||||
if p.Filename != "" {
|
||||
prefix = p.Filename + ":"
|
||||
func (s StmtList) pos() Pos {
|
||||
if len(s.Stmts) > 0 {
|
||||
return s.Stmts[0].Pos()
|
||||
}
|
||||
return fmt.Sprintf("%s%d:%d", prefix, p.Line, p.Column)
|
||||
if len(s.Last) > 0 {
|
||||
return s.Last[0].Pos()
|
||||
}
|
||||
return Pos{}
|
||||
}
|
||||
|
||||
func (s StmtList) empty() bool {
|
||||
return len(s.Stmts) == 0 && len(s.Last) == 0
|
||||
}
|
||||
|
||||
// Pos is a position within a source file.
|
||||
type Pos struct {
|
||||
offs uint32
|
||||
line, col uint16
|
||||
}
|
||||
|
||||
// Offset returns the byte offset of the position in the original
|
||||
// source file. Byte offsets start at 0.
|
||||
func (p Pos) Offset() uint {
|
||||
return uint(p.offs)
|
||||
}
|
||||
|
||||
// Line returns the line number of the position, starting at 1.
|
||||
func (p Pos) Line() uint {
|
||||
return uint(p.line)
|
||||
}
|
||||
|
||||
// Col returns the column number of the position, starting at 0. It
|
||||
// counts in bytes.
|
||||
func (p Pos) Col() uint {
|
||||
return uint(p.col)
|
||||
}
|
||||
|
||||
func (p Pos) String() string {
|
||||
return fmt.Sprintf("%d:%d", p.Line(), p.Col())
|
||||
}
|
||||
|
||||
// IsValid reports whether the position is valid. All positions in nodes
|
||||
// returned by Parse are valid.
|
||||
func (p Pos) IsValid() bool { return p.line > 0 }
|
||||
|
||||
func (p Pos) After(p2 Pos) bool { return p.offs > p2.offs }
|
||||
|
||||
func (f *File) Pos() Pos {
|
||||
if len(f.Stmts) == 0 {
|
||||
return 0
|
||||
return Pos{}
|
||||
}
|
||||
return f.Stmts[0].Pos()
|
||||
}
|
||||
|
||||
func (f *File) End() Pos {
|
||||
if len(f.Stmts) == 0 {
|
||||
return 0
|
||||
return Pos{}
|
||||
}
|
||||
return f.Stmts[len(f.Stmts)-1].End()
|
||||
}
|
||||
|
||||
func (f *File) Position(p Pos) (pos Position) {
|
||||
pos.Filename = f.Name
|
||||
pos.Offset = int(p) - 1
|
||||
if i := searchPos(f.lines, p); i >= 0 {
|
||||
pos.Line, pos.Column = i+1, int(p-f.lines[i])
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func searchPos(a []Pos, x Pos) int {
|
||||
i, j := 0, len(a)
|
||||
for i < j {
|
||||
h := i + (j-i)/2
|
||||
if a[h] <= x {
|
||||
i = h + 1
|
||||
} else {
|
||||
j = h
|
||||
}
|
||||
}
|
||||
return i - 1
|
||||
func posAddCol(p Pos, n int) Pos {
|
||||
p.col += uint16(n)
|
||||
p.offs += uint32(n)
|
||||
return p
|
||||
}
|
||||
|
||||
func posMax(p1, p2 Pos) Pos {
|
||||
if p2 > p1 {
|
||||
if p2.After(p1) {
|
||||
return p2
|
||||
}
|
||||
return p1
|
||||
@@ -106,12 +111,13 @@ type Comment struct {
|
||||
}
|
||||
|
||||
func (c *Comment) Pos() Pos { return c.Hash }
|
||||
func (c *Comment) End() Pos { return c.Hash + Pos(len(c.Text)) }
|
||||
func (c *Comment) End() Pos { return posAddCol(c.Hash, len(c.Text)) }
|
||||
|
||||
// Stmt represents a statement, otherwise known as a compound command.
|
||||
// It is compromised of a command and other components that may come
|
||||
// before or after it.
|
||||
type Stmt struct {
|
||||
Comments []Comment
|
||||
Cmd Command
|
||||
Position Pos
|
||||
Semicolon Pos
|
||||
@@ -126,11 +132,11 @@ type Stmt struct {
|
||||
func (s *Stmt) Pos() Pos { return s.Position }
|
||||
func (s *Stmt) End() Pos {
|
||||
if s.Semicolon.IsValid() {
|
||||
return s.Semicolon + 1
|
||||
return posAddCol(s.Semicolon, 1)
|
||||
}
|
||||
end := s.Position
|
||||
if s.Negated {
|
||||
end++
|
||||
end = posAddCol(end, 1)
|
||||
}
|
||||
if s.Cmd != nil {
|
||||
end = s.Cmd.End()
|
||||
@@ -170,6 +176,7 @@ func (*DeclClause) commandNode() {}
|
||||
func (*LetClause) commandNode() {}
|
||||
func (*TimeClause) commandNode() {}
|
||||
func (*CoprocClause) commandNode() {}
|
||||
func (*SelectClause) commandNode() {}
|
||||
|
||||
// Assign represents an assignment to a variable.
|
||||
//
|
||||
@@ -177,6 +184,10 @@ func (*CoprocClause) commandNode() {}
|
||||
// an associative array. In the former, it's just an arithmetic
|
||||
// expression. In the latter, it will be a word with a single DblQuoted
|
||||
// part.
|
||||
//
|
||||
// If Naked is true, it's part of a DeclClause and doesn't contain a
|
||||
// value. In that context, if the name wasn't a literal, it will be in
|
||||
// Value instead of Name.
|
||||
type Assign struct {
|
||||
Append bool // +=
|
||||
Naked bool // without '='
|
||||
@@ -186,7 +197,13 @@ type Assign struct {
|
||||
Array *ArrayExpr // =(arr)
|
||||
}
|
||||
|
||||
func (a *Assign) Pos() Pos { return a.Name.Pos() }
|
||||
func (a *Assign) Pos() Pos {
|
||||
if a.Name == nil {
|
||||
return a.Value.Pos()
|
||||
}
|
||||
return a.Name.Pos()
|
||||
}
|
||||
|
||||
func (a *Assign) End() Pos {
|
||||
if a.Value != nil {
|
||||
return a.Value.End()
|
||||
@@ -195,12 +212,12 @@ func (a *Assign) End() Pos {
|
||||
return a.Array.End()
|
||||
}
|
||||
if a.Index != nil {
|
||||
return a.Index.End() + 2
|
||||
return posAddCol(a.Index.End(), 2)
|
||||
}
|
||||
if a.Naked {
|
||||
return a.Name.End()
|
||||
}
|
||||
return a.Name.End() + 1
|
||||
return posAddCol(a.Name.End(), 1)
|
||||
}
|
||||
|
||||
// Redirect represents an input/output redirection.
|
||||
@@ -232,61 +249,73 @@ func (c *CallExpr) End() Pos { return c.Args[len(c.Args)-1].End() }
|
||||
// nested shell environment.
|
||||
type Subshell struct {
|
||||
Lparen, Rparen Pos
|
||||
Stmts []*Stmt
|
||||
StmtList
|
||||
}
|
||||
|
||||
func (s *Subshell) Pos() Pos { return s.Lparen }
|
||||
func (s *Subshell) End() Pos { return s.Rparen + 1 }
|
||||
func (s *Subshell) End() Pos { return posAddCol(s.Rparen, 1) }
|
||||
|
||||
// Block represents a series of commands that should be executed in a
|
||||
// nested scope.
|
||||
type Block struct {
|
||||
Lbrace, Rbrace Pos
|
||||
Stmts []*Stmt
|
||||
StmtList
|
||||
}
|
||||
|
||||
func (b *Block) Pos() Pos { return b.Rbrace }
|
||||
func (b *Block) End() Pos { return b.Rbrace + 1 }
|
||||
func (b *Block) End() Pos { return posAddCol(b.Rbrace, 1) }
|
||||
|
||||
// IfClause represents an if statement.
|
||||
type IfClause struct {
|
||||
If, Then, Else, Fi Pos
|
||||
CondStmts []*Stmt
|
||||
ThenStmts []*Stmt
|
||||
Elifs []*Elif
|
||||
ElseStmts []*Stmt
|
||||
Elif bool
|
||||
IfPos Pos // pos of "elif" if Elif == true
|
||||
ThenPos Pos
|
||||
ElsePos Pos // pos of "elif" if FollowedByElif() == true
|
||||
FiPos Pos // empty if Elif == true
|
||||
Cond StmtList
|
||||
Then StmtList
|
||||
Else StmtList
|
||||
}
|
||||
|
||||
func (c *IfClause) Pos() Pos { return c.If }
|
||||
func (c *IfClause) End() Pos { return c.Fi + 2 }
|
||||
func (c *IfClause) Pos() Pos { return c.IfPos }
|
||||
func (c *IfClause) End() Pos {
|
||||
if !c.FiPos.IsValid() {
|
||||
return posAddCol(c.ElsePos, 4)
|
||||
}
|
||||
return posAddCol(c.FiPos, 2)
|
||||
}
|
||||
|
||||
// Elif represents an "else if" case in an if clause.
|
||||
type Elif struct {
|
||||
Elif, Then Pos
|
||||
CondStmts []*Stmt
|
||||
ThenStmts []*Stmt
|
||||
// FollowedByElif reports whether this IfClause is followed by an "elif"
|
||||
// IfClause in its Else branch. This is true if Else.Stmts has exactly
|
||||
// one statement with an IfClause whose Elif field is true.
|
||||
func (c *IfClause) FollowedByElif() bool {
|
||||
if len(c.Else.Stmts) != 1 {
|
||||
return false
|
||||
}
|
||||
ic, _ := c.Else.Stmts[0].Cmd.(*IfClause)
|
||||
return ic != nil && ic.Elif
|
||||
}
|
||||
|
||||
// WhileClause represents a while or an until clause.
|
||||
type WhileClause struct {
|
||||
While, Do, Done Pos
|
||||
Until bool
|
||||
CondStmts []*Stmt
|
||||
DoStmts []*Stmt
|
||||
WhilePos, DoPos, DonePos Pos
|
||||
Until bool
|
||||
Cond StmtList
|
||||
Do StmtList
|
||||
}
|
||||
|
||||
func (w *WhileClause) Pos() Pos { return w.While }
|
||||
func (w *WhileClause) End() Pos { return w.Done + 4 }
|
||||
func (w *WhileClause) Pos() Pos { return w.WhilePos }
|
||||
func (w *WhileClause) End() Pos { return posAddCol(w.DonePos, 4) }
|
||||
|
||||
// ForClause represents a for clause.
|
||||
type ForClause struct {
|
||||
For, Do, Done Pos
|
||||
Loop Loop
|
||||
DoStmts []*Stmt
|
||||
ForPos, DoPos, DonePos Pos
|
||||
Loop Loop
|
||||
Do StmtList
|
||||
}
|
||||
|
||||
func (f *ForClause) Pos() Pos { return f.For }
|
||||
func (f *ForClause) End() Pos { return f.Done + 4 }
|
||||
func (f *ForClause) Pos() Pos { return f.ForPos }
|
||||
func (f *ForClause) End() Pos { return posAddCol(f.DonePos, 4) }
|
||||
|
||||
// Loop holds either *WordIter or *CStyleLoop.
|
||||
type Loop interface {
|
||||
@@ -310,14 +339,14 @@ func (w *WordIter) End() Pos { return posMax(w.Name.End(), wordLastEnd(w.Items))
|
||||
// CStyleLoop represents the behaviour of a for clause similar to the C
|
||||
// language.
|
||||
//
|
||||
// This node will never appear when in PosixConformant mode.
|
||||
// This node will only appear with LangBash.
|
||||
type CStyleLoop struct {
|
||||
Lparen, Rparen Pos
|
||||
Init, Cond, Post ArithmExpr
|
||||
}
|
||||
|
||||
func (c *CStyleLoop) Pos() Pos { return c.Lparen }
|
||||
func (c *CStyleLoop) End() Pos { return c.Rparen + 2 }
|
||||
func (c *CStyleLoop) End() Pos { return posAddCol(c.Rparen, 2) }
|
||||
|
||||
// BinaryCmd represents a binary expression between two statements.
|
||||
type BinaryCmd struct {
|
||||
@@ -332,7 +361,7 @@ func (b *BinaryCmd) End() Pos { return b.Y.End() }
|
||||
// FuncDecl represents the declaration of a function.
|
||||
type FuncDecl struct {
|
||||
Position Pos
|
||||
RsrvWord bool // non-posix "function " style
|
||||
RsrvWord bool // non-posix "function f()" style
|
||||
Name *Lit
|
||||
Body *Stmt
|
||||
}
|
||||
@@ -379,19 +408,13 @@ func (l *Lit) End() Pos { return l.ValueEnd }
|
||||
|
||||
// SglQuoted represents a string within single quotes.
|
||||
type SglQuoted struct {
|
||||
Position Pos
|
||||
Dollar bool // $''
|
||||
Value string
|
||||
Left, Right Pos
|
||||
Dollar bool // $''
|
||||
Value string
|
||||
}
|
||||
|
||||
func (q *SglQuoted) Pos() Pos { return q.Position }
|
||||
func (q *SglQuoted) End() Pos {
|
||||
end := q.Position + 2 + Pos(len(q.Value))
|
||||
if q.Dollar {
|
||||
end++
|
||||
}
|
||||
return end
|
||||
}
|
||||
func (q *SglQuoted) Pos() Pos { return q.Left }
|
||||
func (q *SglQuoted) End() Pos { return q.Right }
|
||||
|
||||
// DblQuoted represents a list of nodes within double quotes.
|
||||
type DblQuoted struct {
|
||||
@@ -404,24 +427,24 @@ func (q *DblQuoted) Pos() Pos { return q.Position }
|
||||
func (q *DblQuoted) End() Pos {
|
||||
if len(q.Parts) == 0 {
|
||||
if q.Dollar {
|
||||
return q.Position + 3
|
||||
return posAddCol(q.Position, 3)
|
||||
}
|
||||
return q.Position + 2
|
||||
return posAddCol(q.Position, 2)
|
||||
}
|
||||
return q.Parts[len(q.Parts)-1].End() + 1
|
||||
return posAddCol(q.Parts[len(q.Parts)-1].End(), 1)
|
||||
}
|
||||
|
||||
// CmdSubst represents a command substitution.
|
||||
type CmdSubst struct {
|
||||
Left, Right Pos
|
||||
Stmts []*Stmt
|
||||
StmtList
|
||||
|
||||
TempFile bool // mksh's ${ foo;}
|
||||
ReplyVar bool // mksh's ${|foo;}
|
||||
}
|
||||
|
||||
func (c *CmdSubst) Pos() Pos { return c.Left }
|
||||
func (c *CmdSubst) End() Pos { return c.Right + 1 }
|
||||
func (c *CmdSubst) End() Pos { return posAddCol(c.Right, 1) }
|
||||
|
||||
// ParamExp represents a parameter expansion.
|
||||
type ParamExp struct {
|
||||
@@ -440,10 +463,10 @@ type ParamExp struct {
|
||||
func (p *ParamExp) Pos() Pos { return p.Dollar }
|
||||
func (p *ParamExp) End() Pos {
|
||||
if !p.Short {
|
||||
return p.Rbrace + 1
|
||||
return posAddCol(p.Rbrace, 1)
|
||||
}
|
||||
if p.Index != nil {
|
||||
return p.Index.End() + 1
|
||||
return posAddCol(p.Index.End(), 1)
|
||||
}
|
||||
return p.Param.End()
|
||||
}
|
||||
@@ -454,7 +477,7 @@ func (p *ParamExp) nakedIndex() bool {
|
||||
|
||||
// Slice represents character slicing inside a ParamExp.
|
||||
//
|
||||
// This node will never appear when in PosixConformant mode.
|
||||
// This node will only appear in LangBash and LangMirBSDKorn.
|
||||
type Slice struct {
|
||||
Offset, Length ArithmExpr
|
||||
}
|
||||
@@ -483,14 +506,14 @@ type ArithmExp struct {
|
||||
func (a *ArithmExp) Pos() Pos { return a.Left }
|
||||
func (a *ArithmExp) End() Pos {
|
||||
if a.Bracket {
|
||||
return a.Right + 1
|
||||
return posAddCol(a.Right, 1)
|
||||
}
|
||||
return a.Right + 2
|
||||
return posAddCol(a.Right, 2)
|
||||
}
|
||||
|
||||
// ArithmCmd represents an arithmetic command.
|
||||
//
|
||||
// This node will never appear when in PosixConformant mode.
|
||||
// This node will only appear in LangBash and LangMirBSDKorn.
|
||||
type ArithmCmd struct {
|
||||
Left, Right Pos
|
||||
Unsigned bool // mksh's ((# expr))
|
||||
@@ -498,7 +521,7 @@ type ArithmCmd struct {
|
||||
}
|
||||
|
||||
func (a *ArithmCmd) Pos() Pos { return a.Left }
|
||||
func (a *ArithmCmd) End() Pos { return a.Right + 2 }
|
||||
func (a *ArithmCmd) End() Pos { return posAddCol(a.Right, 2) }
|
||||
|
||||
// ArithmExpr represents all nodes that form arithmetic expressions.
|
||||
//
|
||||
@@ -552,7 +575,7 @@ func (u *UnaryArithm) Pos() Pos {
|
||||
|
||||
func (u *UnaryArithm) End() Pos {
|
||||
if u.Post {
|
||||
return u.OpPos + 2
|
||||
return posAddCol(u.OpPos, 2)
|
||||
}
|
||||
return u.X.End()
|
||||
}
|
||||
@@ -565,36 +588,41 @@ type ParenArithm struct {
|
||||
}
|
||||
|
||||
func (p *ParenArithm) Pos() Pos { return p.Lparen }
|
||||
func (p *ParenArithm) End() Pos { return p.Rparen + 1 }
|
||||
func (p *ParenArithm) End() Pos { return posAddCol(p.Rparen, 1) }
|
||||
|
||||
// CaseClause represents a case (switch) clause.
|
||||
type CaseClause struct {
|
||||
Case, Esac Pos
|
||||
Word *Word
|
||||
Items []*CaseItem
|
||||
Last []Comment
|
||||
}
|
||||
|
||||
func (c *CaseClause) Pos() Pos { return c.Case }
|
||||
func (c *CaseClause) End() Pos { return c.Esac + 4 }
|
||||
func (c *CaseClause) End() Pos { return posAddCol(c.Esac, 4) }
|
||||
|
||||
// CaseItem represents a pattern list (case) within a CaseClause.
|
||||
type CaseItem struct {
|
||||
Op CaseOperator
|
||||
OpPos Pos
|
||||
Comments []Comment
|
||||
Patterns []*Word
|
||||
Stmts []*Stmt
|
||||
StmtList
|
||||
}
|
||||
|
||||
func (c *CaseItem) Pos() Pos { return c.Patterns[0].Pos() }
|
||||
func (c *CaseItem) End() Pos { return posAddCol(c.OpPos, len(c.Op.String())) }
|
||||
|
||||
// TestClause represents a Bash extended test clause.
|
||||
//
|
||||
// This node will never appear when in PosixConformant mode.
|
||||
// This node will only appear in LangBash and LangMirBSDKorn.
|
||||
type TestClause struct {
|
||||
Left, Right Pos
|
||||
X TestExpr
|
||||
}
|
||||
|
||||
func (t *TestClause) Pos() Pos { return t.Left }
|
||||
func (t *TestClause) End() Pos { return t.Right + 2 }
|
||||
func (t *TestClause) End() Pos { return posAddCol(t.Right, 2) }
|
||||
|
||||
// TestExpr represents all nodes that form arithmetic expressions.
|
||||
//
|
||||
@@ -639,40 +667,45 @@ type ParenTest struct {
|
||||
}
|
||||
|
||||
func (p *ParenTest) Pos() Pos { return p.Lparen }
|
||||
func (p *ParenTest) End() Pos { return p.Rparen + 1 }
|
||||
func (p *ParenTest) End() Pos { return posAddCol(p.Rparen, 1) }
|
||||
|
||||
// DeclClause represents a Bash declare clause.
|
||||
//
|
||||
// This node will never appear when in PosixConformant mode.
|
||||
// This node will only appear with LangBash.
|
||||
type DeclClause struct {
|
||||
Position Pos
|
||||
Variant string // "declare", "local", etc
|
||||
Opts []*Word
|
||||
Assigns []*Assign
|
||||
Variant *Lit // "declare", "local", etc
|
||||
Opts []*Word
|
||||
Assigns []*Assign
|
||||
}
|
||||
|
||||
func (d *DeclClause) Pos() Pos { return d.Position }
|
||||
func (d *DeclClause) Pos() Pos { return d.Variant.Pos() }
|
||||
func (d *DeclClause) End() Pos {
|
||||
if len(d.Assigns) > 0 {
|
||||
return d.Assigns[len(d.Assigns)-1].End()
|
||||
}
|
||||
return wordLastEnd(d.Opts)
|
||||
if len(d.Opts) > 0 {
|
||||
return wordLastEnd(d.Opts)
|
||||
}
|
||||
return d.Variant.End()
|
||||
}
|
||||
|
||||
// ArrayExpr represents a Bash array expression.
|
||||
//
|
||||
// This node will never appear when in PosixConformant mode.
|
||||
// This node will only appear with LangBash.
|
||||
type ArrayExpr struct {
|
||||
Lparen, Rparen Pos
|
||||
Elems []*ArrayElem
|
||||
Last []Comment
|
||||
}
|
||||
|
||||
func (a *ArrayExpr) Pos() Pos { return a.Lparen }
|
||||
func (a *ArrayExpr) End() Pos { return a.Rparen + 1 }
|
||||
func (a *ArrayExpr) End() Pos { return posAddCol(a.Rparen, 1) }
|
||||
|
||||
// ArrayElem represents a Bash array element.
|
||||
type ArrayElem struct {
|
||||
Index ArithmExpr
|
||||
Value *Word
|
||||
Index ArithmExpr // [i]=, ["k"]=
|
||||
Value *Word
|
||||
Comments []Comment
|
||||
}
|
||||
|
||||
func (a *ArrayElem) Pos() Pos {
|
||||
@@ -687,7 +720,7 @@ func (a *ArrayElem) End() Pos { return a.Value.End() }
|
||||
// these are parsed independently of whether shopt has been called or
|
||||
// not.
|
||||
//
|
||||
// This node will never appear when in PosixConformant mode.
|
||||
// This node will only appear in LangBash and LangMirBSDKorn.
|
||||
type ExtGlob struct {
|
||||
OpPos Pos
|
||||
Op GlobOperator
|
||||
@@ -695,23 +728,23 @@ type ExtGlob struct {
|
||||
}
|
||||
|
||||
func (e *ExtGlob) Pos() Pos { return e.OpPos }
|
||||
func (e *ExtGlob) End() Pos { return e.Pattern.End() + 1 }
|
||||
func (e *ExtGlob) End() Pos { return posAddCol(e.Pattern.End(), 1) }
|
||||
|
||||
// ProcSubst represents a Bash process substitution.
|
||||
//
|
||||
// This node will never appear when in PosixConformant mode.
|
||||
// This node will only appear with LangBash.
|
||||
type ProcSubst struct {
|
||||
OpPos, Rparen Pos
|
||||
Op ProcOperator
|
||||
Stmts []*Stmt
|
||||
StmtList
|
||||
}
|
||||
|
||||
func (s *ProcSubst) Pos() Pos { return s.OpPos }
|
||||
func (s *ProcSubst) End() Pos { return s.Rparen + 1 }
|
||||
func (s *ProcSubst) End() Pos { return posAddCol(s.Rparen, 1) }
|
||||
|
||||
// TimeClause represents a Bash time clause.
|
||||
//
|
||||
// This node will never appear when in PosixConformant mode.
|
||||
// This node will only appear in LangBash and LangMirBSDKorn.
|
||||
type TimeClause struct {
|
||||
Time Pos
|
||||
Stmt *Stmt
|
||||
@@ -720,14 +753,14 @@ type TimeClause struct {
|
||||
func (c *TimeClause) Pos() Pos { return c.Time }
|
||||
func (c *TimeClause) End() Pos {
|
||||
if c.Stmt == nil {
|
||||
return c.Time + 4
|
||||
return posAddCol(c.Time, 4)
|
||||
}
|
||||
return c.Stmt.End()
|
||||
}
|
||||
|
||||
// CoprocClause represents a Bash coproc clause.
|
||||
//
|
||||
// This node will never appear when in PosixConformant mode.
|
||||
// This node will only appear with LangBash.
|
||||
type CoprocClause struct {
|
||||
Coproc Pos
|
||||
Name *Lit
|
||||
@@ -739,7 +772,7 @@ func (c *CoprocClause) End() Pos { return c.Stmt.End() }
|
||||
|
||||
// LetClause represents a Bash let clause.
|
||||
//
|
||||
// This node will never appear when in PosixConformant mode.
|
||||
// This node will only appear in LangBash and LangMirBSDKorn.
|
||||
type LetClause struct {
|
||||
Let Pos
|
||||
Exprs []ArithmExpr
|
||||
@@ -748,9 +781,19 @@ type LetClause struct {
|
||||
func (l *LetClause) Pos() Pos { return l.Let }
|
||||
func (l *LetClause) End() Pos { return l.Exprs[len(l.Exprs)-1].End() }
|
||||
|
||||
// SelectClause represents a Bash select clause.
|
||||
type SelectClause struct {
|
||||
SelectPos, DoPos, DonePos Pos
|
||||
Loop WordIter
|
||||
Do StmtList
|
||||
}
|
||||
|
||||
func (f *SelectClause) Pos() Pos { return f.SelectPos }
|
||||
func (f *SelectClause) End() Pos { return posAddCol(f.DonePos, 4) }
|
||||
|
||||
func wordLastEnd(ws []*Word) Pos {
|
||||
if len(ws) == 0 {
|
||||
return 0
|
||||
return Pos{}
|
||||
}
|
||||
return ws[len(ws)-1].End()
|
||||
}
|
||||
|
257
vendor/github.com/mvdan/sh/syntax/parser.go
generated
vendored
257
vendor/github.com/mvdan/sh/syntax/parser.go
generated
vendored
@@ -38,17 +38,11 @@ func NewParser(options ...func(*Parser)) *Parser {
|
||||
// an error is returned.
|
||||
func (p *Parser) Parse(src io.Reader, name string) (*File, error) {
|
||||
p.reset()
|
||||
alloc := &struct {
|
||||
f File
|
||||
l [32]Pos
|
||||
}{}
|
||||
p.f = &alloc.f
|
||||
p.f.Name = name
|
||||
p.f.lines = alloc.l[:1]
|
||||
p.f = &File{Name: name}
|
||||
p.src = src
|
||||
p.rune()
|
||||
p.next()
|
||||
p.f.Stmts = p.stmts()
|
||||
p.f.StmtList = p.stmts()
|
||||
if p.err == nil {
|
||||
// EOF immediately after heredoc word so no newline to
|
||||
// trigger it
|
||||
@@ -60,6 +54,7 @@ func (p *Parser) Parse(src io.Reader, name string) (*File, error) {
|
||||
type Parser struct {
|
||||
src io.Reader
|
||||
bs []byte // current chunk of read bytes
|
||||
bsp int // pos within chunk for the next rune
|
||||
r rune
|
||||
|
||||
f *File
|
||||
@@ -73,9 +68,9 @@ type Parser struct {
|
||||
tok token // current token
|
||||
val string // current value (valid if tok is _Lit*)
|
||||
|
||||
offs int
|
||||
pos Pos // position of tok
|
||||
offs int // chunk offset
|
||||
npos int // pos within chunk for the next rune
|
||||
npos Pos // next position
|
||||
|
||||
quote quoteState // current lexer state
|
||||
asPos int // position of '=' in a literal
|
||||
@@ -90,6 +85,9 @@ type Parser struct {
|
||||
heredocs []*Redirect
|
||||
hdocStop []byte
|
||||
|
||||
accComs []Comment
|
||||
curComs *[]Comment
|
||||
|
||||
helperBuf *bytes.Buffer
|
||||
|
||||
litBatch []Lit
|
||||
@@ -107,14 +105,19 @@ type Parser struct {
|
||||
const bufSize = 1 << 10
|
||||
|
||||
func (p *Parser) reset() {
|
||||
p.bs = nil
|
||||
p.offs, p.npos = 0, 0
|
||||
p.bs, p.bsp = nil, 0
|
||||
p.offs = 0
|
||||
p.npos = Pos{line: 1}
|
||||
p.r, p.err, p.readErr = 0, nil, nil
|
||||
p.quote, p.forbidNested = noState, false
|
||||
p.heredocs, p.buriedHdocs = p.heredocs[:0], 0
|
||||
p.accComs, p.curComs = nil, &p.accComs
|
||||
}
|
||||
|
||||
func (p *Parser) getPos() Pos { return Pos(p.offs + p.npos) }
|
||||
func (p *Parser) getPos() Pos {
|
||||
p.npos.offs = uint32(p.offs + p.bsp - 1)
|
||||
return p.npos
|
||||
}
|
||||
|
||||
func (p *Parser) lit(pos Pos, val string) *Lit {
|
||||
if len(p.litBatch) == 0 {
|
||||
@@ -361,15 +364,15 @@ func (p *Parser) followRsrv(lpos Pos, left, val string) Pos {
|
||||
return pos
|
||||
}
|
||||
|
||||
func (p *Parser) followStmts(left string, lpos Pos, stops ...string) []*Stmt {
|
||||
func (p *Parser) followStmts(left string, lpos Pos, stops ...string) StmtList {
|
||||
if p.gotSameLine(semicolon) {
|
||||
return nil
|
||||
return StmtList{}
|
||||
}
|
||||
sts := p.stmts(stops...)
|
||||
if len(sts) < 1 && !p.newLine {
|
||||
sl := p.stmts(stops...)
|
||||
if len(sl.Stmts) < 1 && !p.newLine {
|
||||
p.followErr(lpos, left, "a statement list")
|
||||
}
|
||||
return sts
|
||||
return sl
|
||||
}
|
||||
|
||||
func (p *Parser) followWordTok(tok token, pos Pos) *Word {
|
||||
@@ -417,7 +420,7 @@ func (p *Parser) matched(lpos Pos, left, right token) Pos {
|
||||
func (p *Parser) errPass(err error) {
|
||||
if p.err == nil {
|
||||
p.err = err
|
||||
p.npos = len(p.bs) + 1
|
||||
p.bsp = len(p.bs) + 1
|
||||
p.r = utf8.RuneSelf
|
||||
p.tok = _EOF
|
||||
}
|
||||
@@ -425,17 +428,22 @@ func (p *Parser) errPass(err error) {
|
||||
|
||||
// ParseError represents an error found when parsing a source file.
|
||||
type ParseError struct {
|
||||
Position
|
||||
Filename string
|
||||
Pos
|
||||
Text string
|
||||
}
|
||||
|
||||
func (e *ParseError) Error() string {
|
||||
return fmt.Sprintf("%s: %s", e.Position.String(), e.Text)
|
||||
if e.Filename == "" {
|
||||
return fmt.Sprintf("%s: %s", e.Pos.String(), e.Text)
|
||||
}
|
||||
return fmt.Sprintf("%s:%s: %s", e.Filename, e.Pos.String(), e.Text)
|
||||
}
|
||||
|
||||
func (p *Parser) posErr(pos Pos, format string, a ...interface{}) {
|
||||
p.errPass(&ParseError{
|
||||
Position: p.f.Position(pos),
|
||||
Filename: p.f.Name,
|
||||
Pos: pos,
|
||||
Text: fmt.Sprintf(format, a...),
|
||||
})
|
||||
}
|
||||
@@ -444,27 +452,28 @@ func (p *Parser) curErr(format string, a ...interface{}) {
|
||||
p.posErr(p.pos, format, a...)
|
||||
}
|
||||
|
||||
func (p *Parser) stmts(stops ...string) (sts []*Stmt) {
|
||||
func (p *Parser) stmts(stops ...string) (sl StmtList) {
|
||||
gotEnd := true
|
||||
loop:
|
||||
for p.tok != _EOF {
|
||||
switch p.tok {
|
||||
case _LitWord:
|
||||
for _, stop := range stops {
|
||||
if p.val == stop {
|
||||
return
|
||||
break loop
|
||||
}
|
||||
}
|
||||
case rightParen:
|
||||
if p.quote == subCmd {
|
||||
return
|
||||
break loop
|
||||
}
|
||||
case bckQuote:
|
||||
if p.quote == subCmdBckquo {
|
||||
return
|
||||
break loop
|
||||
}
|
||||
case dblSemicolon, semiAnd, dblSemiAnd, semiOr:
|
||||
if p.quote == switchCase {
|
||||
return
|
||||
break loop
|
||||
}
|
||||
p.curErr("%s can only be used in a case clause", p.tok)
|
||||
}
|
||||
@@ -477,13 +486,14 @@ func (p *Parser) stmts(stops ...string) (sts []*Stmt) {
|
||||
if s, end := p.getStmt(true, false); s == nil {
|
||||
p.invalidStmtStart()
|
||||
} else {
|
||||
if sts == nil {
|
||||
sts = p.stList()
|
||||
if sl.Stmts == nil {
|
||||
sl.Stmts = p.stList()
|
||||
}
|
||||
sts = append(sts, s)
|
||||
sl.Stmts = append(sl.Stmts, s)
|
||||
gotEnd = end
|
||||
}
|
||||
}
|
||||
sl.Last, p.accComs = p.accComs, nil
|
||||
return
|
||||
}
|
||||
|
||||
@@ -564,7 +574,7 @@ func (p *Parser) wordPart() WordPart {
|
||||
old := p.preNested(subCmd)
|
||||
p.rune() // don't tokenize '|'
|
||||
p.next()
|
||||
cs.Stmts = p.stmts("}")
|
||||
cs.StmtList = p.stmts("}")
|
||||
p.postNested(old)
|
||||
cs.Right = p.pos
|
||||
if !p.gotRsrv("}") {
|
||||
@@ -608,7 +618,7 @@ func (p *Parser) wordPart() WordPart {
|
||||
cs := &CmdSubst{Left: p.pos}
|
||||
old := p.preNested(subCmd)
|
||||
p.next()
|
||||
cs.Stmts = p.stmts()
|
||||
cs.StmtList = p.stmts()
|
||||
p.postNested(old)
|
||||
cs.Right = p.matched(cs.Left, leftParen, rightParen)
|
||||
return cs
|
||||
@@ -620,7 +630,7 @@ func (p *Parser) wordPart() WordPart {
|
||||
ps := &ProcSubst{Op: ProcOperator(p.tok), OpPos: p.pos}
|
||||
old := p.preNested(subCmd)
|
||||
p.next()
|
||||
ps.Stmts = p.stmts()
|
||||
ps.StmtList = p.stmts()
|
||||
p.postNested(old)
|
||||
ps.Rparen = p.matched(ps.OpPos, token(ps.Op), rightParen)
|
||||
return ps
|
||||
@@ -628,12 +638,13 @@ func (p *Parser) wordPart() WordPart {
|
||||
if p.quote&allArithmExpr != 0 {
|
||||
p.curErr("quotes should not be used in arithmetic expressions")
|
||||
}
|
||||
sq := &SglQuoted{Position: p.pos}
|
||||
sq := &SglQuoted{Left: p.pos}
|
||||
r := p.r
|
||||
loop:
|
||||
for p.newLit(r); ; r = p.rune() {
|
||||
switch r {
|
||||
case utf8.RuneSelf, '\'':
|
||||
sq.Right = p.getPos()
|
||||
sq.Value = p.endLit()
|
||||
p.rune()
|
||||
break loop
|
||||
@@ -648,7 +659,7 @@ func (p *Parser) wordPart() WordPart {
|
||||
if p.quote&allArithmExpr != 0 {
|
||||
p.curErr("quotes should not be used in arithmetic expressions")
|
||||
}
|
||||
sq := &SglQuoted{Position: p.pos, Dollar: true}
|
||||
sq := &SglQuoted{Left: p.pos, Dollar: true}
|
||||
old := p.quote
|
||||
p.quote = sglQuotes
|
||||
p.next()
|
||||
@@ -657,6 +668,7 @@ func (p *Parser) wordPart() WordPart {
|
||||
sq.Value = p.val
|
||||
p.next()
|
||||
}
|
||||
sq.Right = p.pos
|
||||
if !p.got(sglQuote) {
|
||||
p.quoteErr(sq.Pos(), sglQuote)
|
||||
}
|
||||
@@ -679,7 +691,7 @@ func (p *Parser) wordPart() WordPart {
|
||||
cs := &CmdSubst{Left: p.pos}
|
||||
old := p.preNested(subCmdBckquo)
|
||||
p.next()
|
||||
cs.Stmts = p.stmts()
|
||||
cs.StmtList = p.stmts()
|
||||
p.postNested(old)
|
||||
cs.Right = p.pos
|
||||
if !p.got(bckQuote) {
|
||||
@@ -706,7 +718,7 @@ func (p *Parser) wordPart() WordPart {
|
||||
}
|
||||
}
|
||||
}
|
||||
eg.Pattern = p.lit(eg.OpPos+2, p.endLit())
|
||||
eg.Pattern = p.lit(posAddCol(eg.OpPos, 2), p.endLit())
|
||||
p.rune()
|
||||
p.next()
|
||||
if lparens != -1 {
|
||||
@@ -941,7 +953,7 @@ func (p *Parser) arithmExprBase(compact bool) ArithmExpr {
|
||||
|
||||
func (p *Parser) shortParamExp() *ParamExp {
|
||||
pe := &ParamExp{Dollar: p.pos, Short: true}
|
||||
p.pos++
|
||||
p.pos = posAddCol(p.pos, 1)
|
||||
switch p.r {
|
||||
case '@', '*', '#', '$', '?', '!', '0', '-':
|
||||
p.tok, p.val = _LitWord, string(p.r)
|
||||
@@ -1152,10 +1164,10 @@ func (p *Parser) getAssign(needEqual bool) *Assign {
|
||||
}
|
||||
as.Name = p.lit(p.pos, p.val[:nameEnd])
|
||||
// since we're not using the entire p.val
|
||||
as.Name.ValueEnd = as.Name.ValuePos + Pos(nameEnd)
|
||||
left := p.lit(p.pos+1, p.val[p.asPos+1:])
|
||||
as.Name.ValueEnd = posAddCol(as.Name.ValuePos, nameEnd)
|
||||
left := p.lit(posAddCol(p.pos, 1), p.val[p.asPos+1:])
|
||||
if left.Value != "" {
|
||||
left.ValuePos += Pos(p.asPos)
|
||||
left.ValuePos = posAddCol(left.ValuePos, p.asPos)
|
||||
as.Value = p.word(p.wps(left))
|
||||
}
|
||||
p.next()
|
||||
@@ -1163,7 +1175,7 @@ func (p *Parser) getAssign(needEqual bool) *Assign {
|
||||
as.Name = p.lit(p.pos, p.val)
|
||||
// hasValidIdent already checks p.r is '['
|
||||
p.rune()
|
||||
left := p.pos + 1
|
||||
left := posAddCol(p.pos, 1)
|
||||
old := p.preNested(arithmExprBrack)
|
||||
p.next()
|
||||
if p.tok == star {
|
||||
@@ -1182,7 +1194,7 @@ func (p *Parser) getAssign(needEqual bool) *Assign {
|
||||
if len(p.val) > 0 && p.val[0] == '+' {
|
||||
as.Append = true
|
||||
p.val = p.val[1:]
|
||||
p.pos++
|
||||
p.pos = posAddCol(p.pos, 1)
|
||||
}
|
||||
if len(p.val) < 1 || p.val[0] != '=' {
|
||||
if as.Append {
|
||||
@@ -1192,7 +1204,7 @@ func (p *Parser) getAssign(needEqual bool) *Assign {
|
||||
}
|
||||
return nil
|
||||
}
|
||||
p.pos++
|
||||
p.pos = posAddCol(p.pos, 1)
|
||||
p.val = p.val[1:]
|
||||
if p.val == "" {
|
||||
p.next()
|
||||
@@ -1214,6 +1226,7 @@ func (p *Parser) getAssign(needEqual bool) *Assign {
|
||||
p.next()
|
||||
for p.tok != _EOF && p.tok != rightParen {
|
||||
ae := &ArrayElem{}
|
||||
ae.Comments, p.accComs = p.accComs, nil
|
||||
if p.tok == leftBrack {
|
||||
left := p.pos
|
||||
p.quote = arithmExprBrack
|
||||
@@ -1235,9 +1248,18 @@ func (p *Parser) getAssign(needEqual bool) *Assign {
|
||||
}
|
||||
if ae.Value = p.getWord(); ae.Value == nil {
|
||||
p.curErr("array element values must be words")
|
||||
break
|
||||
}
|
||||
if len(p.accComs) > 0 {
|
||||
c := p.accComs[0]
|
||||
if c.Pos().Line() == ae.End().Line() {
|
||||
ae.Comments = append(ae.Comments, c)
|
||||
p.accComs = p.accComs[1:]
|
||||
}
|
||||
}
|
||||
as.Array.Elems = append(as.Array.Elems, ae)
|
||||
}
|
||||
as.Array.Last, p.accComs = p.accComs, nil
|
||||
p.postNested(old)
|
||||
as.Array.Rparen = p.matched(as.Array.Lparen, leftParen, rightParen)
|
||||
} else if w := p.getWord(); w != nil {
|
||||
@@ -1285,13 +1307,14 @@ func (p *Parser) doRedirect(s *Stmt) {
|
||||
|
||||
func (p *Parser) getStmt(readEnd, binCmd bool) (s *Stmt, gotEnd bool) {
|
||||
s = p.stmt(p.pos)
|
||||
s.Comments, p.accComs = p.accComs, nil
|
||||
if p.gotRsrv("!") {
|
||||
s.Negated = true
|
||||
if p.newLine || stopToken(p.tok) {
|
||||
p.posErr(s.Pos(), `! cannot form a statement alone`)
|
||||
}
|
||||
}
|
||||
if s = p.gotStmtPipe(s); s == nil {
|
||||
if s = p.gotStmtPipe(s); s == nil || p.err != nil {
|
||||
return
|
||||
}
|
||||
switch p.tok {
|
||||
@@ -1309,11 +1332,13 @@ func (p *Parser) getStmt(readEnd, binCmd bool) (s *Stmt, gotEnd bool) {
|
||||
X: s,
|
||||
}
|
||||
p.next()
|
||||
if b.Y, _ = p.getStmt(false, true); b.Y == nil {
|
||||
if b.Y, _ = p.getStmt(false, true); b.Y == nil || p.err != nil {
|
||||
p.followErr(b.OpPos, b.Op.String(), "a statement")
|
||||
return
|
||||
}
|
||||
s = p.stmt(s.Position)
|
||||
s.Cmd = b
|
||||
s.Comments, b.X.Comments = b.X.Comments, nil
|
||||
}
|
||||
if p.tok != semicolon {
|
||||
break
|
||||
@@ -1332,6 +1357,13 @@ func (p *Parser) getStmt(readEnd, binCmd bool) (s *Stmt, gotEnd bool) {
|
||||
s.Coprocess = true
|
||||
}
|
||||
gotEnd = s.Semicolon.IsValid() || s.Background || s.Coprocess
|
||||
if len(p.accComs) > 0 && !binCmd {
|
||||
c := p.accComs[0]
|
||||
if c.Pos().Line() == s.End().Line() {
|
||||
s.Comments = append(s.Comments, c)
|
||||
p.accComs = p.accComs[1:]
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
@@ -1415,6 +1447,10 @@ preLoop:
|
||||
if p.lang == LangBash {
|
||||
s.Cmd = p.coprocClause()
|
||||
}
|
||||
case "select":
|
||||
if p.lang != LangPOSIX {
|
||||
s.Cmd = p.selectClause()
|
||||
}
|
||||
}
|
||||
if s.Cmd != nil {
|
||||
break
|
||||
@@ -1463,11 +1499,25 @@ preLoop:
|
||||
case or:
|
||||
b := &BinaryCmd{OpPos: p.pos, Op: BinCmdOperator(p.tok), X: s}
|
||||
p.next()
|
||||
if b.Y = p.gotStmtPipe(p.stmt(p.pos)); b.Y == nil {
|
||||
if b.Y = p.gotStmtPipe(p.stmt(p.pos)); b.Y == nil || p.err != nil {
|
||||
p.followErr(b.OpPos, b.Op.String(), "a statement")
|
||||
break
|
||||
}
|
||||
s = p.stmt(s.Position)
|
||||
s.Cmd = b
|
||||
s.Comments, b.X.Comments = b.X.Comments, nil
|
||||
move := 0
|
||||
for _, c := range p.accComs {
|
||||
// inline comment belongs in the parent
|
||||
if c.Hash.Line() >= b.Y.End().Line() {
|
||||
break
|
||||
}
|
||||
move++
|
||||
}
|
||||
if move > 0 {
|
||||
b.Y.Comments = p.accComs[:move]
|
||||
p.accComs = p.accComs[move:]
|
||||
}
|
||||
}
|
||||
return s
|
||||
}
|
||||
@@ -1476,7 +1526,7 @@ func (p *Parser) subshell() *Subshell {
|
||||
s := &Subshell{Lparen: p.pos}
|
||||
old := p.preNested(subCmd)
|
||||
p.next()
|
||||
s.Stmts = p.stmts()
|
||||
s.StmtList = p.stmts()
|
||||
p.postNested(old)
|
||||
s.Rparen = p.matched(s.Lparen, leftParen, rightParen)
|
||||
return s
|
||||
@@ -1500,7 +1550,7 @@ func (p *Parser) arithmExpCmd() Command {
|
||||
func (p *Parser) block() *Block {
|
||||
b := &Block{Lbrace: p.pos}
|
||||
p.next()
|
||||
b.Stmts = p.stmts("}")
|
||||
b.StmtList = p.stmts("}")
|
||||
b.Rbrace = p.pos
|
||||
if !p.gotRsrv("}") {
|
||||
p.matchingErr(b.Lbrace, "{", "}")
|
||||
@@ -1509,29 +1559,35 @@ func (p *Parser) block() *Block {
|
||||
}
|
||||
|
||||
func (p *Parser) ifClause() *IfClause {
|
||||
ic := &IfClause{If: p.pos}
|
||||
rif := &IfClause{IfPos: p.pos}
|
||||
p.next()
|
||||
ic.CondStmts = p.followStmts("if", ic.If, "then")
|
||||
ic.Then = p.followRsrv(ic.If, "if <cond>", "then")
|
||||
ic.ThenStmts = p.followStmts("then", ic.Then, "fi", "elif", "else")
|
||||
rif.Cond = p.followStmts("if", rif.IfPos, "then")
|
||||
rif.ThenPos = p.followRsrv(rif.IfPos, "if <cond>", "then")
|
||||
rif.Then = p.followStmts("then", rif.ThenPos, "fi", "elif", "else")
|
||||
curIf := rif
|
||||
for p.tok == _LitWord && p.val == "elif" {
|
||||
elf := &Elif{Elif: p.pos}
|
||||
elf := &IfClause{IfPos: p.pos, Elif: true}
|
||||
p.next()
|
||||
elf.CondStmts = p.followStmts("elif", elf.Elif, "then")
|
||||
elf.Then = p.followRsrv(elf.Elif, "elif <cond>", "then")
|
||||
elf.ThenStmts = p.followStmts("then", elf.Then, "fi", "elif", "else")
|
||||
ic.Elifs = append(ic.Elifs, elf)
|
||||
elf.Cond = p.followStmts("elif", elf.IfPos, "then")
|
||||
elf.ThenPos = p.followRsrv(elf.IfPos, "elif <cond>", "then")
|
||||
elf.Then = p.followStmts("then", elf.ThenPos, "fi", "elif", "else")
|
||||
s := p.stmt(elf.IfPos)
|
||||
s.Cmd = elf
|
||||
curIf.ElsePos = elf.IfPos
|
||||
curIf.Else.Stmts = []*Stmt{s}
|
||||
curIf = elf
|
||||
}
|
||||
if elsePos := p.pos; p.gotRsrv("else") {
|
||||
ic.Else = elsePos
|
||||
ic.ElseStmts = p.followStmts("else", ic.Else, "fi")
|
||||
curIf.ElsePos = elsePos
|
||||
curIf.Else = p.followStmts("else", curIf.ElsePos, "fi")
|
||||
}
|
||||
ic.Fi = p.stmtEnd(ic, "if", "fi")
|
||||
return ic
|
||||
rif.FiPos = p.stmtEnd(rif, "if", "fi")
|
||||
curIf.FiPos = rif.FiPos
|
||||
return rif
|
||||
}
|
||||
|
||||
func (p *Parser) whileClause(until bool) *WhileClause {
|
||||
wc := &WhileClause{While: p.pos, Until: until}
|
||||
wc := &WhileClause{WhilePos: p.pos, Until: until}
|
||||
rsrv := "while"
|
||||
rsrvCond := "while <cond>"
|
||||
if wc.Until {
|
||||
@@ -1539,24 +1595,24 @@ func (p *Parser) whileClause(until bool) *WhileClause {
|
||||
rsrvCond = "until <cond>"
|
||||
}
|
||||
p.next()
|
||||
wc.CondStmts = p.followStmts(rsrv, wc.While, "do")
|
||||
wc.Do = p.followRsrv(wc.While, rsrvCond, "do")
|
||||
wc.DoStmts = p.followStmts("do", wc.Do, "done")
|
||||
wc.Done = p.stmtEnd(wc, rsrv, "done")
|
||||
wc.Cond = p.followStmts(rsrv, wc.WhilePos, "do")
|
||||
wc.DoPos = p.followRsrv(wc.WhilePos, rsrvCond, "do")
|
||||
wc.Do = p.followStmts("do", wc.DoPos, "done")
|
||||
wc.DonePos = p.stmtEnd(wc, rsrv, "done")
|
||||
return wc
|
||||
}
|
||||
|
||||
func (p *Parser) forClause() *ForClause {
|
||||
fc := &ForClause{For: p.pos}
|
||||
fc := &ForClause{ForPos: p.pos}
|
||||
p.next()
|
||||
fc.Loop = p.loop(fc.For)
|
||||
fc.Do = p.followRsrv(fc.For, "for foo [in words]", "do")
|
||||
fc.DoStmts = p.followStmts("do", fc.Do, "done")
|
||||
fc.Done = p.stmtEnd(fc, "for", "done")
|
||||
fc.Loop = p.loop(fc.ForPos)
|
||||
fc.DoPos = p.followRsrv(fc.ForPos, "for foo [in words]", "do")
|
||||
fc.Do = p.followStmts("do", fc.DoPos, "done")
|
||||
fc.DonePos = p.stmtEnd(fc, "for", "done")
|
||||
return fc
|
||||
}
|
||||
|
||||
func (p *Parser) loop(forPos Pos) Loop {
|
||||
func (p *Parser) loop(fpos Pos) Loop {
|
||||
if p.lang != LangBash {
|
||||
switch p.tok {
|
||||
case leftParen, dblLeftParen:
|
||||
@@ -1580,9 +1636,14 @@ func (p *Parser) loop(forPos Pos) Loop {
|
||||
p.gotSameLine(semicolon)
|
||||
return cl
|
||||
}
|
||||
wi := &WordIter{}
|
||||
wi := p.wordIter("for", fpos)
|
||||
return &wi
|
||||
}
|
||||
|
||||
func (p *Parser) wordIter(ftok string, fpos Pos) WordIter {
|
||||
wi := WordIter{}
|
||||
if wi.Name = p.getLit(); wi.Name == nil {
|
||||
p.followErr(forPos, "for", "a literal")
|
||||
p.followErr(fpos, ftok, "a literal")
|
||||
}
|
||||
if p.gotRsrv("in") {
|
||||
for !p.newLine && p.tok != _EOF && p.tok != semicolon {
|
||||
@@ -1594,32 +1655,44 @@ func (p *Parser) loop(forPos Pos) Loop {
|
||||
}
|
||||
p.gotSameLine(semicolon)
|
||||
} else if !p.newLine && !p.got(semicolon) {
|
||||
p.followErr(forPos, "for foo", `"in", ; or a newline`)
|
||||
p.followErr(fpos, ftok+" foo", `"in", ; or a newline`)
|
||||
}
|
||||
return wi
|
||||
}
|
||||
|
||||
func (p *Parser) selectClause() *SelectClause {
|
||||
fc := &SelectClause{SelectPos: p.pos}
|
||||
p.next()
|
||||
fc.Loop = p.wordIter("select", fc.SelectPos)
|
||||
fc.DoPos = p.followRsrv(fc.SelectPos, "select foo [in words]", "do")
|
||||
fc.Do = p.followStmts("do", fc.DoPos, "done")
|
||||
fc.DonePos = p.stmtEnd(fc, "select", "done")
|
||||
return fc
|
||||
}
|
||||
|
||||
func (p *Parser) caseClause() *CaseClause {
|
||||
cc := &CaseClause{Case: p.pos}
|
||||
p.next()
|
||||
cc.Word = p.followWord("case", cc.Case)
|
||||
end := "esac"
|
||||
if p.gotRsrv("{") {
|
||||
if p.lang != LangMirBSDKorn {
|
||||
p.posErr(cc.Pos(), `"case i {" is a mksh feature`)
|
||||
}
|
||||
cc.Items = p.caseItems("}")
|
||||
cc.Esac = p.stmtEnd(cc, "case", "}")
|
||||
end = "}"
|
||||
} else {
|
||||
p.followRsrv(cc.Case, "case x", "in")
|
||||
cc.Items = p.caseItems("esac")
|
||||
cc.Esac = p.stmtEnd(cc, "case", "esac")
|
||||
}
|
||||
cc.Items = p.caseItems(end)
|
||||
cc.Last, p.accComs = p.accComs, nil
|
||||
cc.Esac = p.stmtEnd(cc, "case", end)
|
||||
return cc
|
||||
}
|
||||
|
||||
func (p *Parser) caseItems(stop string) (items []*CaseItem) {
|
||||
for p.tok != _EOF && !(p.tok == _LitWord && p.val == stop) {
|
||||
ci := &CaseItem{}
|
||||
ci.Comments, p.accComs = p.accComs, nil
|
||||
p.got(leftParen)
|
||||
for p.tok != _EOF {
|
||||
if w := p.getWord(); w == nil {
|
||||
@@ -1636,7 +1709,7 @@ func (p *Parser) caseItems(stop string) (items []*CaseItem) {
|
||||
}
|
||||
old := p.preNested(switchCase)
|
||||
p.next()
|
||||
ci.Stmts = p.stmts(stop)
|
||||
ci.StmtList = p.stmts(stop)
|
||||
p.postNested(old)
|
||||
ci.OpPos = p.pos
|
||||
switch p.tok {
|
||||
@@ -1646,8 +1719,17 @@ func (p *Parser) caseItems(stop string) (items []*CaseItem) {
|
||||
items = append(items, ci)
|
||||
return
|
||||
}
|
||||
ci.Last = append(ci.Last, p.accComs...)
|
||||
p.accComs = nil
|
||||
ci.Op = CaseOperator(p.tok)
|
||||
p.next()
|
||||
if len(p.accComs) > 0 {
|
||||
c := p.accComs[0]
|
||||
if c.Pos().Line() == ci.OpPos.Line() {
|
||||
ci.Comments = append(ci.Comments, c)
|
||||
p.accComs = p.accComs[1:]
|
||||
}
|
||||
}
|
||||
items = append(items, ci)
|
||||
}
|
||||
return
|
||||
@@ -1726,7 +1808,7 @@ func (p *Parser) testExpr(ftok token, fpos Pos, pastAndOr bool) TestExpr {
|
||||
|
||||
func (p *Parser) testExprBase(ftok token, fpos Pos) TestExpr {
|
||||
switch p.tok {
|
||||
case _EOF:
|
||||
case _EOF, rightParen:
|
||||
return nil
|
||||
case _LitWord:
|
||||
op := token(testUnaryOp(p.val))
|
||||
@@ -1762,8 +1844,6 @@ func (p *Parser) testExprBase(ftok token, fpos Pos) TestExpr {
|
||||
}
|
||||
pe.Rparen = p.matched(pe.Lparen, leftParen, rightParen)
|
||||
return pe
|
||||
case rightParen:
|
||||
return nil
|
||||
default:
|
||||
// since we don't have [[ as a token
|
||||
fstr := "[["
|
||||
@@ -1775,7 +1855,7 @@ func (p *Parser) testExprBase(ftok token, fpos Pos) TestExpr {
|
||||
}
|
||||
|
||||
func (p *Parser) declClause() *DeclClause {
|
||||
ds := &DeclClause{Position: p.pos, Variant: p.val}
|
||||
ds := &DeclClause{Variant: p.lit(p.pos, p.val)}
|
||||
p.next()
|
||||
for (p.tok == _LitWord || p.tok == _Lit) && p.val[0] == '-' {
|
||||
ds.Opts = append(ds.Opts, p.getWord())
|
||||
@@ -1788,8 +1868,13 @@ func (p *Parser) declClause() *DeclClause {
|
||||
Naked: true,
|
||||
Name: p.getLit(),
|
||||
})
|
||||
} else if w := p.getWord(); w != nil {
|
||||
ds.Assigns = append(ds.Assigns, &Assign{
|
||||
Naked: true,
|
||||
Value: w,
|
||||
})
|
||||
} else {
|
||||
p.followErr(p.pos, ds.Variant, "names or assignments")
|
||||
p.followErr(p.pos, ds.Variant.Value, "names or assignments")
|
||||
}
|
||||
}
|
||||
return ds
|
||||
|
395
vendor/github.com/mvdan/sh/syntax/printer.go
generated
vendored
395
vendor/github.com/mvdan/sh/syntax/printer.go
generated
vendored
@@ -29,11 +29,9 @@ func NewPrinter(options ...func(*Printer)) *Printer {
|
||||
// Print "pretty-prints" the given AST file to the given writer.
|
||||
func (p *Printer) Print(w io.Writer, f *File) error {
|
||||
p.reset()
|
||||
p.lines, p.comments = f.lines, f.Comments
|
||||
p.bufWriter.Reset(w)
|
||||
p.stmts(f.Stmts)
|
||||
p.commentsUpTo(0)
|
||||
p.newline(0)
|
||||
p.stmts(f.StmtList)
|
||||
p.newline(Pos{})
|
||||
return p.bufWriter.Flush()
|
||||
}
|
||||
|
||||
@@ -50,17 +48,14 @@ type Printer struct {
|
||||
indentSpaces int
|
||||
binNextLine bool
|
||||
|
||||
lines []Pos
|
||||
|
||||
wantSpace bool
|
||||
wantNewline bool
|
||||
wroteSemi bool
|
||||
|
||||
commentPadding int
|
||||
|
||||
// nline is the position of the next newline
|
||||
nline Pos
|
||||
nlineIndex int
|
||||
// line is the current line number
|
||||
line uint
|
||||
|
||||
// lastLevel is the last level of indentation that was used.
|
||||
lastLevel int
|
||||
@@ -72,9 +67,6 @@ type Printer struct {
|
||||
|
||||
nestedBinary bool
|
||||
|
||||
// comments is the list of pending comments to write.
|
||||
comments []*Comment
|
||||
|
||||
// pendingHdocs is the list of pending heredocs to write.
|
||||
pendingHdocs []*Redirect
|
||||
|
||||
@@ -86,27 +78,13 @@ type Printer struct {
|
||||
func (p *Printer) reset() {
|
||||
p.wantSpace, p.wantNewline = false, false
|
||||
p.commentPadding = 0
|
||||
p.nline, p.nlineIndex = 0, 0
|
||||
p.line = 0
|
||||
p.lastLevel, p.level = 0, 0
|
||||
p.levelIncs = p.levelIncs[:0]
|
||||
p.nestedBinary = false
|
||||
p.pendingHdocs = p.pendingHdocs[:0]
|
||||
}
|
||||
|
||||
func (p *Printer) incLine() {
|
||||
if p.nlineIndex++; p.nlineIndex >= len(p.lines) {
|
||||
p.nline = maxPos
|
||||
} else {
|
||||
p.nline = p.lines[p.nlineIndex]
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Printer) incLines(pos Pos) {
|
||||
for p.nline < pos {
|
||||
p.incLine()
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Printer) spaces(n int) {
|
||||
for i := 0; i < n; i++ {
|
||||
p.WriteByte(' ')
|
||||
@@ -119,7 +97,8 @@ func (p *Printer) bslashNewl() {
|
||||
}
|
||||
p.WriteString("\\\n")
|
||||
p.wantSpace = false
|
||||
p.incLine()
|
||||
p.line++
|
||||
p.indent()
|
||||
}
|
||||
|
||||
func (p *Printer) spacedString(s string) {
|
||||
@@ -139,7 +118,7 @@ func (p *Printer) semiOrNewl(s string, pos Pos) {
|
||||
p.WriteByte(';')
|
||||
}
|
||||
p.WriteByte(' ')
|
||||
p.incLines(pos)
|
||||
p.line = pos.Line()
|
||||
}
|
||||
p.WriteString(s)
|
||||
p.wantSpace = true
|
||||
@@ -180,45 +159,35 @@ func (p *Printer) indent() {
|
||||
func (p *Printer) newline(pos Pos) {
|
||||
p.wantNewline, p.wantSpace = false, false
|
||||
p.WriteByte('\n')
|
||||
if pos > p.nline {
|
||||
p.incLine()
|
||||
if p.line < pos.Line() {
|
||||
p.line++
|
||||
}
|
||||
hdocs := p.pendingHdocs
|
||||
p.pendingHdocs = p.pendingHdocs[:0]
|
||||
for _, r := range hdocs {
|
||||
if r.Hdoc != nil {
|
||||
p.word(r.Hdoc)
|
||||
p.incLines(r.Hdoc.End())
|
||||
p.line = r.Hdoc.End().Line()
|
||||
}
|
||||
p.unquotedWord(r.Word)
|
||||
p.line++
|
||||
p.WriteByte('\n')
|
||||
p.incLine()
|
||||
p.wantSpace = false
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Printer) newlines(pos Pos) {
|
||||
p.newline(pos)
|
||||
if pos > p.nline {
|
||||
if pos.Line() > p.line {
|
||||
// preserve single empty lines
|
||||
p.WriteByte('\n')
|
||||
p.incLine()
|
||||
p.line++
|
||||
}
|
||||
p.indent()
|
||||
}
|
||||
|
||||
func (p *Printer) commentsAndSeparate(pos Pos) {
|
||||
p.commentsUpTo(pos)
|
||||
if p.wantNewline || pos > p.nline {
|
||||
p.newlines(pos)
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Printer) sepTok(s string, pos Pos) {
|
||||
p.level++
|
||||
p.commentsUpTo(pos)
|
||||
p.level--
|
||||
if p.wantNewline || pos > p.nline {
|
||||
if p.wantNewline || pos.Line() > p.line {
|
||||
p.newlines(pos)
|
||||
}
|
||||
p.WriteString(s)
|
||||
@@ -226,10 +195,7 @@ func (p *Printer) sepTok(s string, pos Pos) {
|
||||
}
|
||||
|
||||
func (p *Printer) semiRsrv(s string, pos Pos, fallback bool) {
|
||||
p.level++
|
||||
p.commentsUpTo(pos)
|
||||
p.level--
|
||||
if p.wantNewline || pos > p.nline {
|
||||
if p.wantNewline || pos.Line() > p.line {
|
||||
p.newlines(pos)
|
||||
} else {
|
||||
if fallback && !p.wroteSemi {
|
||||
@@ -243,33 +209,23 @@ func (p *Printer) semiRsrv(s string, pos Pos, fallback bool) {
|
||||
p.wantSpace = true
|
||||
}
|
||||
|
||||
func (p *Printer) anyCommentsBefore(pos Pos) bool {
|
||||
if !pos.IsValid() || len(p.comments) < 1 {
|
||||
return false
|
||||
}
|
||||
return p.comments[0].Hash < pos
|
||||
}
|
||||
|
||||
func (p *Printer) commentsUpTo(pos Pos) {
|
||||
if len(p.comments) < 1 {
|
||||
return
|
||||
}
|
||||
c := p.comments[0]
|
||||
if pos.IsValid() && c.Hash >= pos {
|
||||
return
|
||||
}
|
||||
p.comments = p.comments[1:]
|
||||
func (p *Printer) comment(c Comment) {
|
||||
switch {
|
||||
case p.nlineIndex == 0:
|
||||
case c.Hash > p.nline:
|
||||
case p.line == 0:
|
||||
case c.Hash.Line() > p.line:
|
||||
p.newlines(c.Hash)
|
||||
case p.wantSpace:
|
||||
p.spaces(p.commentPadding + 1)
|
||||
}
|
||||
p.incLines(c.Hash)
|
||||
p.line = c.Hash.Line()
|
||||
p.WriteByte('#')
|
||||
p.WriteString(c.Text)
|
||||
p.commentsUpTo(pos)
|
||||
}
|
||||
|
||||
func (p *Printer) comments(cs []Comment) {
|
||||
for _, c := range cs {
|
||||
p.comment(c)
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Printer) wordPart(wp WordPart) {
|
||||
@@ -283,27 +239,27 @@ func (p *Printer) wordPart(wp WordPart) {
|
||||
p.WriteByte('\'')
|
||||
p.WriteString(x.Value)
|
||||
p.WriteByte('\'')
|
||||
p.incLines(x.End())
|
||||
p.line = x.End().Line()
|
||||
case *DblQuoted:
|
||||
p.dblQuoted(x)
|
||||
case *CmdSubst:
|
||||
p.incLines(x.Pos())
|
||||
p.line = x.Pos().Line()
|
||||
switch {
|
||||
case x.TempFile:
|
||||
p.WriteString("${")
|
||||
p.wantSpace = true
|
||||
p.nestedStmts(x.Stmts, x.Right)
|
||||
p.nestedStmts(x.StmtList, x.Right)
|
||||
p.wantSpace = false
|
||||
p.semiRsrv("}", x.Right, true)
|
||||
case x.ReplyVar:
|
||||
p.WriteString("${|")
|
||||
p.nestedStmts(x.Stmts, x.Right)
|
||||
p.nestedStmts(x.StmtList, x.Right)
|
||||
p.wantSpace = false
|
||||
p.semiRsrv("}", x.Right, true)
|
||||
default:
|
||||
p.WriteString("$(")
|
||||
p.wantSpace = len(x.Stmts) > 0 && startsWithLparen(x.Stmts[0])
|
||||
p.nestedStmts(x.Stmts, x.Right)
|
||||
p.nestedStmts(x.StmtList, x.Right)
|
||||
p.sepTok(")", x.Right)
|
||||
}
|
||||
case *ParamExp:
|
||||
@@ -326,7 +282,7 @@ func (p *Printer) wordPart(wp WordPart) {
|
||||
p.wantSpace = false
|
||||
}
|
||||
p.WriteString(x.Op.String())
|
||||
p.nestedStmts(x.Stmts, 0)
|
||||
p.nestedStmts(x.StmtList, Pos{})
|
||||
p.WriteByte(')')
|
||||
}
|
||||
}
|
||||
@@ -339,7 +295,7 @@ func (p *Printer) dblQuoted(dq *DblQuoted) {
|
||||
for i, n := range dq.Parts {
|
||||
p.wordPart(n)
|
||||
if i == len(dq.Parts)-1 {
|
||||
p.incLines(n.End())
|
||||
p.line = n.End().Line()
|
||||
}
|
||||
}
|
||||
p.WriteByte('"')
|
||||
@@ -521,14 +477,12 @@ func (p *Printer) unquotedWord(w *Word) {
|
||||
func (p *Printer) wordJoin(ws []*Word) {
|
||||
anyNewline := false
|
||||
for _, w := range ws {
|
||||
if pos := w.Pos(); pos > p.nline {
|
||||
p.commentsUpTo(pos)
|
||||
p.bslashNewl()
|
||||
if pos := w.Pos(); pos.Line() > p.line {
|
||||
if !anyNewline {
|
||||
p.incLevel()
|
||||
anyNewline = true
|
||||
}
|
||||
p.indent()
|
||||
p.bslashNewl()
|
||||
} else if p.wantSpace {
|
||||
p.WriteByte(' ')
|
||||
p.wantSpace = false
|
||||
@@ -540,30 +494,35 @@ func (p *Printer) wordJoin(ws []*Word) {
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Printer) elemJoin(elems []*ArrayElem) {
|
||||
anyNewline := false
|
||||
func (p *Printer) elemJoin(elems []*ArrayElem, last []Comment) {
|
||||
p.incLevel()
|
||||
for _, el := range elems {
|
||||
if pos := el.Pos(); pos > p.nline {
|
||||
p.commentsUpTo(pos)
|
||||
p.WriteByte('\n')
|
||||
p.incLine()
|
||||
if !anyNewline {
|
||||
p.incLevel()
|
||||
anyNewline = true
|
||||
var left *Comment
|
||||
for _, c := range el.Comments {
|
||||
if c.Pos().After(el.Pos()) {
|
||||
left = &c
|
||||
break
|
||||
}
|
||||
p.comment(c)
|
||||
}
|
||||
if el.Pos().Line() > p.line {
|
||||
p.newline(el.Pos())
|
||||
p.indent()
|
||||
} else if p.wantSpace {
|
||||
p.WriteByte(' ')
|
||||
p.wantSpace = false
|
||||
}
|
||||
if p.wroteIndex(el.Index) {
|
||||
p.WriteByte('=')
|
||||
}
|
||||
p.word(el.Value)
|
||||
if left != nil {
|
||||
p.comment(*left)
|
||||
}
|
||||
}
|
||||
if anyNewline {
|
||||
p.decLevel()
|
||||
if len(last) > 0 {
|
||||
p.comments(last)
|
||||
}
|
||||
p.decLevel()
|
||||
}
|
||||
|
||||
func (p *Printer) stmt(s *Stmt) {
|
||||
@@ -575,17 +534,11 @@ func (p *Printer) stmt(s *Stmt) {
|
||||
if s.Cmd != nil {
|
||||
startRedirs = p.command(s.Cmd, s.Redirs)
|
||||
}
|
||||
anyNewline := false
|
||||
p.incLevel()
|
||||
for _, r := range s.Redirs[startRedirs:] {
|
||||
if r.OpPos > p.nline {
|
||||
if r.OpPos.Line() > p.line {
|
||||
p.bslashNewl()
|
||||
if !anyNewline {
|
||||
p.incLevel()
|
||||
anyNewline = true
|
||||
}
|
||||
p.indent()
|
||||
}
|
||||
p.commentsAndSeparate(r.OpPos)
|
||||
if p.wantSpace {
|
||||
p.WriteByte(' ')
|
||||
}
|
||||
@@ -601,11 +554,8 @@ func (p *Printer) stmt(s *Stmt) {
|
||||
}
|
||||
p.wroteSemi = false
|
||||
switch {
|
||||
case s.Semicolon.IsValid() && s.Semicolon > p.nline:
|
||||
p.incLevel()
|
||||
case s.Semicolon.IsValid() && s.Semicolon.Line() > p.line:
|
||||
p.bslashNewl()
|
||||
p.indent()
|
||||
p.decLevel()
|
||||
p.WriteByte(';')
|
||||
p.wroteSemi = true
|
||||
case s.Background:
|
||||
@@ -613,9 +563,7 @@ func (p *Printer) stmt(s *Stmt) {
|
||||
case s.Coprocess:
|
||||
p.WriteString(" |&")
|
||||
}
|
||||
if anyNewline {
|
||||
p.decLevel()
|
||||
}
|
||||
p.decLevel()
|
||||
}
|
||||
|
||||
func (p *Printer) command(cmd Command, redirs []*Redirect) (startRedirs int) {
|
||||
@@ -631,7 +579,7 @@ func (p *Printer) command(cmd Command, redirs []*Redirect) (startRedirs int) {
|
||||
}
|
||||
p.wordJoin(x.Args[:1])
|
||||
for _, r := range redirs {
|
||||
if r.Pos() > x.Args[1].Pos() || r.Op == Hdoc || r.Op == DashHdoc {
|
||||
if r.Pos().After(x.Args[1].Pos()) || r.Op == Hdoc || r.Op == DashHdoc {
|
||||
break
|
||||
}
|
||||
if p.wantSpace {
|
||||
@@ -649,30 +597,14 @@ func (p *Printer) command(cmd Command, redirs []*Redirect) (startRedirs int) {
|
||||
case *Block:
|
||||
p.WriteByte('{')
|
||||
p.wantSpace = true
|
||||
p.nestedStmts(x.Stmts, x.Rbrace)
|
||||
p.nestedStmts(x.StmtList, x.Rbrace)
|
||||
p.semiRsrv("}", x.Rbrace, true)
|
||||
case *IfClause:
|
||||
p.spacedString("if")
|
||||
p.nestedStmts(x.CondStmts, 0)
|
||||
p.semiOrNewl("then", x.Then)
|
||||
p.nestedStmts(x.ThenStmts, 0)
|
||||
for _, el := range x.Elifs {
|
||||
p.semiRsrv("elif", el.Elif, true)
|
||||
p.nestedStmts(el.CondStmts, 0)
|
||||
p.semiOrNewl("then", el.Then)
|
||||
p.nestedStmts(el.ThenStmts, 0)
|
||||
}
|
||||
if len(x.ElseStmts) > 0 {
|
||||
p.semiRsrv("else", x.Else, true)
|
||||
p.nestedStmts(x.ElseStmts, 0)
|
||||
} else if x.Else.IsValid() {
|
||||
p.incLines(x.Else)
|
||||
}
|
||||
p.semiRsrv("fi", x.Fi, true)
|
||||
p.ifClause(x, false)
|
||||
case *Subshell:
|
||||
p.WriteByte('(')
|
||||
p.wantSpace = len(x.Stmts) > 0 && startsWithLparen(x.Stmts[0])
|
||||
p.nestedStmts(x.Stmts, x.Rparen)
|
||||
p.nestedStmts(x.StmtList, x.Rparen)
|
||||
p.sepTok(")", x.Rparen)
|
||||
case *WhileClause:
|
||||
if x.Until {
|
||||
@@ -680,19 +612,25 @@ func (p *Printer) command(cmd Command, redirs []*Redirect) (startRedirs int) {
|
||||
} else {
|
||||
p.spacedString("while")
|
||||
}
|
||||
p.nestedStmts(x.CondStmts, 0)
|
||||
p.semiOrNewl("do", x.Do)
|
||||
p.nestedStmts(x.DoStmts, 0)
|
||||
p.semiRsrv("done", x.Done, true)
|
||||
p.nestedStmts(x.Cond, Pos{})
|
||||
p.semiOrNewl("do", x.DoPos)
|
||||
p.nestedStmts(x.Do, Pos{})
|
||||
p.semiRsrv("done", x.DonePos, true)
|
||||
case *ForClause:
|
||||
p.WriteString("for ")
|
||||
p.loop(x.Loop)
|
||||
p.semiOrNewl("do", x.Do)
|
||||
p.nestedStmts(x.DoStmts, 0)
|
||||
p.semiRsrv("done", x.Done, true)
|
||||
p.semiOrNewl("do", x.DoPos)
|
||||
p.nestedStmts(x.Do, Pos{})
|
||||
p.semiRsrv("done", x.DonePos, true)
|
||||
case *SelectClause:
|
||||
p.WriteString("select ")
|
||||
p.loop(&x.Loop)
|
||||
p.semiOrNewl("do", x.DoPos)
|
||||
p.nestedStmts(x.Do, Pos{})
|
||||
p.semiRsrv("done", x.DonePos, true)
|
||||
case *BinaryCmd:
|
||||
p.stmt(x.X)
|
||||
if x.Y.Pos() < p.nline {
|
||||
if x.Y.Pos().Line() <= p.line {
|
||||
// leave p.nestedBinary untouched
|
||||
p.spacedString(x.Op.String())
|
||||
p.stmt(x.Y)
|
||||
@@ -705,29 +643,25 @@ func (p *Printer) command(cmd Command, redirs []*Redirect) (startRedirs int) {
|
||||
if p.binNextLine {
|
||||
if len(p.pendingHdocs) == 0 {
|
||||
p.bslashNewl()
|
||||
p.indent()
|
||||
}
|
||||
p.spacedString(x.Op.String())
|
||||
if p.anyCommentsBefore(x.Y.Pos()) {
|
||||
if len(x.Y.Comments) > 0 {
|
||||
p.wantSpace = false
|
||||
p.WriteByte('\n')
|
||||
p.indent()
|
||||
p.incLines(p.comments[0].Pos())
|
||||
p.commentsUpTo(x.Y.Pos())
|
||||
p.comments(x.Y.Comments)
|
||||
p.WriteByte('\n')
|
||||
p.indent()
|
||||
}
|
||||
} else {
|
||||
p.wantSpace = true
|
||||
p.spacedString(x.Op.String())
|
||||
if x.OpPos > p.nline {
|
||||
p.incLines(x.OpPos)
|
||||
}
|
||||
p.commentsUpTo(x.Y.Pos())
|
||||
p.newline(0)
|
||||
p.line = x.OpPos.Line()
|
||||
p.comments(x.Y.Comments)
|
||||
p.newline(Pos{})
|
||||
p.indent()
|
||||
}
|
||||
p.incLines(x.Y.Pos())
|
||||
p.line = x.Y.Pos().Line()
|
||||
_, p.nestedBinary = x.Y.Cmd.(*BinaryCmd)
|
||||
p.stmt(x.Y)
|
||||
if indent {
|
||||
@@ -740,14 +674,24 @@ func (p *Printer) command(cmd Command, redirs []*Redirect) (startRedirs int) {
|
||||
}
|
||||
p.WriteString(x.Name.Value)
|
||||
p.WriteString("() ")
|
||||
p.incLines(x.Body.Pos())
|
||||
p.line = x.Body.Pos().Line()
|
||||
p.stmt(x.Body)
|
||||
case *CaseClause:
|
||||
p.WriteString("case ")
|
||||
p.word(x.Word)
|
||||
p.WriteString(" in")
|
||||
for _, ci := range x.Items {
|
||||
p.commentsAndSeparate(ci.Patterns[0].Pos())
|
||||
var inlineCom *Comment
|
||||
for _, c := range ci.Comments {
|
||||
if c.Pos().After(ci.Patterns[0].Pos()) {
|
||||
inlineCom = &c
|
||||
break
|
||||
}
|
||||
p.comment(c)
|
||||
}
|
||||
if pos := ci.Patterns[0].Pos(); pos.Line() > p.line {
|
||||
p.newlines(pos)
|
||||
}
|
||||
for i, w := range ci.Patterns {
|
||||
if i > 0 {
|
||||
p.spacedString("|")
|
||||
@@ -759,20 +703,21 @@ func (p *Printer) command(cmd Command, redirs []*Redirect) (startRedirs int) {
|
||||
}
|
||||
p.WriteByte(')')
|
||||
p.wantSpace = true
|
||||
sep := len(ci.Stmts) > 1 || (len(ci.Stmts) > 0 && ci.Stmts[0].Pos() > p.nline)
|
||||
p.nestedStmts(ci.Stmts, 0)
|
||||
sep := len(ci.Stmts) > 1 || ci.StmtList.pos().Line() > p.line
|
||||
sl := ci.StmtList
|
||||
p.nestedStmts(sl, Pos{})
|
||||
p.level++
|
||||
if sep {
|
||||
p.commentsUpTo(ci.OpPos)
|
||||
p.newlines(ci.OpPos)
|
||||
}
|
||||
p.spacedString(ci.Op.String())
|
||||
p.incLines(ci.OpPos)
|
||||
p.level--
|
||||
if sep || ci.OpPos == x.Esac {
|
||||
p.wantNewline = true
|
||||
}
|
||||
p.spacedString(ci.Op.String())
|
||||
if inlineCom != nil {
|
||||
p.comment(*inlineCom)
|
||||
}
|
||||
p.level--
|
||||
}
|
||||
p.comments(x.Last)
|
||||
p.semiRsrv("esac", x.Esac, len(x.Items) == 0)
|
||||
case *ArithmCmd:
|
||||
p.WriteString("((")
|
||||
@@ -786,7 +731,7 @@ func (p *Printer) command(cmd Command, redirs []*Redirect) (startRedirs int) {
|
||||
p.testExpr(x.X)
|
||||
p.spacedString("]]")
|
||||
case *DeclClause:
|
||||
p.spacedString(x.Variant)
|
||||
p.spacedString(x.Variant.Value)
|
||||
for _, w := range x.Opts {
|
||||
p.WriteByte(' ')
|
||||
p.word(w)
|
||||
@@ -814,6 +759,27 @@ func (p *Printer) command(cmd Command, redirs []*Redirect) (startRedirs int) {
|
||||
return startRedirs
|
||||
}
|
||||
|
||||
func (p *Printer) ifClause(ic *IfClause, elif bool) {
|
||||
if !elif {
|
||||
p.spacedString("if")
|
||||
}
|
||||
p.nestedStmts(ic.Cond, Pos{})
|
||||
p.semiOrNewl("then", ic.ThenPos)
|
||||
p.nestedStmts(ic.Then, Pos{})
|
||||
if ic.FollowedByElif() {
|
||||
p.semiRsrv("elif", ic.ElsePos, true)
|
||||
p.ifClause(ic.Else.Stmts[0].Cmd.(*IfClause), true)
|
||||
return
|
||||
}
|
||||
if !ic.Else.empty() {
|
||||
p.semiRsrv("else", ic.ElsePos, true)
|
||||
p.nestedStmts(ic.Else, Pos{})
|
||||
} else if ic.ElsePos.IsValid() {
|
||||
p.line = ic.ElsePos.Line()
|
||||
}
|
||||
p.semiRsrv("fi", ic.FiPos, true)
|
||||
}
|
||||
|
||||
func startsWithLparen(s *Stmt) bool {
|
||||
switch x := s.Cmd.(type) {
|
||||
case *Subshell:
|
||||
@@ -824,94 +790,94 @@ func startsWithLparen(s *Stmt) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (p *Printer) hasInline(pos, npos, nline Pos) bool {
|
||||
for _, c := range p.comments {
|
||||
if c.Hash > nline {
|
||||
return false
|
||||
}
|
||||
if c.Hash > pos && (npos == 0 || c.Hash < npos) {
|
||||
func (p *Printer) hasInline(s *Stmt) bool {
|
||||
for _, c := range s.Comments {
|
||||
if c.Pos().Line() == s.End().Line() {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (p *Printer) stmts(stmts []*Stmt) {
|
||||
switch len(stmts) {
|
||||
func (p *Printer) stmts(sl StmtList) {
|
||||
switch len(sl.Stmts) {
|
||||
case 0:
|
||||
p.comments(sl.Last)
|
||||
return
|
||||
case 1:
|
||||
s := stmts[0]
|
||||
s := sl.Stmts[0]
|
||||
pos := s.Pos()
|
||||
p.commentsUpTo(pos)
|
||||
if pos <= p.nline {
|
||||
var inlineCom *Comment
|
||||
for _, c := range s.Comments {
|
||||
if c.Pos().After(s.Pos()) {
|
||||
inlineCom = &c
|
||||
break
|
||||
}
|
||||
p.comment(c)
|
||||
}
|
||||
if pos.Line() <= p.line {
|
||||
p.stmt(s)
|
||||
} else {
|
||||
if p.nlineIndex > 0 {
|
||||
if p.line > 0 {
|
||||
p.newlines(pos)
|
||||
}
|
||||
p.incLines(pos)
|
||||
p.line = pos.Line()
|
||||
p.stmt(s)
|
||||
p.wantNewline = true
|
||||
}
|
||||
if inlineCom != nil {
|
||||
p.comment(*inlineCom)
|
||||
}
|
||||
p.comments(sl.Last)
|
||||
return
|
||||
}
|
||||
inlineIndent := 0
|
||||
for i, s := range stmts {
|
||||
lastIndentedLine := uint(0)
|
||||
for i, s := range sl.Stmts {
|
||||
pos := s.Pos()
|
||||
ind := p.nlineIndex
|
||||
p.commentsUpTo(pos)
|
||||
if p.nlineIndex > 0 {
|
||||
var inlineCom *Comment
|
||||
for _, c := range s.Comments {
|
||||
if c.Pos().After(s.Pos()) {
|
||||
inlineCom = &c
|
||||
break
|
||||
}
|
||||
p.comment(c)
|
||||
}
|
||||
if p.line > 0 {
|
||||
p.newlines(pos)
|
||||
}
|
||||
p.incLines(pos)
|
||||
p.line = pos.Line()
|
||||
p.stmt(s)
|
||||
var npos Pos
|
||||
if i+1 < len(stmts) {
|
||||
npos = stmts[i+1].Pos()
|
||||
}
|
||||
if !p.hasInline(pos, npos, p.nline) {
|
||||
if !p.hasInline(s) {
|
||||
inlineIndent = 0
|
||||
p.commentPadding = 0
|
||||
continue
|
||||
}
|
||||
if ind < len(p.lines)-1 && s.End() > p.lines[ind+1] {
|
||||
if s.Pos().Line() > lastIndentedLine+1 {
|
||||
inlineIndent = 0
|
||||
}
|
||||
if inlineIndent == 0 {
|
||||
ind2 := p.nlineIndex
|
||||
nline2 := p.nline
|
||||
follow := stmts[i:]
|
||||
for j, s2 := range follow {
|
||||
pos2 := s2.Pos()
|
||||
var npos2 Pos
|
||||
if j+1 < len(follow) {
|
||||
npos2 = follow[j+1].Pos()
|
||||
}
|
||||
if !p.hasInline(pos2, npos2, nline2) {
|
||||
for _, s2 := range sl.Stmts[i:] {
|
||||
if !p.hasInline(s2) {
|
||||
break
|
||||
}
|
||||
if l := p.stmtCols(s2); l > inlineIndent {
|
||||
inlineIndent = l
|
||||
}
|
||||
if ind2++; ind2 >= len(p.lines) {
|
||||
nline2 = maxPos
|
||||
} else {
|
||||
nline2 = p.lines[ind2]
|
||||
}
|
||||
}
|
||||
if ind2 == p.nlineIndex+1 {
|
||||
// no inline comments directly after this one
|
||||
continue
|
||||
}
|
||||
}
|
||||
if inlineIndent > 0 {
|
||||
if l := p.stmtCols(s); l > 0 {
|
||||
p.commentPadding = inlineIndent - l
|
||||
}
|
||||
lastIndentedLine = p.line
|
||||
}
|
||||
if inlineCom != nil {
|
||||
p.comment(*inlineCom)
|
||||
}
|
||||
}
|
||||
p.wantNewline = true
|
||||
p.comments(sl.Last)
|
||||
}
|
||||
|
||||
type byteCounter int
|
||||
@@ -942,36 +908,33 @@ func (c *byteCounter) Flush() error { return nil }
|
||||
// stmtCols reports the length that s will take when formatted in a
|
||||
// single line. If it will span multiple lines, stmtCols will return -1.
|
||||
func (p *Printer) stmtCols(s *Stmt) int {
|
||||
if p.lenPrinter == nil {
|
||||
return -1 // stmtCols call within stmtCols, bail
|
||||
}
|
||||
*p.lenPrinter = Printer{
|
||||
bufWriter: &p.lenCounter,
|
||||
lines: p.lines,
|
||||
}
|
||||
p.lenPrinter.bufWriter.Reset(nil)
|
||||
p.lenPrinter.incLines(s.Pos())
|
||||
p.lenPrinter.line = s.Pos().Line()
|
||||
p.lenPrinter.stmt(s)
|
||||
return int(p.lenCounter)
|
||||
}
|
||||
|
||||
func (p *Printer) nestedStmts(stmts []*Stmt, closing Pos) {
|
||||
func (p *Printer) nestedStmts(sl StmtList, closing Pos) {
|
||||
p.incLevel()
|
||||
if len(stmts) == 1 && closing > p.nline && stmts[0].End() <= p.nline {
|
||||
p.newline(0)
|
||||
if len(sl.Stmts) == 1 && closing.Line() > p.line && sl.Stmts[0].End().Line() <= p.line {
|
||||
p.newline(Pos{})
|
||||
p.indent()
|
||||
}
|
||||
p.stmts(stmts)
|
||||
p.stmts(sl)
|
||||
p.decLevel()
|
||||
}
|
||||
|
||||
func (p *Printer) assigns(assigns []*Assign, alwaysEqual bool) {
|
||||
anyNewline := false
|
||||
p.incLevel()
|
||||
for _, a := range assigns {
|
||||
if a.Pos() > p.nline {
|
||||
if a.Pos().Line() > p.line {
|
||||
p.bslashNewl()
|
||||
if !anyNewline {
|
||||
p.incLevel()
|
||||
anyNewline = true
|
||||
}
|
||||
p.indent()
|
||||
} else if p.wantSpace {
|
||||
p.WriteByte(' ')
|
||||
}
|
||||
@@ -990,12 +953,10 @@ func (p *Printer) assigns(assigns []*Assign, alwaysEqual bool) {
|
||||
} else if a.Array != nil {
|
||||
p.wantSpace = false
|
||||
p.WriteByte('(')
|
||||
p.elemJoin(a.Array.Elems)
|
||||
p.elemJoin(a.Array.Elems, a.Array.Last)
|
||||
p.sepTok(")", a.Array.Rparen)
|
||||
}
|
||||
p.wantSpace = true
|
||||
}
|
||||
if anyNewline {
|
||||
p.decLevel()
|
||||
}
|
||||
p.decLevel()
|
||||
}
|
||||
|
7
vendor/github.com/mvdan/sh/syntax/simplify.go
generated
vendored
7
vendor/github.com/mvdan/sh/syntax/simplify.go
generated
vendored
@@ -121,9 +121,10 @@ parts:
|
||||
}
|
||||
s.modified = true
|
||||
wps[i] = &SglQuoted{
|
||||
Position: dq.Position,
|
||||
Dollar: dq.Dollar,
|
||||
Value: newVal,
|
||||
Left: dq.Pos(),
|
||||
Right: dq.End(),
|
||||
Dollar: dq.Dollar,
|
||||
Value: newVal,
|
||||
}
|
||||
}
|
||||
return wps
|
||||
|
39
vendor/github.com/mvdan/sh/syntax/walk.go
generated
vendored
39
vendor/github.com/mvdan/sh/syntax/walk.go
generated
vendored
@@ -5,8 +5,8 @@ package syntax
|
||||
|
||||
import "fmt"
|
||||
|
||||
func walkStmts(stmts []*Stmt, f func(Node) bool) {
|
||||
for _, s := range stmts {
|
||||
func walkStmts(sl StmtList, f func(Node) bool) {
|
||||
for _, s := range sl.Stmts {
|
||||
Walk(s, f)
|
||||
}
|
||||
}
|
||||
@@ -28,7 +28,7 @@ func Walk(node Node, f func(Node) bool) {
|
||||
|
||||
switch x := node.(type) {
|
||||
case *File:
|
||||
walkStmts(x.Stmts, f)
|
||||
walkStmts(x.StmtList, f)
|
||||
case *Stmt:
|
||||
if x.Cmd != nil {
|
||||
Walk(x.Cmd, f)
|
||||
@@ -63,23 +63,22 @@ func Walk(node Node, f func(Node) bool) {
|
||||
case *CallExpr:
|
||||
walkWords(x.Args, f)
|
||||
case *Subshell:
|
||||
walkStmts(x.Stmts, f)
|
||||
walkStmts(x.StmtList, f)
|
||||
case *Block:
|
||||
walkStmts(x.Stmts, f)
|
||||
walkStmts(x.StmtList, f)
|
||||
case *IfClause:
|
||||
walkStmts(x.CondStmts, f)
|
||||
walkStmts(x.ThenStmts, f)
|
||||
for _, elif := range x.Elifs {
|
||||
walkStmts(elif.CondStmts, f)
|
||||
walkStmts(elif.ThenStmts, f)
|
||||
}
|
||||
walkStmts(x.ElseStmts, f)
|
||||
walkStmts(x.Cond, f)
|
||||
walkStmts(x.Then, f)
|
||||
walkStmts(x.Else, f)
|
||||
case *WhileClause:
|
||||
walkStmts(x.CondStmts, f)
|
||||
walkStmts(x.DoStmts, f)
|
||||
walkStmts(x.Cond, f)
|
||||
walkStmts(x.Do, f)
|
||||
case *ForClause:
|
||||
Walk(x.Loop, f)
|
||||
walkStmts(x.DoStmts, f)
|
||||
walkStmts(x.Do, f)
|
||||
case *SelectClause:
|
||||
Walk(&x.Loop, f)
|
||||
walkStmts(x.Do, f)
|
||||
case *WordIter:
|
||||
Walk(x.Name, f)
|
||||
walkWords(x.Items, f)
|
||||
@@ -110,7 +109,7 @@ func Walk(node Node, f func(Node) bool) {
|
||||
Walk(wp, f)
|
||||
}
|
||||
case *CmdSubst:
|
||||
walkStmts(x.Stmts, f)
|
||||
walkStmts(x.StmtList, f)
|
||||
case *ParamExp:
|
||||
Walk(x.Param, f)
|
||||
if x.Index != nil {
|
||||
@@ -148,9 +147,11 @@ func Walk(node Node, f func(Node) bool) {
|
||||
case *CaseClause:
|
||||
Walk(x.Word, f)
|
||||
for _, ci := range x.Items {
|
||||
walkWords(ci.Patterns, f)
|
||||
walkStmts(ci.Stmts, f)
|
||||
Walk(ci, f)
|
||||
}
|
||||
case *CaseItem:
|
||||
walkWords(x.Patterns, f)
|
||||
walkStmts(x.StmtList, f)
|
||||
case *TestClause:
|
||||
Walk(x.X, f)
|
||||
case *DeclClause:
|
||||
@@ -170,7 +171,7 @@ func Walk(node Node, f func(Node) bool) {
|
||||
case *ExtGlob:
|
||||
Walk(x.Pattern, f)
|
||||
case *ProcSubst:
|
||||
walkStmts(x.Stmts, f)
|
||||
walkStmts(x.StmtList, f)
|
||||
case *TimeClause:
|
||||
if x.Stmt != nil {
|
||||
Walk(x.Stmt, f)
|
||||
|
2
vendor/golang.org/x/crypto/scrypt/scrypt.go
generated
vendored
2
vendor/golang.org/x/crypto/scrypt/scrypt.go
generated
vendored
@@ -4,7 +4,7 @@
|
||||
|
||||
// Package scrypt implements the scrypt key derivation function as defined in
|
||||
// Colin Percival's paper "Stronger Key Derivation via Sequential Memory-Hard
|
||||
// Functions" (http://www.tarsnap.com/scrypt/scrypt.pdf).
|
||||
// Functions" (https://www.tarsnap.com/scrypt/scrypt.pdf).
|
||||
package scrypt // import "golang.org/x/crypto/scrypt"
|
||||
|
||||
import (
|
||||
|
6
vendor/golang.org/x/sys/unix/mkerrors.sh
generated
vendored
6
vendor/golang.org/x/sys/unix/mkerrors.sh
generated
vendored
@@ -153,6 +153,7 @@ struct ltchars {
|
||||
#include <sys/types.h>
|
||||
#include <sys/time.h>
|
||||
#include <sys/socket.h>
|
||||
#include <sys/xattr.h>
|
||||
#include <linux/if.h>
|
||||
#include <linux/if_alg.h>
|
||||
#include <linux/if_arp.h>
|
||||
@@ -170,6 +171,7 @@ struct ltchars {
|
||||
#include <linux/rtnetlink.h>
|
||||
#include <linux/ptrace.h>
|
||||
#include <linux/sched.h>
|
||||
#include <linux/seccomp.h>
|
||||
#include <linux/wait.h>
|
||||
#include <linux/icmpv6.h>
|
||||
#include <linux/serial.h>
|
||||
@@ -374,7 +376,7 @@ ccflags="$@"
|
||||
$2 == "IFNAMSIZ" ||
|
||||
$2 ~ /^CTL_(MAXNAME|NET|QUERY)$/ ||
|
||||
$2 ~ /^SYSCTL_VERS/ ||
|
||||
$2 ~ /^(MS|MNT)_/ ||
|
||||
$2 ~ /^(MS|MNT|UMOUNT)_/ ||
|
||||
$2 ~ /^TUN(SET|GET|ATTACH|DETACH)/ ||
|
||||
$2 ~ /^(O|F|E?FD|NAME|S|PTRACE|PT)_/ ||
|
||||
$2 ~ /^LINUX_REBOOT_CMD_/ ||
|
||||
@@ -402,8 +404,10 @@ ccflags="$@"
|
||||
$2 ~ /^GRND_/ ||
|
||||
$2 ~ /^KEY_(SPEC|REQKEY_DEFL)_/ ||
|
||||
$2 ~ /^KEYCTL_/ ||
|
||||
$2 ~ /^SECCOMP_MODE_/ ||
|
||||
$2 ~ /^SPLICE_/ ||
|
||||
$2 ~ /^(VM|VMADDR)_/ ||
|
||||
$2 ~ /^XATTR_(CREATE|REPLACE)/ ||
|
||||
$2 !~ "WMESGLEN" &&
|
||||
$2 ~ /^W[A-Z0-9]+$/ ||
|
||||
$2 ~ /^BLK[A-Z]*(GET$|SET$|BUF$|PART$|SIZE)/ {printf("\t%s = C.%s\n", $2, $2)}
|
||||
|
10
vendor/golang.org/x/sys/unix/syscall_linux.go
generated
vendored
10
vendor/golang.org/x/sys/unix/syscall_linux.go
generated
vendored
@@ -61,6 +61,10 @@ func IoctlSetInt(fd int, req uint, value int) (err error) {
|
||||
return ioctl(fd, req, uintptr(value))
|
||||
}
|
||||
|
||||
func IoctlSetTermios(fd int, req uint, value *Termios) (err error) {
|
||||
return ioctl(fd, req, uintptr(unsafe.Pointer(value)))
|
||||
}
|
||||
|
||||
// IoctlGetInt performs an ioctl operation which gets an integer value
|
||||
// from fd, using the specified request number.
|
||||
func IoctlGetInt(fd int, req uint) (int, error) {
|
||||
@@ -69,6 +73,12 @@ func IoctlGetInt(fd int, req uint) (int, error) {
|
||||
return value, err
|
||||
}
|
||||
|
||||
func IoctlGetTermios(fd int, req uint) (*Termios, error) {
|
||||
var value Termios
|
||||
err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
|
||||
return &value, err
|
||||
}
|
||||
|
||||
//sys Linkat(olddirfd int, oldpath string, newdirfd int, newpath string, flags int) (err error)
|
||||
|
||||
func Link(oldpath string, newpath string) (err error) {
|
||||
|
6
vendor/golang.org/x/sys/unix/zerrors_linux_386.go
generated
vendored
6
vendor/golang.org/x/sys/unix/zerrors_linux_386.go
generated
vendored
@@ -1393,6 +1393,9 @@ const (
|
||||
SCM_TIMESTAMPING_OPT_STATS = 0x36
|
||||
SCM_TIMESTAMPNS = 0x23
|
||||
SCM_WIFI_STATUS = 0x29
|
||||
SECCOMP_MODE_DISABLED = 0x0
|
||||
SECCOMP_MODE_FILTER = 0x2
|
||||
SECCOMP_MODE_STRICT = 0x1
|
||||
SHUT_RD = 0x0
|
||||
SHUT_RDWR = 0x2
|
||||
SHUT_WR = 0x1
|
||||
@@ -1752,6 +1755,7 @@ const (
|
||||
TUNSETVNETBE = 0x400454de
|
||||
TUNSETVNETHDRSZ = 0x400454d8
|
||||
TUNSETVNETLE = 0x400454dc
|
||||
UMOUNT_NOFOLLOW = 0x8
|
||||
VDISCARD = 0xd
|
||||
VEOF = 0x4
|
||||
VEOL = 0xb
|
||||
@@ -1788,6 +1792,8 @@ const (
|
||||
WORDSIZE = 0x20
|
||||
WSTOPPED = 0x2
|
||||
WUNTRACED = 0x2
|
||||
XATTR_CREATE = 0x1
|
||||
XATTR_REPLACE = 0x2
|
||||
XCASE = 0x4
|
||||
XTABS = 0x1800
|
||||
)
|
||||
|
6
vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go
generated
vendored
6
vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go
generated
vendored
@@ -1394,6 +1394,9 @@ const (
|
||||
SCM_TIMESTAMPING_OPT_STATS = 0x36
|
||||
SCM_TIMESTAMPNS = 0x23
|
||||
SCM_WIFI_STATUS = 0x29
|
||||
SECCOMP_MODE_DISABLED = 0x0
|
||||
SECCOMP_MODE_FILTER = 0x2
|
||||
SECCOMP_MODE_STRICT = 0x1
|
||||
SHUT_RD = 0x0
|
||||
SHUT_RDWR = 0x2
|
||||
SHUT_WR = 0x1
|
||||
@@ -1753,6 +1756,7 @@ const (
|
||||
TUNSETVNETBE = 0x400454de
|
||||
TUNSETVNETHDRSZ = 0x400454d8
|
||||
TUNSETVNETLE = 0x400454dc
|
||||
UMOUNT_NOFOLLOW = 0x8
|
||||
VDISCARD = 0xd
|
||||
VEOF = 0x4
|
||||
VEOL = 0xb
|
||||
@@ -1789,6 +1793,8 @@ const (
|
||||
WORDSIZE = 0x40
|
||||
WSTOPPED = 0x2
|
||||
WUNTRACED = 0x2
|
||||
XATTR_CREATE = 0x1
|
||||
XATTR_REPLACE = 0x2
|
||||
XCASE = 0x4
|
||||
XTABS = 0x1800
|
||||
)
|
||||
|
6
vendor/golang.org/x/sys/unix/zerrors_linux_arm.go
generated
vendored
6
vendor/golang.org/x/sys/unix/zerrors_linux_arm.go
generated
vendored
@@ -1398,6 +1398,9 @@ const (
|
||||
SCM_TIMESTAMPING_OPT_STATS = 0x36
|
||||
SCM_TIMESTAMPNS = 0x23
|
||||
SCM_WIFI_STATUS = 0x29
|
||||
SECCOMP_MODE_DISABLED = 0x0
|
||||
SECCOMP_MODE_FILTER = 0x2
|
||||
SECCOMP_MODE_STRICT = 0x1
|
||||
SHUT_RD = 0x0
|
||||
SHUT_RDWR = 0x2
|
||||
SHUT_WR = 0x1
|
||||
@@ -1757,6 +1760,7 @@ const (
|
||||
TUNSETVNETBE = 0x400454de
|
||||
TUNSETVNETHDRSZ = 0x400454d8
|
||||
TUNSETVNETLE = 0x400454dc
|
||||
UMOUNT_NOFOLLOW = 0x8
|
||||
VDISCARD = 0xd
|
||||
VEOF = 0x4
|
||||
VEOL = 0xb
|
||||
@@ -1793,6 +1797,8 @@ const (
|
||||
WORDSIZE = 0x20
|
||||
WSTOPPED = 0x2
|
||||
WUNTRACED = 0x2
|
||||
XATTR_CREATE = 0x1
|
||||
XATTR_REPLACE = 0x2
|
||||
XCASE = 0x4
|
||||
XTABS = 0x1800
|
||||
)
|
||||
|
6
vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go
generated
vendored
6
vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go
generated
vendored
@@ -1383,6 +1383,9 @@ const (
|
||||
SCM_TIMESTAMPING_OPT_STATS = 0x36
|
||||
SCM_TIMESTAMPNS = 0x23
|
||||
SCM_WIFI_STATUS = 0x29
|
||||
SECCOMP_MODE_DISABLED = 0x0
|
||||
SECCOMP_MODE_FILTER = 0x2
|
||||
SECCOMP_MODE_STRICT = 0x1
|
||||
SHUT_RD = 0x0
|
||||
SHUT_RDWR = 0x2
|
||||
SHUT_WR = 0x1
|
||||
@@ -1742,6 +1745,7 @@ const (
|
||||
TUNSETVNETBE = 0x400454de
|
||||
TUNSETVNETHDRSZ = 0x400454d8
|
||||
TUNSETVNETLE = 0x400454dc
|
||||
UMOUNT_NOFOLLOW = 0x8
|
||||
VDISCARD = 0xd
|
||||
VEOF = 0x4
|
||||
VEOL = 0xb
|
||||
@@ -1778,6 +1782,8 @@ const (
|
||||
WORDSIZE = 0x40
|
||||
WSTOPPED = 0x2
|
||||
WUNTRACED = 0x2
|
||||
XATTR_CREATE = 0x1
|
||||
XATTR_REPLACE = 0x2
|
||||
XCASE = 0x4
|
||||
XTABS = 0x1800
|
||||
)
|
||||
|
6
vendor/golang.org/x/sys/unix/zerrors_linux_mips.go
generated
vendored
6
vendor/golang.org/x/sys/unix/zerrors_linux_mips.go
generated
vendored
@@ -1395,6 +1395,9 @@ const (
|
||||
SCM_TIMESTAMPING_OPT_STATS = 0x36
|
||||
SCM_TIMESTAMPNS = 0x23
|
||||
SCM_WIFI_STATUS = 0x29
|
||||
SECCOMP_MODE_DISABLED = 0x0
|
||||
SECCOMP_MODE_FILTER = 0x2
|
||||
SECCOMP_MODE_STRICT = 0x1
|
||||
SHUT_RD = 0x0
|
||||
SHUT_RDWR = 0x2
|
||||
SHUT_WR = 0x1
|
||||
@@ -1756,6 +1759,7 @@ const (
|
||||
TUNSETVNETBE = 0x800454de
|
||||
TUNSETVNETHDRSZ = 0x800454d8
|
||||
TUNSETVNETLE = 0x800454dc
|
||||
UMOUNT_NOFOLLOW = 0x8
|
||||
VDISCARD = 0xd
|
||||
VEOF = 0x10
|
||||
VEOL = 0x11
|
||||
@@ -1793,6 +1797,8 @@ const (
|
||||
WORDSIZE = 0x20
|
||||
WSTOPPED = 0x2
|
||||
WUNTRACED = 0x2
|
||||
XATTR_CREATE = 0x1
|
||||
XATTR_REPLACE = 0x2
|
||||
XCASE = 0x4
|
||||
XTABS = 0x1800
|
||||
)
|
||||
|
6
vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go
generated
vendored
6
vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go
generated
vendored
@@ -1395,6 +1395,9 @@ const (
|
||||
SCM_TIMESTAMPING_OPT_STATS = 0x36
|
||||
SCM_TIMESTAMPNS = 0x23
|
||||
SCM_WIFI_STATUS = 0x29
|
||||
SECCOMP_MODE_DISABLED = 0x0
|
||||
SECCOMP_MODE_FILTER = 0x2
|
||||
SECCOMP_MODE_STRICT = 0x1
|
||||
SHUT_RD = 0x0
|
||||
SHUT_RDWR = 0x2
|
||||
SHUT_WR = 0x1
|
||||
@@ -1756,6 +1759,7 @@ const (
|
||||
TUNSETVNETBE = 0x800454de
|
||||
TUNSETVNETHDRSZ = 0x800454d8
|
||||
TUNSETVNETLE = 0x800454dc
|
||||
UMOUNT_NOFOLLOW = 0x8
|
||||
VDISCARD = 0xd
|
||||
VEOF = 0x10
|
||||
VEOL = 0x11
|
||||
@@ -1793,6 +1797,8 @@ const (
|
||||
WORDSIZE = 0x40
|
||||
WSTOPPED = 0x2
|
||||
WUNTRACED = 0x2
|
||||
XATTR_CREATE = 0x1
|
||||
XATTR_REPLACE = 0x2
|
||||
XCASE = 0x4
|
||||
XTABS = 0x1800
|
||||
)
|
||||
|
6
vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go
generated
vendored
6
vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go
generated
vendored
@@ -1395,6 +1395,9 @@ const (
|
||||
SCM_TIMESTAMPING_OPT_STATS = 0x36
|
||||
SCM_TIMESTAMPNS = 0x23
|
||||
SCM_WIFI_STATUS = 0x29
|
||||
SECCOMP_MODE_DISABLED = 0x0
|
||||
SECCOMP_MODE_FILTER = 0x2
|
||||
SECCOMP_MODE_STRICT = 0x1
|
||||
SHUT_RD = 0x0
|
||||
SHUT_RDWR = 0x2
|
||||
SHUT_WR = 0x1
|
||||
@@ -1756,6 +1759,7 @@ const (
|
||||
TUNSETVNETBE = 0x800454de
|
||||
TUNSETVNETHDRSZ = 0x800454d8
|
||||
TUNSETVNETLE = 0x800454dc
|
||||
UMOUNT_NOFOLLOW = 0x8
|
||||
VDISCARD = 0xd
|
||||
VEOF = 0x10
|
||||
VEOL = 0x11
|
||||
@@ -1793,6 +1797,8 @@ const (
|
||||
WORDSIZE = 0x40
|
||||
WSTOPPED = 0x2
|
||||
WUNTRACED = 0x2
|
||||
XATTR_CREATE = 0x1
|
||||
XATTR_REPLACE = 0x2
|
||||
XCASE = 0x4
|
||||
XTABS = 0x1800
|
||||
)
|
||||
|
6
vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go
generated
vendored
6
vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go
generated
vendored
@@ -1395,6 +1395,9 @@ const (
|
||||
SCM_TIMESTAMPING_OPT_STATS = 0x36
|
||||
SCM_TIMESTAMPNS = 0x23
|
||||
SCM_WIFI_STATUS = 0x29
|
||||
SECCOMP_MODE_DISABLED = 0x0
|
||||
SECCOMP_MODE_FILTER = 0x2
|
||||
SECCOMP_MODE_STRICT = 0x1
|
||||
SHUT_RD = 0x0
|
||||
SHUT_RDWR = 0x2
|
||||
SHUT_WR = 0x1
|
||||
@@ -1756,6 +1759,7 @@ const (
|
||||
TUNSETVNETBE = 0x800454de
|
||||
TUNSETVNETHDRSZ = 0x800454d8
|
||||
TUNSETVNETLE = 0x800454dc
|
||||
UMOUNT_NOFOLLOW = 0x8
|
||||
VDISCARD = 0xd
|
||||
VEOF = 0x10
|
||||
VEOL = 0x11
|
||||
@@ -1793,6 +1797,8 @@ const (
|
||||
WORDSIZE = 0x20
|
||||
WSTOPPED = 0x2
|
||||
WUNTRACED = 0x2
|
||||
XATTR_CREATE = 0x1
|
||||
XATTR_REPLACE = 0x2
|
||||
XCASE = 0x4
|
||||
XTABS = 0x1800
|
||||
)
|
||||
|
6
vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go
generated
vendored
6
vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go
generated
vendored
@@ -1451,6 +1451,9 @@ const (
|
||||
SCM_TIMESTAMPING_OPT_STATS = 0x36
|
||||
SCM_TIMESTAMPNS = 0x23
|
||||
SCM_WIFI_STATUS = 0x29
|
||||
SECCOMP_MODE_DISABLED = 0x0
|
||||
SECCOMP_MODE_FILTER = 0x2
|
||||
SECCOMP_MODE_STRICT = 0x1
|
||||
SHUT_RD = 0x0
|
||||
SHUT_RDWR = 0x2
|
||||
SHUT_WR = 0x1
|
||||
@@ -1814,6 +1817,7 @@ const (
|
||||
TUNSETVNETBE = 0x800454de
|
||||
TUNSETVNETHDRSZ = 0x800454d8
|
||||
TUNSETVNETLE = 0x800454dc
|
||||
UMOUNT_NOFOLLOW = 0x8
|
||||
VDISCARD = 0x10
|
||||
VEOF = 0x4
|
||||
VEOL = 0x6
|
||||
@@ -1850,6 +1854,8 @@ const (
|
||||
WORDSIZE = 0x40
|
||||
WSTOPPED = 0x2
|
||||
WUNTRACED = 0x2
|
||||
XATTR_CREATE = 0x1
|
||||
XATTR_REPLACE = 0x2
|
||||
XCASE = 0x4000
|
||||
XTABS = 0xc00
|
||||
)
|
||||
|
6
vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go
generated
vendored
6
vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go
generated
vendored
@@ -1451,6 +1451,9 @@ const (
|
||||
SCM_TIMESTAMPING_OPT_STATS = 0x36
|
||||
SCM_TIMESTAMPNS = 0x23
|
||||
SCM_WIFI_STATUS = 0x29
|
||||
SECCOMP_MODE_DISABLED = 0x0
|
||||
SECCOMP_MODE_FILTER = 0x2
|
||||
SECCOMP_MODE_STRICT = 0x1
|
||||
SHUT_RD = 0x0
|
||||
SHUT_RDWR = 0x2
|
||||
SHUT_WR = 0x1
|
||||
@@ -1814,6 +1817,7 @@ const (
|
||||
TUNSETVNETBE = 0x800454de
|
||||
TUNSETVNETHDRSZ = 0x800454d8
|
||||
TUNSETVNETLE = 0x800454dc
|
||||
UMOUNT_NOFOLLOW = 0x8
|
||||
VDISCARD = 0x10
|
||||
VEOF = 0x4
|
||||
VEOL = 0x6
|
||||
@@ -1850,6 +1854,8 @@ const (
|
||||
WORDSIZE = 0x40
|
||||
WSTOPPED = 0x2
|
||||
WUNTRACED = 0x2
|
||||
XATTR_CREATE = 0x1
|
||||
XATTR_REPLACE = 0x2
|
||||
XCASE = 0x4000
|
||||
XTABS = 0xc00
|
||||
)
|
||||
|
6
vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go
generated
vendored
6
vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go
generated
vendored
@@ -1455,6 +1455,9 @@ const (
|
||||
SCM_TIMESTAMPING_OPT_STATS = 0x36
|
||||
SCM_TIMESTAMPNS = 0x23
|
||||
SCM_WIFI_STATUS = 0x29
|
||||
SECCOMP_MODE_DISABLED = 0x0
|
||||
SECCOMP_MODE_FILTER = 0x2
|
||||
SECCOMP_MODE_STRICT = 0x1
|
||||
SHUT_RD = 0x0
|
||||
SHUT_RDWR = 0x2
|
||||
SHUT_WR = 0x1
|
||||
@@ -1814,6 +1817,7 @@ const (
|
||||
TUNSETVNETBE = 0x400454de
|
||||
TUNSETVNETHDRSZ = 0x400454d8
|
||||
TUNSETVNETLE = 0x400454dc
|
||||
UMOUNT_NOFOLLOW = 0x8
|
||||
VDISCARD = 0xd
|
||||
VEOF = 0x4
|
||||
VEOL = 0xb
|
||||
@@ -1850,6 +1854,8 @@ const (
|
||||
WORDSIZE = 0x40
|
||||
WSTOPPED = 0x2
|
||||
WUNTRACED = 0x2
|
||||
XATTR_CREATE = 0x1
|
||||
XATTR_REPLACE = 0x2
|
||||
XCASE = 0x4
|
||||
XTABS = 0x1800
|
||||
)
|
||||
|
Reference in New Issue
Block a user