Mirror of https://github.com/go-task/task.git (synced 2024-12-12 10:45:49 +02:00)
Commit b2df398a12 (parent 83d618e1eb): go mod vendor
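
The commit title refers to Go's dependency vendoring step: `go mod vendor` rebuilds the repository's vendor/ directory (and vendor/modules.txt) from go.mod, which is why the files below swap the vendored gopkg.in/yaml.v3 sources for gopkg.in/yaml.v2. A rough sketch of how a commit like this is usually produced; the go.mod edit is an assumption, since it is not shown in this excerpt:

```
go get gopkg.in/yaml.v2     # assumed: point go.mod at the yaml.v2 module
go mod vendor               # copy the module's dependencies into vendor/
git add go.mod go.sum vendor
git commit -m "go mod vendor"
```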
vendor/gopkg.in/yaml.v2/.travis.yml (generated, vendored, new file; 12 lines)
@@ -0,0 +1,12 @@
language: go

go:
- 1.4
- 1.5
- 1.6
- 1.7
- 1.8
- 1.9
- tip

go_import_path: gopkg.in/yaml.v2
vendor/gopkg.in/yaml.v2/LICENSE (generated, vendored, new file; 201 lines)
@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright {yyyy} {name of copyright owner}

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
vendor/gopkg.in/yaml.v3/LICENSE → vendor/gopkg.in/yaml.v2/LICENSE.libyaml (generated, vendored; 39 lines changed)
@@ -1,17 +1,16 @@

This project is covered by two different licenses: MIT and Apache.

#### MIT License ####

The following files were ported to Go from C files of libyaml, and thus
are still covered by their original MIT license, with the additional
copyright staring in 2011 when the project was ported over:
are still covered by their original copyright and license:

apic.go emitterc.go parserc.go readerc.go scannerc.go
writerc.go yamlh.go yamlprivateh.go
apic.go
emitterc.go
parserc.go
readerc.go
scannerc.go
writerc.go
yamlh.go
yamlprivateh.go

Copyright (c) 2006-2010 Kirill Simonov
Copyright (c) 2006-2011 Kirill Simonov
Copyright (c) 2006 Kirill Simonov

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
@@ -30,21 +29,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

### Apache License ###

All the remaining project files are covered by the Apache license:

Copyright (c) 2011-2019 Canonical Ltd

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
vendor/gopkg.in/yaml.v3/NOTICE → vendor/gopkg.in/yaml.v2/NOTICE (generated, vendored; 0 lines changed)
vendor/gopkg.in/yaml.v3/README.md → vendor/gopkg.in/yaml.v2/README.md (generated, vendored; 31 lines changed)
@@ -12,23 +12,7 @@ C library to parse and generate YAML data quickly and reliably.
Compatibility
-------------

The yaml package supports most of YAML 1.2, but preserves some behavior
from 1.1 for backwards compatibility.

Specifically, as of v3 of the yaml package:

- YAML 1.1 bools (_yes/no, on/off_) are supported as long as they are being
decoded into a typed bool value. Otherwise they behave as a string. Booleans
in YAML 1.2 are _true/false_ only.
- Octals encode and decode as _0777_ per YAML 1.1, rather than _0o777_
as specified in YAML 1.2, because most parsers still use the old format.
Octals in the _0o777_ format are supported though, so new files work.
- Does not support base-60 floats. These are gone from YAML 1.2, and were
actually never supported by this package as it's clearly a poor choice.

and offers backwards
compatibility with YAML 1.1 in some cases.
1.2, including support for
The yaml package supports most of YAML 1.1 and 1.2, including support for
anchors, tags, map merging, etc. Multi-document unmarshalling is not yet
implemented, and base-60 floats from YAML 1.1 are purposefully not
supported since they're a poor design and are gone in YAML 1.2.
@@ -36,30 +20,29 @@ supported since they're a poor design and are gone in YAML 1.2.
Installation and usage
----------------------

The import path for the package is *gopkg.in/yaml.v3*.
The import path for the package is *gopkg.in/yaml.v2*.

To install it, run:

go get gopkg.in/yaml.v3
go get gopkg.in/yaml.v2

API documentation
-----------------

If opened in a browser, the import path itself leads to the API documentation:

- [https://gopkg.in/yaml.v3](https://gopkg.in/yaml.v3)
* [https://gopkg.in/yaml.v2](https://gopkg.in/yaml.v2)

API stability
-------------

The package API for yaml v3 will remain stable as described in [gopkg.in](https://gopkg.in).
The package API for yaml v2 will remain stable as described in [gopkg.in](https://gopkg.in).


License
-------

The yaml package is licensed under the MIT and Apache License 2.0 licenses.
Please see the LICENSE file for details.
The yaml package is licensed under the Apache License 2.0. Please see the LICENSE file for details.


Example
@@ -72,7 +55,7 @@ import (
"fmt"
"log"

"gopkg.in/yaml.v3"
"gopkg.in/yaml.v2"
)

var data = `
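
The README example above is cut off at `var data =` in this excerpt. For context, a small self-contained sketch of the gopkg.in/yaml.v2 API that the new README documents; the Config type and the sample document here are illustrative, not taken from the upstream example:

```go
package main

import (
	"fmt"
	"log"

	"gopkg.in/yaml.v2"
)

// Config is a hypothetical target type for this sketch. The yaml struct tags
// map the document's keys onto exported Go fields; without a tag, yaml.v2
// uses the lower-cased field name.
type Config struct {
	Name    string   `yaml:"name"`
	Targets []string `yaml:"targets"`
}

var data = `
name: example
targets: [linux, darwin]
`

func main() {
	var c Config
	// Unmarshal decodes a YAML document into the Go value.
	if err := yaml.Unmarshal([]byte(data), &c); err != nil {
		log.Fatalf("error: %v", err)
	}
	fmt.Printf("%+v\n", c)

	// Marshal goes the other way, producing YAML from the Go value.
	out, err := yaml.Marshal(c)
	if err != nil {
		log.Fatalf("error: %v", err)
	}
	fmt.Println(string(out))
}
```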
vendor/gopkg.in/yaml.v3/apic.go → vendor/gopkg.in/yaml.v2/apic.go (generated, vendored; 55 lines changed)
@@ -1,25 +1,3 @@
//
// Copyright (c) 2011-2019 Canonical Ltd
// Copyright (c) 2006-2010 Kirill Simonov
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
// of the Software, and to permit persons to whom the Software is furnished to do
// so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package yaml

import (
@@ -160,7 +138,7 @@ func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) {
emitter.canonical = canonical
}

// Set the indentation increment.
//// Set the indentation increment.
func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) {
if indent < 2 || indent > 9 {
indent = 2
@@ -310,14 +288,29 @@ func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) {
}
}

// Create ALIAS.
func yaml_alias_event_initialize(event *yaml_event_t, anchor []byte) bool {
*event = yaml_event_t{
typ: yaml_ALIAS_EVENT,
anchor: anchor,
}
return true
}
///*
// * Create ALIAS.
// */
//
//YAML_DECLARE(int)
//yaml_alias_event_initialize(event *yaml_event_t, anchor *yaml_char_t)
//{
// mark yaml_mark_t = { 0, 0, 0 }
// anchor_copy *yaml_char_t = NULL
//
// assert(event) // Non-NULL event object is expected.
// assert(anchor) // Non-NULL anchor is expected.
//
// if (!yaml_check_utf8(anchor, strlen((char *)anchor))) return 0
//
// anchor_copy = yaml_strdup(anchor)
// if (!anchor_copy)
// return 0
//
// ALIAS_EVENT_INIT(*event, anchor_copy, mark, mark)
//
// return 1
//}

// Create SCALAR.
func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool {
vendor/gopkg.in/yaml.v3/decode.go → vendor/gopkg.in/yaml.v2/decode.go (generated, vendored; 519 lines changed)
@ -1,18 +1,3 @@
|
||||
//
|
||||
// Copyright (c) 2011-2019 Canonical Ltd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package yaml
|
||||
|
||||
import (
|
||||
@ -26,14 +11,33 @@ import (
|
||||
"time"
|
||||
)
|
||||
|
||||
const (
|
||||
documentNode = 1 << iota
|
||||
mappingNode
|
||||
sequenceNode
|
||||
scalarNode
|
||||
aliasNode
|
||||
)
|
||||
|
||||
type node struct {
|
||||
kind int
|
||||
line, column int
|
||||
tag string
|
||||
// For an alias node, alias holds the resolved alias.
|
||||
alias *node
|
||||
value string
|
||||
implicit bool
|
||||
children []*node
|
||||
anchors map[string]*node
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Parser, produces a node tree out of a libyaml event stream.
|
||||
|
||||
type parser struct {
|
||||
parser yaml_parser_t
|
||||
event yaml_event_t
|
||||
doc *Node
|
||||
anchors map[string]*Node
|
||||
doc *node
|
||||
doneInit bool
|
||||
}
|
||||
|
||||
@ -62,7 +66,6 @@ func (p *parser) init() {
|
||||
if p.doneInit {
|
||||
return
|
||||
}
|
||||
p.anchors = make(map[string]*Node)
|
||||
p.expect(yaml_STREAM_START_EVENT)
|
||||
p.doneInit = true
|
||||
}
|
||||
@ -129,14 +132,13 @@ func (p *parser) fail() {
|
||||
failf("%s%s", where, msg)
|
||||
}
|
||||
|
||||
func (p *parser) anchor(n *Node, anchor []byte) {
|
||||
func (p *parser) anchor(n *node, anchor []byte) {
|
||||
if anchor != nil {
|
||||
n.Anchor = string(anchor)
|
||||
p.anchors[n.Anchor] = n
|
||||
p.doc.anchors[string(anchor)] = n
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) parse() *Node {
|
||||
func (p *parser) parse() *node {
|
||||
p.init()
|
||||
switch p.peek() {
|
||||
case yaml_SCALAR_EVENT:
|
||||
@ -157,120 +159,63 @@ func (p *parser) parse() *Node {
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) node(kind Kind, defaultTag, tag, value string) *Node {
|
||||
var style Style
|
||||
if tag != "" && tag != "!" {
|
||||
tag = shortTag(tag)
|
||||
style = TaggedStyle
|
||||
} else if defaultTag != "" {
|
||||
tag = defaultTag
|
||||
} else if kind == ScalarNode {
|
||||
tag, _ = resolve("", value)
|
||||
}
|
||||
return &Node{
|
||||
Kind: kind,
|
||||
Tag: tag,
|
||||
Value: value,
|
||||
Style: style,
|
||||
Line: p.event.start_mark.line + 1,
|
||||
Column: p.event.start_mark.column + 1,
|
||||
HeadComment: string(p.event.head_comment),
|
||||
LineComment: string(p.event.line_comment),
|
||||
FootComment: string(p.event.foot_comment),
|
||||
func (p *parser) node(kind int) *node {
|
||||
return &node{
|
||||
kind: kind,
|
||||
line: p.event.start_mark.line,
|
||||
column: p.event.start_mark.column,
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) parseChild(parent *Node) *Node {
|
||||
child := p.parse()
|
||||
parent.Content = append(parent.Content, child)
|
||||
return child
|
||||
}
|
||||
|
||||
func (p *parser) document() *Node {
|
||||
n := p.node(DocumentNode, "", "", "")
|
||||
func (p *parser) document() *node {
|
||||
n := p.node(documentNode)
|
||||
n.anchors = make(map[string]*node)
|
||||
p.doc = n
|
||||
p.expect(yaml_DOCUMENT_START_EVENT)
|
||||
p.parseChild(n)
|
||||
if p.peek() == yaml_DOCUMENT_END_EVENT {
|
||||
n.FootComment = string(p.event.foot_comment)
|
||||
}
|
||||
n.children = append(n.children, p.parse())
|
||||
p.expect(yaml_DOCUMENT_END_EVENT)
|
||||
return n
|
||||
}
|
||||
|
||||
func (p *parser) alias() *Node {
|
||||
n := p.node(AliasNode, "", "", string(p.event.anchor))
|
||||
n.Alias = p.anchors[n.Value]
|
||||
if n.Alias == nil {
|
||||
failf("unknown anchor '%s' referenced", n.Value)
|
||||
func (p *parser) alias() *node {
|
||||
n := p.node(aliasNode)
|
||||
n.value = string(p.event.anchor)
|
||||
n.alias = p.doc.anchors[n.value]
|
||||
if n.alias == nil {
|
||||
failf("unknown anchor '%s' referenced", n.value)
|
||||
}
|
||||
p.expect(yaml_ALIAS_EVENT)
|
||||
return n
|
||||
}
|
||||
|
||||
func (p *parser) scalar() *Node {
|
||||
var parsedStyle = p.event.scalar_style()
|
||||
var nodeStyle Style
|
||||
switch {
|
||||
case parsedStyle&yaml_DOUBLE_QUOTED_SCALAR_STYLE != 0:
|
||||
nodeStyle = DoubleQuotedStyle
|
||||
case parsedStyle&yaml_SINGLE_QUOTED_SCALAR_STYLE != 0:
|
||||
nodeStyle = SingleQuotedStyle
|
||||
case parsedStyle&yaml_LITERAL_SCALAR_STYLE != 0:
|
||||
nodeStyle = LiteralStyle
|
||||
case parsedStyle&yaml_FOLDED_SCALAR_STYLE != 0:
|
||||
nodeStyle = FoldedStyle
|
||||
}
|
||||
var nodeValue = string(p.event.value)
|
||||
var nodeTag = string(p.event.tag)
|
||||
var defaultTag string
|
||||
if nodeStyle == 0 {
|
||||
if nodeValue == "<<" {
|
||||
defaultTag = mergeTag
|
||||
}
|
||||
} else {
|
||||
defaultTag = strTag
|
||||
}
|
||||
n := p.node(ScalarNode, defaultTag, nodeTag, nodeValue)
|
||||
n.Style |= nodeStyle
|
||||
func (p *parser) scalar() *node {
|
||||
n := p.node(scalarNode)
|
||||
n.value = string(p.event.value)
|
||||
n.tag = string(p.event.tag)
|
||||
n.implicit = p.event.implicit
|
||||
p.anchor(n, p.event.anchor)
|
||||
p.expect(yaml_SCALAR_EVENT)
|
||||
return n
|
||||
}
|
||||
|
||||
func (p *parser) sequence() *Node {
|
||||
n := p.node(SequenceNode, seqTag, string(p.event.tag), "")
|
||||
if p.event.sequence_style()&yaml_FLOW_SEQUENCE_STYLE != 0 {
|
||||
n.Style |= FlowStyle
|
||||
}
|
||||
func (p *parser) sequence() *node {
|
||||
n := p.node(sequenceNode)
|
||||
p.anchor(n, p.event.anchor)
|
||||
p.expect(yaml_SEQUENCE_START_EVENT)
|
||||
for p.peek() != yaml_SEQUENCE_END_EVENT {
|
||||
p.parseChild(n)
|
||||
n.children = append(n.children, p.parse())
|
||||
}
|
||||
n.LineComment = string(p.event.line_comment)
|
||||
n.FootComment = string(p.event.foot_comment)
|
||||
p.expect(yaml_SEQUENCE_END_EVENT)
|
||||
return n
|
||||
}
|
||||
|
||||
func (p *parser) mapping() *Node {
|
||||
n := p.node(MappingNode, mapTag, string(p.event.tag), "")
|
||||
if p.event.mapping_style()&yaml_FLOW_MAPPING_STYLE != 0 {
|
||||
n.Style |= FlowStyle
|
||||
}
|
||||
func (p *parser) mapping() *node {
|
||||
n := p.node(mappingNode)
|
||||
p.anchor(n, p.event.anchor)
|
||||
p.expect(yaml_MAPPING_START_EVENT)
|
||||
for p.peek() != yaml_MAPPING_END_EVENT {
|
||||
k := p.parseChild(n)
|
||||
v := p.parseChild(n)
|
||||
if v.FootComment != "" {
|
||||
k.FootComment = v.FootComment
|
||||
v.FootComment = ""
|
||||
}
|
||||
n.children = append(n.children, p.parse(), p.parse())
|
||||
}
|
||||
n.LineComment = string(p.event.line_comment)
|
||||
n.FootComment = string(p.event.foot_comment)
|
||||
p.expect(yaml_MAPPING_END_EVENT)
|
||||
return n
|
||||
}
|
||||
@ -279,60 +224,44 @@ func (p *parser) mapping() *Node {
|
||||
// Decoder, unmarshals a node into a provided value.
|
||||
|
||||
type decoder struct {
|
||||
doc *Node
|
||||
aliases map[*Node]bool
|
||||
doc *node
|
||||
aliases map[*node]bool
|
||||
mapType reflect.Type
|
||||
terrors []string
|
||||
|
||||
stringMapType reflect.Type
|
||||
generalMapType reflect.Type
|
||||
|
||||
knownFields bool
|
||||
uniqueKeys bool
|
||||
strict bool
|
||||
}
|
||||
|
||||
var (
|
||||
nodeType = reflect.TypeOf(Node{})
|
||||
mapItemType = reflect.TypeOf(MapItem{})
|
||||
durationType = reflect.TypeOf(time.Duration(0))
|
||||
stringMapType = reflect.TypeOf(map[string]interface{}{})
|
||||
generalMapType = reflect.TypeOf(map[interface{}]interface{}{})
|
||||
ifaceType = generalMapType.Elem()
|
||||
defaultMapType = reflect.TypeOf(map[interface{}]interface{}{})
|
||||
ifaceType = defaultMapType.Elem()
|
||||
timeType = reflect.TypeOf(time.Time{})
|
||||
ptrTimeType = reflect.TypeOf(&time.Time{})
|
||||
)
|
||||
|
||||
func newDecoder() *decoder {
|
||||
d := &decoder{
|
||||
stringMapType: stringMapType,
|
||||
generalMapType: generalMapType,
|
||||
uniqueKeys: true,
|
||||
}
|
||||
d.aliases = make(map[*Node]bool)
|
||||
func newDecoder(strict bool) *decoder {
|
||||
d := &decoder{mapType: defaultMapType, strict: strict}
|
||||
d.aliases = make(map[*node]bool)
|
||||
return d
|
||||
}
|
||||
|
||||
func (d *decoder) terror(n *Node, tag string, out reflect.Value) {
|
||||
if n.Tag != "" {
|
||||
tag = n.Tag
|
||||
func (d *decoder) terror(n *node, tag string, out reflect.Value) {
|
||||
if n.tag != "" {
|
||||
tag = n.tag
|
||||
}
|
||||
value := n.Value
|
||||
if tag != seqTag && tag != mapTag {
|
||||
value := n.value
|
||||
if tag != yaml_SEQ_TAG && tag != yaml_MAP_TAG {
|
||||
if len(value) > 10 {
|
||||
value = " `" + value[:7] + "...`"
|
||||
} else {
|
||||
value = " `" + value + "`"
|
||||
}
|
||||
}
|
||||
d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.Line, shortTag(tag), value, out.Type()))
|
||||
d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.line+1, shortTag(tag), value, out.Type()))
|
||||
}
|
||||
|
||||
func (d *decoder) callUnmarshaler(n *Node, u Unmarshaler) (good bool) {
|
||||
if err := u.UnmarshalYAML(n); err != nil {
|
||||
fail(err)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (d *decoder) callObsoleteUnmarshaler(n *Node, u obsoleteUnmarshaler) (good bool) {
|
||||
func (d *decoder) callUnmarshaler(n *node, u Unmarshaler) (good bool) {
|
||||
terrlen := len(d.terrors)
|
||||
err := u.UnmarshalYAML(func(v interface{}) (err error) {
|
||||
defer handleErr(&err)
|
||||
@ -361,8 +290,8 @@ func (d *decoder) callObsoleteUnmarshaler(n *Node, u obsoleteUnmarshaler) (good
|
||||
// its types unmarshalled appropriately.
|
||||
//
|
||||
// If n holds a null value, prepare returns before doing anything.
|
||||
func (d *decoder) prepare(n *Node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) {
|
||||
if n.ShortTag() == nullTag {
|
||||
func (d *decoder) prepare(n *node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) {
|
||||
if n.tag == yaml_NULL_TAG || n.kind == scalarNode && n.tag == "" && (n.value == "null" || n.value == "~" || n.value == "" && n.implicit) {
|
||||
return out, false, false
|
||||
}
|
||||
again := true
|
||||
@ -376,84 +305,55 @@ func (d *decoder) prepare(n *Node, out reflect.Value) (newout reflect.Value, unm
|
||||
again = true
|
||||
}
|
||||
if out.CanAddr() {
|
||||
outi := out.Addr().Interface()
|
||||
if u, ok := outi.(Unmarshaler); ok {
|
||||
if u, ok := out.Addr().Interface().(Unmarshaler); ok {
|
||||
good = d.callUnmarshaler(n, u)
|
||||
return out, true, good
|
||||
}
|
||||
if u, ok := outi.(obsoleteUnmarshaler); ok {
|
||||
good = d.callObsoleteUnmarshaler(n, u)
|
||||
return out, true, good
|
||||
}
|
||||
}
|
||||
}
|
||||
return out, false, false
|
||||
}
|
||||
|
||||
func (d *decoder) fieldByIndex(n *Node, v reflect.Value, index []int) (field reflect.Value) {
|
||||
if n.ShortTag() == nullTag {
|
||||
return reflect.Value{}
|
||||
}
|
||||
for _, num := range index {
|
||||
for {
|
||||
if v.Kind() == reflect.Ptr {
|
||||
if v.IsNil() {
|
||||
v.Set(reflect.New(v.Type().Elem()))
|
||||
}
|
||||
v = v.Elem()
|
||||
continue
|
||||
}
|
||||
break
|
||||
}
|
||||
v = v.Field(num)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
func (d *decoder) unmarshal(n *Node, out reflect.Value) (good bool) {
|
||||
if out.Type() == nodeType {
|
||||
out.Set(reflect.ValueOf(n).Elem())
|
||||
return true
|
||||
}
|
||||
switch n.Kind {
|
||||
case DocumentNode:
|
||||
func (d *decoder) unmarshal(n *node, out reflect.Value) (good bool) {
|
||||
switch n.kind {
|
||||
case documentNode:
|
||||
return d.document(n, out)
|
||||
case AliasNode:
|
||||
case aliasNode:
|
||||
return d.alias(n, out)
|
||||
}
|
||||
out, unmarshaled, good := d.prepare(n, out)
|
||||
if unmarshaled {
|
||||
return good
|
||||
}
|
||||
switch n.Kind {
|
||||
case ScalarNode:
|
||||
switch n.kind {
|
||||
case scalarNode:
|
||||
good = d.scalar(n, out)
|
||||
case MappingNode:
|
||||
case mappingNode:
|
||||
good = d.mapping(n, out)
|
||||
case SequenceNode:
|
||||
case sequenceNode:
|
||||
good = d.sequence(n, out)
|
||||
default:
|
||||
panic("internal error: unknown node kind: " + strconv.Itoa(int(n.Kind)))
|
||||
panic("internal error: unknown node kind: " + strconv.Itoa(n.kind))
|
||||
}
|
||||
return good
|
||||
}
|
||||
|
||||
func (d *decoder) document(n *Node, out reflect.Value) (good bool) {
|
||||
if len(n.Content) == 1 {
|
||||
func (d *decoder) document(n *node, out reflect.Value) (good bool) {
|
||||
if len(n.children) == 1 {
|
||||
d.doc = n
|
||||
d.unmarshal(n.Content[0], out)
|
||||
d.unmarshal(n.children[0], out)
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (d *decoder) alias(n *Node, out reflect.Value) (good bool) {
|
||||
func (d *decoder) alias(n *node, out reflect.Value) (good bool) {
|
||||
if d.aliases[n] {
|
||||
// TODO this could actually be allowed in some circumstances.
|
||||
failf("anchor '%s' value contains itself", n.Value)
|
||||
failf("anchor '%s' value contains itself", n.value)
|
||||
}
|
||||
d.aliases[n] = true
|
||||
good = d.unmarshal(n.Alias, out)
|
||||
good = d.unmarshal(n.alias, out)
|
||||
delete(d.aliases, n)
|
||||
return good
|
||||
}
|
||||
@ -466,15 +366,15 @@ func resetMap(out reflect.Value) {
|
||||
}
|
||||
}
|
||||
|
||||
func (d *decoder) scalar(n *Node, out reflect.Value) bool {
|
||||
func (d *decoder) scalar(n *node, out reflect.Value) bool {
|
||||
var tag string
|
||||
var resolved interface{}
|
||||
if n.indicatedString() {
|
||||
tag = strTag
|
||||
resolved = n.Value
|
||||
if n.tag == "" && !n.implicit {
|
||||
tag = yaml_STR_TAG
|
||||
resolved = n.value
|
||||
} else {
|
||||
tag, resolved = resolve(n.Tag, n.Value)
|
||||
if tag == binaryTag {
|
||||
tag, resolved = resolve(n.tag, n.value)
|
||||
if tag == yaml_BINARY_TAG {
|
||||
data, err := base64.StdEncoding.DecodeString(resolved.(string))
|
||||
if err != nil {
|
||||
failf("!!binary value contains invalid base64 data")
|
||||
@ -483,14 +383,12 @@ func (d *decoder) scalar(n *Node, out reflect.Value) bool {
|
||||
}
|
||||
}
|
||||
if resolved == nil {
|
||||
if out.CanAddr() {
|
||||
switch out.Kind() {
|
||||
case reflect.Interface, reflect.Ptr, reflect.Map, reflect.Slice:
|
||||
out.Set(reflect.Zero(out.Type()))
|
||||
return true
|
||||
}
|
||||
if out.Kind() == reflect.Map && !out.CanAddr() {
|
||||
resetMap(out)
|
||||
} else {
|
||||
out.Set(reflect.Zero(out.Type()))
|
||||
}
|
||||
return false
|
||||
return true
|
||||
}
|
||||
if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() {
|
||||
// We've resolved to exactly the type we want, so use that.
|
||||
@ -503,13 +401,13 @@ func (d *decoder) scalar(n *Node, out reflect.Value) bool {
|
||||
u, ok := out.Addr().Interface().(encoding.TextUnmarshaler)
|
||||
if ok {
|
||||
var text []byte
|
||||
if tag == binaryTag {
|
||||
if tag == yaml_BINARY_TAG {
|
||||
text = []byte(resolved.(string))
|
||||
} else {
|
||||
// We let any value be unmarshaled into TextUnmarshaler.
|
||||
// That might be more lax than we'd like, but the
|
||||
// TextUnmarshaler itself should bowl out any dubious values.
|
||||
text = []byte(n.Value)
|
||||
text = []byte(n.value)
|
||||
}
|
||||
err := u.UnmarshalText(text)
|
||||
if err != nil {
|
||||
@ -520,37 +418,47 @@ func (d *decoder) scalar(n *Node, out reflect.Value) bool {
|
||||
}
|
||||
switch out.Kind() {
|
||||
case reflect.String:
|
||||
if tag == binaryTag {
|
||||
if tag == yaml_BINARY_TAG {
|
||||
out.SetString(resolved.(string))
|
||||
return true
|
||||
}
|
||||
out.SetString(n.Value)
|
||||
return true
|
||||
if resolved != nil {
|
||||
out.SetString(n.value)
|
||||
return true
|
||||
}
|
||||
case reflect.Interface:
|
||||
out.Set(reflect.ValueOf(resolved))
|
||||
if resolved == nil {
|
||||
out.Set(reflect.Zero(out.Type()))
|
||||
} else if tag == yaml_TIMESTAMP_TAG {
|
||||
// It looks like a timestamp but for backward compatibility
|
||||
// reasons we set it as a string, so that code that unmarshals
|
||||
// timestamp-like values into interface{} will continue to
|
||||
// see a string and not a time.Time.
|
||||
// TODO(v3) Drop this.
|
||||
out.Set(reflect.ValueOf(n.value))
|
||||
} else {
|
||||
out.Set(reflect.ValueOf(resolved))
|
||||
}
|
||||
return true
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
// This used to work in v2, but it's very unfriendly.
|
||||
isDuration := out.Type() == durationType
|
||||
|
||||
switch resolved := resolved.(type) {
|
||||
case int:
|
||||
if !isDuration && !out.OverflowInt(int64(resolved)) {
|
||||
if !out.OverflowInt(int64(resolved)) {
|
||||
out.SetInt(int64(resolved))
|
||||
return true
|
||||
}
|
||||
case int64:
|
||||
if !isDuration && !out.OverflowInt(resolved) {
|
||||
if !out.OverflowInt(resolved) {
|
||||
out.SetInt(resolved)
|
||||
return true
|
||||
}
|
||||
case uint64:
|
||||
if !isDuration && resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
|
||||
if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
|
||||
out.SetInt(int64(resolved))
|
||||
return true
|
||||
}
|
||||
case float64:
|
||||
if !isDuration && resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
|
||||
if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
|
||||
out.SetInt(int64(resolved))
|
||||
return true
|
||||
}
|
||||
@ -591,17 +499,6 @@ func (d *decoder) scalar(n *Node, out reflect.Value) bool {
|
||||
case bool:
|
||||
out.SetBool(resolved)
|
||||
return true
|
||||
case string:
|
||||
// This offers some compatibility with the 1.1 spec (https://yaml.org/type/bool.html).
|
||||
// It only works if explicitly attempting to unmarshal into a typed bool value.
|
||||
switch resolved {
|
||||
case "y", "Y", "yes", "Yes", "YES", "on", "On", "ON":
|
||||
out.SetBool(true)
|
||||
return true
|
||||
case "n", "N", "no", "No", "NO", "off", "Off", "OFF":
|
||||
out.SetBool(false)
|
||||
return true
|
||||
}
|
||||
}
|
||||
case reflect.Float32, reflect.Float64:
|
||||
switch resolved := resolved.(type) {
|
||||
@ -624,7 +521,13 @@ func (d *decoder) scalar(n *Node, out reflect.Value) bool {
|
||||
return true
|
||||
}
|
||||
case reflect.Ptr:
|
||||
panic("yaml internal error: please report the issue")
|
||||
if out.Type().Elem() == reflect.TypeOf(resolved) {
|
||||
// TODO DOes this make sense? When is out a Ptr except when decoding a nil value?
|
||||
elem := reflect.New(out.Type().Elem())
|
||||
elem.Elem().Set(reflect.ValueOf(resolved))
|
||||
out.Set(elem)
|
||||
return true
|
||||
}
|
||||
}
|
||||
d.terror(n, tag, out)
|
||||
return false
|
||||
@ -637,8 +540,8 @@ func settableValueOf(i interface{}) reflect.Value {
|
||||
return sv
|
||||
}
|
||||
|
||||
func (d *decoder) sequence(n *Node, out reflect.Value) (good bool) {
|
||||
l := len(n.Content)
|
||||
func (d *decoder) sequence(n *node, out reflect.Value) (good bool) {
|
||||
l := len(n.children)
|
||||
|
||||
var iface reflect.Value
|
||||
switch out.Kind() {
|
||||
@ -653,7 +556,7 @@ func (d *decoder) sequence(n *Node, out reflect.Value) (good bool) {
|
||||
iface = out
|
||||
out = settableValueOf(make([]interface{}, l))
|
||||
default:
|
||||
d.terror(n, seqTag, out)
|
||||
d.terror(n, yaml_SEQ_TAG, out)
|
||||
return false
|
||||
}
|
||||
et := out.Type().Elem()
|
||||
@ -661,7 +564,7 @@ func (d *decoder) sequence(n *Node, out reflect.Value) (good bool) {
|
||||
j := 0
|
||||
for i := 0; i < l; i++ {
|
||||
e := reflect.New(et).Elem()
|
||||
if ok := d.unmarshal(n.Content[i], e); ok {
|
||||
if ok := d.unmarshal(n.children[i], e); ok {
|
||||
out.Index(j).Set(e)
|
||||
j++
|
||||
}
|
||||
@ -675,65 +578,51 @@ func (d *decoder) sequence(n *Node, out reflect.Value) (good bool) {
|
||||
return true
|
||||
}
|
||||
|
||||
func (d *decoder) mapping(n *Node, out reflect.Value) (good bool) {
|
||||
l := len(n.Content)
|
||||
if d.uniqueKeys {
|
||||
nerrs := len(d.terrors)
|
||||
for i := 0; i < l; i += 2 {
|
||||
ni := n.Content[i]
|
||||
for j := i + 2; j < l; j += 2 {
|
||||
nj := n.Content[j]
|
||||
if ni.Kind == nj.Kind && ni.Value == nj.Value {
|
||||
d.terrors = append(d.terrors, fmt.Sprintf("line %d: mapping key %#v already defined at line %d", nj.Line, nj.Value, ni.Line))
|
||||
}
|
||||
}
|
||||
}
|
||||
if len(d.terrors) > nerrs {
|
||||
return false
|
||||
}
|
||||
}
|
||||
func (d *decoder) mapping(n *node, out reflect.Value) (good bool) {
|
||||
switch out.Kind() {
|
||||
case reflect.Struct:
|
||||
return d.mappingStruct(n, out)
|
||||
case reflect.Slice:
|
||||
return d.mappingSlice(n, out)
|
||||
case reflect.Map:
|
||||
// okay
|
||||
case reflect.Interface:
|
||||
iface := out
|
||||
if isStringMap(n) {
|
||||
out = reflect.MakeMap(d.stringMapType)
|
||||
if d.mapType.Kind() == reflect.Map {
|
||||
iface := out
|
||||
out = reflect.MakeMap(d.mapType)
|
||||
iface.Set(out)
|
||||
} else {
|
||||
out = reflect.MakeMap(d.generalMapType)
|
||||
slicev := reflect.New(d.mapType).Elem()
|
||||
if !d.mappingSlice(n, slicev) {
|
||||
return false
|
||||
}
|
||||
out.Set(slicev)
|
||||
return true
|
||||
}
|
||||
iface.Set(out)
|
||||
default:
|
||||
d.terror(n, mapTag, out)
|
||||
d.terror(n, yaml_MAP_TAG, out)
|
||||
return false
|
||||
}
|
||||
|
||||
outt := out.Type()
|
||||
kt := outt.Key()
|
||||
et := outt.Elem()
|
||||
|
||||
stringMapType := d.stringMapType
|
||||
generalMapType := d.generalMapType
|
||||
if outt.Elem() == ifaceType {
|
||||
if outt.Key().Kind() == reflect.String {
|
||||
d.stringMapType = outt
|
||||
} else if outt.Key() == ifaceType {
|
||||
d.generalMapType = outt
|
||||
}
|
||||
mapType := d.mapType
|
||||
if outt.Key() == ifaceType && outt.Elem() == ifaceType {
|
||||
d.mapType = outt
|
||||
}
|
||||
|
||||
if out.IsNil() {
|
||||
out.Set(reflect.MakeMap(outt))
|
||||
}
|
||||
l := len(n.children)
|
||||
for i := 0; i < l; i += 2 {
|
||||
if isMerge(n.Content[i]) {
|
||||
d.merge(n.Content[i+1], out)
|
||||
if isMerge(n.children[i]) {
|
||||
d.merge(n.children[i+1], out)
|
||||
continue
|
||||
}
|
||||
k := reflect.New(kt).Elem()
|
||||
if d.unmarshal(n.Content[i], k) {
|
||||
if d.unmarshal(n.children[i], k) {
|
||||
kkind := k.Kind()
|
||||
if kkind == reflect.Interface {
|
||||
kkind = k.Elem().Kind()
|
||||
@ -742,34 +631,61 @@ func (d *decoder) mapping(n *Node, out reflect.Value) (good bool) {
|
||||
failf("invalid map key: %#v", k.Interface())
|
||||
}
|
||||
e := reflect.New(et).Elem()
|
||||
if d.unmarshal(n.Content[i+1], e) {
|
||||
out.SetMapIndex(k, e)
|
||||
if d.unmarshal(n.children[i+1], e) {
|
||||
d.setMapIndex(n.children[i+1], out, k, e)
|
||||
}
|
||||
}
|
||||
}
|
||||
d.stringMapType = stringMapType
|
||||
d.generalMapType = generalMapType
|
||||
d.mapType = mapType
|
||||
return true
|
||||
}
|
||||
|
||||
func isStringMap(n *Node) bool {
|
||||
if n.Kind != MappingNode {
|
||||
func (d *decoder) setMapIndex(n *node, out, k, v reflect.Value) {
|
||||
if d.strict && out.MapIndex(k) != zeroValue {
|
||||
d.terrors = append(d.terrors, fmt.Sprintf("line %d: key %#v already set in map", n.line+1, k.Interface()))
|
||||
return
|
||||
}
|
||||
out.SetMapIndex(k, v)
|
||||
}
|
||||
|
||||
func (d *decoder) mappingSlice(n *node, out reflect.Value) (good bool) {
|
||||
outt := out.Type()
|
||||
if outt.Elem() != mapItemType {
|
||||
d.terror(n, yaml_MAP_TAG, out)
|
||||
return false
|
||||
}
|
||||
l := len(n.Content)
|
||||
for i := 0; i < l; i++ {
|
||||
if n.Content[i].ShortTag() != strTag {
|
||||
return false
|
||||
|
||||
mapType := d.mapType
|
||||
d.mapType = outt
|
||||
|
||||
var slice []MapItem
|
||||
var l = len(n.children)
|
||||
for i := 0; i < l; i += 2 {
|
||||
if isMerge(n.children[i]) {
|
||||
d.merge(n.children[i+1], out)
|
||||
continue
|
||||
}
|
||||
item := MapItem{}
|
||||
k := reflect.ValueOf(&item.Key).Elem()
|
||||
if d.unmarshal(n.children[i], k) {
|
||||
v := reflect.ValueOf(&item.Value).Elem()
|
||||
if d.unmarshal(n.children[i+1], v) {
|
||||
slice = append(slice, item)
|
||||
}
|
||||
}
|
||||
}
|
||||
out.Set(reflect.ValueOf(slice))
|
||||
d.mapType = mapType
|
||||
return true
|
||||
}
|
||||
|
||||
func (d *decoder) mappingStruct(n *Node, out reflect.Value) (good bool) {
|
||||
func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) {
|
||||
sinfo, err := getStructInfo(out.Type())
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
name := settableValueOf("")
|
||||
l := len(n.children)
|
||||
|
||||
var inlineMap reflect.Value
|
||||
var elemType reflect.Type
|
||||
@ -779,30 +695,23 @@ func (d *decoder) mappingStruct(n *Node, out reflect.Value) (good bool) {
|
||||
elemType = inlineMap.Type().Elem()
|
||||
}
|
||||
|
||||
for _, index := range sinfo.InlineUnmarshalers {
|
||||
field := d.fieldByIndex(n, out, index)
|
||||
d.prepare(n, field)
|
||||
}
|
||||
|
||||
var doneFields []bool
|
||||
if d.uniqueKeys {
|
||||
if d.strict {
|
||||
doneFields = make([]bool, len(sinfo.FieldsList))
|
||||
}
|
||||
name := settableValueOf("")
|
||||
l := len(n.Content)
|
||||
for i := 0; i < l; i += 2 {
|
||||
ni := n.Content[i]
|
||||
ni := n.children[i]
|
||||
if isMerge(ni) {
|
||||
d.merge(n.Content[i+1], out)
|
||||
d.merge(n.children[i+1], out)
|
||||
continue
|
||||
}
|
||||
if !d.unmarshal(ni, name) {
|
||||
continue
|
||||
}
|
||||
if info, ok := sinfo.FieldsMap[name.String()]; ok {
|
||||
if d.uniqueKeys {
|
||||
if d.strict {
|
||||
if doneFields[info.Id] {
|
||||
d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s already set in type %s", ni.Line, name.String(), out.Type()))
|
||||
d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s already set in type %s", ni.line+1, name.String(), out.Type()))
|
||||
continue
|
||||
}
|
||||
doneFields[info.Id] = true
|
||||
@ -811,18 +720,18 @@ func (d *decoder) mappingStruct(n *Node, out reflect.Value) (good bool) {
|
||||
if info.Inline == nil {
|
||||
field = out.Field(info.Num)
|
||||
} else {
|
||||
field = d.fieldByIndex(n, out, info.Inline)
|
||||
field = out.FieldByIndex(info.Inline)
|
||||
}
|
||||
d.unmarshal(n.Content[i+1], field)
|
||||
d.unmarshal(n.children[i+1], field)
|
||||
} else if sinfo.InlineMap != -1 {
|
||||
if inlineMap.IsNil() {
|
||||
inlineMap.Set(reflect.MakeMap(inlineMap.Type()))
|
||||
}
|
||||
value := reflect.New(elemType).Elem()
|
||||
d.unmarshal(n.Content[i+1], value)
|
||||
inlineMap.SetMapIndex(name, value)
|
||||
} else if d.knownFields {
|
||||
d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in type %s", ni.Line, name.String(), out.Type()))
|
||||
d.unmarshal(n.children[i+1], value)
|
||||
d.setMapIndex(n.children[i+1], inlineMap, name, value)
|
||||
} else if d.strict {
|
||||
d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in type %s", ni.line+1, name.String(), out.Type()))
|
||||
}
|
||||
}
|
||||
return true
|
||||
@ -832,24 +741,26 @@ func failWantMap() {
|
||||
failf("map merge requires map or sequence of maps as the value")
|
||||
}
|
||||
|
||||
func (d *decoder) merge(n *Node, out reflect.Value) {
|
||||
switch n.Kind {
|
||||
case MappingNode:
|
||||
func (d *decoder) merge(n *node, out reflect.Value) {
|
||||
switch n.kind {
|
||||
case mappingNode:
|
||||
d.unmarshal(n, out)
|
||||
case AliasNode:
|
||||
if n.Alias != nil && n.Alias.Kind != MappingNode {
|
||||
case aliasNode:
|
||||
an, ok := d.doc.anchors[n.value]
|
||||
if ok && an.kind != mappingNode {
|
||||
failWantMap()
|
||||
}
|
||||
d.unmarshal(n, out)
|
||||
case SequenceNode:
|
||||
case sequenceNode:
|
||||
// Step backwards as earlier nodes take precedence.
|
||||
for i := len(n.Content) - 1; i >= 0; i-- {
|
||||
ni := n.Content[i]
|
||||
if ni.Kind == AliasNode {
|
||||
if ni.Alias != nil && ni.Alias.Kind != MappingNode {
|
||||
for i := len(n.children) - 1; i >= 0; i-- {
|
||||
ni := n.children[i]
|
||||
if ni.kind == aliasNode {
|
||||
an, ok := d.doc.anchors[ni.value]
|
||||
if ok && an.kind != mappingNode {
|
||||
failWantMap()
|
||||
}
|
||||
} else if ni.Kind != MappingNode {
|
||||
} else if ni.kind != mappingNode {
|
||||
failWantMap()
|
||||
}
|
||||
d.unmarshal(ni, out)
|
||||
@ -859,6 +770,6 @@ func (d *decoder) merge(n *Node, out reflect.Value) {
|
||||
}
|
||||
}
|
||||
|
||||
func isMerge(n *Node) bool {
|
||||
return n.Kind == ScalarNode && n.Value == "<<" && (n.Tag == "" || n.Tag == "!" || shortTag(n.Tag) == mergeTag)
|
||||
func isMerge(n *node) bool {
|
||||
return n.kind == scalarNode && n.value == "<<" && (n.implicit == true || n.tag == yaml_MERGE_TAG)
|
||||
}
|
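
The decode.go hunks above replace yaml.v3's exported Node tree and its knownFields/uniqueKeys switches with yaml.v2's unexported node type and a single strict flag. As a point of reference, a standalone sketch (not part of this commit) of how each package exposes that behaviour to callers; the Task type and document are hypothetical, used only for illustration:

```go
package main

import (
	"bytes"
	"fmt"

	yamlv2 "gopkg.in/yaml.v2"
	yamlv3 "gopkg.in/yaml.v3"
)

type Task struct {
	Name string `yaml:"name"`
}

// The document carries a key that Task does not declare.
var doc = []byte("name: build\nunknown: true\n")

func main() {
	// yaml.v2: the decoder's strict flag (seen in the diff above) is enabled by
	// UnmarshalStrict, which turns unknown struct fields and duplicate map keys
	// into errors instead of silently accepting them.
	var t2 Task
	fmt.Println("v2 strict:", yamlv2.UnmarshalStrict(doc, &t2))

	// yaml.v3 splits the same idea: KnownFields(true) on the Decoder rejects
	// unknown struct fields, while duplicate-key checking (uniqueKeys) is on
	// by default in the decoder itself.
	dec := yamlv3.NewDecoder(bytes.NewReader(doc))
	dec.KnownFields(true)
	var t3 Task
	fmt.Println("v3 known fields:", dec.Decode(&t3))
}
```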
vendor/gopkg.in/yaml.v3/emitterc.go → vendor/gopkg.in/yaml.v2/emitterc.go (generated, vendored; 320 lines changed)
@ -1,25 +1,3 @@
|
||||
//
|
||||
// Copyright (c) 2011-2019 Canonical Ltd
|
||||
// Copyright (c) 2006-2010 Kirill Simonov
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
// this software and associated documentation files (the "Software"), to deal in
|
||||
// the Software without restriction, including without limitation the rights to
|
||||
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
// of the Software, and to permit persons to whom the Software is furnished to do
|
||||
// so, subject to the following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package yaml
|
||||
|
||||
import (
|
||||
@ -65,13 +43,8 @@ func put_break(emitter *yaml_emitter_t) bool {
|
||||
default:
|
||||
panic("unknown line break setting")
|
||||
}
|
||||
if emitter.column == 0 {
|
||||
emitter.space_above = true
|
||||
}
|
||||
emitter.column = 0
|
||||
emitter.line++
|
||||
// [Go] Do this here and below and drop from everywhere else (see commented lines).
|
||||
emitter.indention = true
|
||||
return true
|
||||
}
|
||||
|
||||
@ -124,13 +97,8 @@ func write_break(emitter *yaml_emitter_t, s []byte, i *int) bool {
|
||||
if !write(emitter, s, i) {
|
||||
return false
|
||||
}
|
||||
if emitter.column == 0 {
|
||||
emitter.space_above = true
|
||||
}
|
||||
emitter.column = 0
|
||||
emitter.line++
|
||||
// [Go] Do this here and above and drop from everywhere else (see commented lines).
|
||||
emitter.indention = true
|
||||
}
|
||||
return true
|
||||
}
|
||||
@ -260,22 +228,16 @@ func yaml_emitter_state_machine(emitter *yaml_emitter_t, event *yaml_event_t) bo
|
||||
return yaml_emitter_emit_document_end(emitter, event)
|
||||
|
||||
case yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE:
|
||||
return yaml_emitter_emit_flow_sequence_item(emitter, event, true, false)
|
||||
|
||||
case yaml_EMIT_FLOW_SEQUENCE_TRAIL_ITEM_STATE:
|
||||
return yaml_emitter_emit_flow_sequence_item(emitter, event, false, true)
|
||||
return yaml_emitter_emit_flow_sequence_item(emitter, event, true)
|
||||
|
||||
case yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE:
|
||||
return yaml_emitter_emit_flow_sequence_item(emitter, event, false, false)
|
||||
return yaml_emitter_emit_flow_sequence_item(emitter, event, false)
|
||||
|
||||
case yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE:
|
||||
return yaml_emitter_emit_flow_mapping_key(emitter, event, true, false)
|
||||
|
||||
case yaml_EMIT_FLOW_MAPPING_TRAIL_KEY_STATE:
|
||||
return yaml_emitter_emit_flow_mapping_key(emitter, event, false, true)
|
||||
return yaml_emitter_emit_flow_mapping_key(emitter, event, true)
|
||||
|
||||
case yaml_EMIT_FLOW_MAPPING_KEY_STATE:
|
||||
return yaml_emitter_emit_flow_mapping_key(emitter, event, false, false)
|
||||
return yaml_emitter_emit_flow_mapping_key(emitter, event, false)
|
||||
|
||||
case yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE:
|
||||
return yaml_emitter_emit_flow_mapping_value(emitter, event, true)
|
||||
@ -336,7 +298,6 @@ func yaml_emitter_emit_stream_start(emitter *yaml_emitter_t, event *yaml_event_t
|
||||
emitter.column = 0
|
||||
emitter.whitespace = true
|
||||
emitter.indention = true
|
||||
emitter.space_above = true
|
||||
|
||||
if emitter.encoding != yaml_UTF8_ENCODING {
|
||||
if !yaml_emitter_write_bom(emitter) {
|
||||
@ -431,22 +392,13 @@ func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, event *yaml_event
|
||||
if !yaml_emitter_write_indicator(emitter, []byte("---"), true, false, false) {
|
||||
return false
|
||||
}
|
||||
if emitter.canonical || true {
|
||||
if emitter.canonical {
|
||||
if !yaml_emitter_write_indent(emitter) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(emitter.head_comment) > 0 {
|
||||
if !yaml_emitter_process_head_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
if !put_break(emitter) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
emitter.state = yaml_EMIT_DOCUMENT_CONTENT_STATE
|
||||
return true
|
||||
}
|
||||
@ -473,20 +425,7 @@ func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, event *yaml_event
|
||||
// Expect the root node.
|
||||
func yaml_emitter_emit_document_content(emitter *yaml_emitter_t, event *yaml_event_t) bool {
|
||||
emitter.states = append(emitter.states, yaml_EMIT_DOCUMENT_END_STATE)
|
||||
|
||||
if !yaml_emitter_process_head_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
if !yaml_emitter_emit_node(emitter, event, true, false, false, false) {
|
||||
return false
|
||||
}
|
||||
if !yaml_emitter_process_line_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
if !yaml_emitter_process_foot_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
return yaml_emitter_emit_node(emitter, event, true, false, false, false)
|
||||
}
|
||||
|
||||
// Expect DOCUMENT-END.
|
||||
@ -497,14 +436,6 @@ func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t
|
||||
if !yaml_emitter_write_indent(emitter) {
|
||||
return false
|
||||
}
|
||||
if len(emitter.foot_comment) > 0 {
|
||||
if !put_break(emitter) {
|
||||
return false
|
||||
}
|
||||
if !yaml_emitter_process_foot_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
if !event.implicit {
|
||||
// [Go] Allocate the slice elsewhere.
|
||||
if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) {
|
||||
@ -523,7 +454,7 @@ func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t
|
||||
}
|
||||
|
||||
// Expect a flow item node.
|
||||
func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first, trail bool) bool {
|
||||
func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
|
||||
if first {
|
||||
if !yaml_emitter_write_indicator(emitter, []byte{'['}, true, true, false) {
|
||||
return false
|
||||
@ -549,62 +480,29 @@ func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_e
|
||||
if !yaml_emitter_write_indicator(emitter, []byte{']'}, false, false, false) {
|
||||
return false
|
||||
}
|
||||
if !yaml_emitter_process_line_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
if !yaml_emitter_process_foot_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
emitter.state = emitter.states[len(emitter.states)-1]
|
||||
emitter.states = emitter.states[:len(emitter.states)-1]
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
if !first && !trail {
|
||||
if !first {
|
||||
if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
if !yaml_emitter_process_head_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
if emitter.column == 0 {
|
||||
if !yaml_emitter_write_indent(emitter) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
if emitter.canonical || emitter.column > emitter.best_width {
|
||||
if !yaml_emitter_write_indent(emitter) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
if len(emitter.line_comment) > 0 || len(emitter.foot_comment) > 0 {
|
||||
emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_TRAIL_ITEM_STATE)
|
||||
} else {
|
||||
emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE)
|
||||
}
|
||||
if !yaml_emitter_emit_node(emitter, event, false, true, false, false) {
|
||||
return false
|
||||
}
|
||||
if len(emitter.line_comment) > 0 || len(emitter.foot_comment) > 0 {
|
||||
if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
if !yaml_emitter_process_line_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
if !yaml_emitter_process_foot_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE)
|
||||
return yaml_emitter_emit_node(emitter, event, false, true, false, false)
|
||||
}
|
||||
|
||||
// Expect a flow key node.
|
||||
func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first, trail bool) bool {
|
||||
func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
|
||||
if first {
|
||||
if !yaml_emitter_write_indicator(emitter, []byte{'{'}, true, true, false) {
|
||||
return false
|
||||
@ -630,32 +528,16 @@ func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_eve
|
||||
if !yaml_emitter_write_indicator(emitter, []byte{'}'}, false, false, false) {
|
||||
return false
|
||||
}
|
||||
if !yaml_emitter_process_line_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
if !yaml_emitter_process_foot_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
emitter.state = emitter.states[len(emitter.states)-1]
|
||||
emitter.states = emitter.states[:len(emitter.states)-1]
|
||||
return true
|
||||
}
|
||||
|
||||
if !first && !trail {
|
||||
if !first {
|
||||
if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
if !yaml_emitter_process_head_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
if emitter.column == 0 {
|
||||
if !yaml_emitter_write_indent(emitter) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
if emitter.canonical || emitter.column > emitter.best_width {
|
||||
if !yaml_emitter_write_indent(emitter) {
|
||||
return false
|
||||
@ -689,26 +571,8 @@ func yaml_emitter_emit_flow_mapping_value(emitter *yaml_emitter_t, event *yaml_e
|
||||
return false
|
||||
}
|
||||
}
|
||||
if len(emitter.line_comment) > 0 || len(emitter.foot_comment) > 0 {
|
||||
emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_TRAIL_KEY_STATE)
|
||||
} else {
|
||||
emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE)
|
||||
}
|
||||
if !yaml_emitter_emit_node(emitter, event, false, false, true, false) {
|
||||
return false
|
||||
}
|
||||
if len(emitter.line_comment) > 0 || len(emitter.foot_comment) > 0 {
|
||||
if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
if !yaml_emitter_process_line_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
if !yaml_emitter_process_foot_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE)
|
||||
return yaml_emitter_emit_node(emitter, event, false, false, true, false)
|
||||
}
|
||||
|
||||
// Expect a block item node.
|
||||
@ -725,9 +589,6 @@ func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_
|
||||
emitter.states = emitter.states[:len(emitter.states)-1]
|
||||
return true
|
||||
}
|
||||
if !yaml_emitter_process_head_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
if !yaml_emitter_write_indent(emitter) {
|
||||
return false
|
||||
}
|
||||
@ -735,16 +596,7 @@ func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_
|
||||
return false
|
||||
}
|
||||
emitter.states = append(emitter.states, yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE)
|
||||
if !yaml_emitter_emit_node(emitter, event, false, true, false, false) {
|
||||
return false
|
||||
}
|
||||
if !yaml_emitter_process_line_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
if !yaml_emitter_process_foot_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
return yaml_emitter_emit_node(emitter, event, false, true, false, false)
|
||||
}
|
||||
|
||||
// Expect a block key node.
|
||||
@ -761,14 +613,9 @@ func yaml_emitter_emit_block_mapping_key(emitter *yaml_emitter_t, event *yaml_ev
|
||||
emitter.states = emitter.states[:len(emitter.states)-1]
|
||||
return true
|
||||
}
|
||||
if !yaml_emitter_process_head_comment(emitter) {
|
||||
if !yaml_emitter_write_indent(emitter) {
|
||||
return false
|
||||
}
|
||||
if !first || emitter.states[len(emitter.states)-1] != yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE {
|
||||
if !yaml_emitter_write_indent(emitter) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
if yaml_emitter_check_simple_key(emitter) {
|
||||
emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE)
|
||||
return yaml_emitter_emit_node(emitter, event, false, false, true, true)
|
||||
@ -795,16 +642,7 @@ func yaml_emitter_emit_block_mapping_value(emitter *yaml_emitter_t, event *yaml_
|
||||
}
|
||||
}
|
||||
emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_KEY_STATE)
|
||||
if !yaml_emitter_emit_node(emitter, event, false, false, true, false) {
|
||||
return false
|
||||
}
|
||||
if !yaml_emitter_process_line_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
if !yaml_emitter_process_foot_comment(emitter) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
return yaml_emitter_emit_node(emitter, event, false, false, true, false)
|
||||
}
|
||||
|
||||
// Expect a node.
|
||||
@ -1070,68 +908,6 @@ func yaml_emitter_process_scalar(emitter *yaml_emitter_t) bool {
|
||||
panic("unknown scalar style")
|
||||
}
|
||||
|
||||
// Write a head comment.
|
||||
func yaml_emitter_process_head_comment(emitter *yaml_emitter_t) bool {
|
||||
if len(emitter.head_comment) == 0 {
|
||||
return true
|
||||
}
|
||||
space_above := emitter.space_above
|
||||
if !emitter.indention {
|
||||
if !put_break(emitter) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
if !space_above &&
|
||||
emitter.state != yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE &&
|
||||
emitter.state != yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE &&
|
||||
emitter.state != yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE &&
|
||||
emitter.state != yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE {
|
||||
if !put_break(emitter) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
if !yaml_emitter_write_indent(emitter) {
|
||||
return false
|
||||
}
|
||||
if !yaml_emitter_write_comment(emitter, emitter.head_comment) {
|
||||
return false
|
||||
}
|
||||
emitter.head_comment = emitter.head_comment[:0]
|
||||
return true
|
||||
}
|
||||
|
||||
// Write an line comment.
|
||||
func yaml_emitter_process_line_comment(emitter *yaml_emitter_t) bool {
|
||||
if len(emitter.line_comment) == 0 {
|
||||
return true
|
||||
}
|
||||
if !emitter.whitespace {
|
||||
if !put(emitter, ' ') {
|
||||
return false
|
||||
}
|
||||
}
|
||||
if !yaml_emitter_write_comment(emitter, emitter.line_comment) {
|
||||
return false
|
||||
}
|
||||
emitter.line_comment = emitter.line_comment[:0]
|
||||
return true
|
||||
}
|
||||
|
||||
// Write a foot comment.
|
||||
func yaml_emitter_process_foot_comment(emitter *yaml_emitter_t) bool {
|
||||
if len(emitter.foot_comment) == 0 {
|
||||
return true
|
||||
}
|
||||
if !yaml_emitter_write_indent(emitter) {
|
||||
return false
|
||||
}
|
||||
if !yaml_emitter_write_comment(emitter, emitter.foot_comment) {
|
||||
return false
|
||||
}
|
||||
emitter.foot_comment = emitter.foot_comment[:0]
|
||||
return true
|
||||
}
|
||||
|
||||
// Check if a %YAML directive is valid.
|
||||
func yaml_emitter_analyze_version_directive(emitter *yaml_emitter_t, version_directive *yaml_version_directive_t) bool {
|
||||
if version_directive.major != 1 || version_directive.minor != 1 {
|
||||
@ -1361,16 +1137,6 @@ func yaml_emitter_analyze_event(emitter *yaml_emitter_t, event *yaml_event_t) bo
|
||||
emitter.tag_data.suffix = nil
|
||||
emitter.scalar_data.value = nil
|
||||
|
||||
if len(event.head_comment) > 0 {
|
||||
emitter.head_comment = event.head_comment
|
||||
}
|
||||
if len(event.line_comment) > 0 {
|
||||
emitter.line_comment = event.line_comment
|
||||
}
|
||||
if len(event.foot_comment) > 0 {
|
||||
emitter.foot_comment = event.foot_comment
|
||||
}
|
||||
|
||||
switch event.typ {
|
||||
case yaml_ALIAS_EVENT:
|
||||
if !yaml_emitter_analyze_anchor(emitter, event.anchor, true) {
|
||||
@ -1448,8 +1214,7 @@ func yaml_emitter_write_indent(emitter *yaml_emitter_t) bool {
|
||||
}
|
||||
}
|
||||
emitter.whitespace = true
|
||||
//emitter.indention = true
|
||||
emitter.space_above = false
|
||||
emitter.indention = true
|
||||
return true
|
||||
}
|
||||
|
||||
@ -1576,7 +1341,7 @@ func yaml_emitter_write_plain_scalar(emitter *yaml_emitter_t, value []byte, allo
|
||||
if !write_break(emitter, value, &i) {
|
||||
return false
|
||||
}
|
||||
//emitter.indention = true
|
||||
emitter.indention = true
|
||||
breaks = true
|
||||
} else {
|
||||
if breaks {
|
||||
@ -1632,7 +1397,7 @@ func yaml_emitter_write_single_quoted_scalar(emitter *yaml_emitter_t, value []by
|
||||
if !write_break(emitter, value, &i) {
|
||||
return false
|
||||
}
|
||||
//emitter.indention = true
|
||||
emitter.indention = true
|
||||
breaks = true
|
||||
} else {
|
||||
if breaks {
|
||||
@ -1834,7 +1599,7 @@ func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bo
|
||||
if !put_break(emitter) {
|
||||
return false
|
||||
}
|
||||
//emitter.indention = true
|
||||
emitter.indention = true
|
||||
emitter.whitespace = true
|
||||
breaks := true
|
||||
for i := 0; i < len(value); {
|
||||
@ -1842,7 +1607,7 @@ func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bo
|
||||
if !write_break(emitter, value, &i) {
|
||||
return false
|
||||
}
|
||||
//emitter.indention = true
|
||||
emitter.indention = true
|
||||
breaks = true
|
||||
} else {
|
||||
if breaks {
|
||||
@ -1872,7 +1637,7 @@ func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) boo
|
||||
if !put_break(emitter) {
|
||||
return false
|
||||
}
|
||||
//emitter.indention = true
|
||||
emitter.indention = true
|
||||
emitter.whitespace = true
|
||||
|
||||
breaks := true
|
||||
@ -1893,7 +1658,7 @@ func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) boo
|
||||
if !write_break(emitter, value, &i) {
|
||||
return false
|
||||
}
|
||||
//emitter.indention = true
|
||||
emitter.indention = true
|
||||
breaks = true
|
||||
} else {
|
||||
if breaks {
|
||||
@ -1918,40 +1683,3 @@ func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) boo
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func yaml_emitter_write_comment(emitter *yaml_emitter_t, comment []byte) bool {
|
||||
breaks := false
|
||||
pound := false
|
||||
for i := 0; i < len(comment); {
|
||||
if is_break(comment, i) {
|
||||
if !write_break(emitter, comment, &i) {
|
||||
return false
|
||||
}
|
||||
//emitter.indention = true
|
||||
breaks = true
|
||||
pound = false
|
||||
} else {
|
||||
if breaks && !yaml_emitter_write_indent(emitter) {
|
||||
return false
|
||||
}
|
||||
if !pound {
|
||||
if comment[i] != '#' && (!put(emitter, '#') || !put(emitter, ' ')) {
|
||||
return false
|
||||
}
|
||||
pound = true
|
||||
}
|
||||
if !write(emitter, comment, &i) {
|
||||
return false
|
||||
}
|
||||
emitter.indention = false
|
||||
breaks = false
|
||||
}
|
||||
}
|
||||
if !breaks && !put_break(emitter) {
|
||||
return false
|
||||
}
|
||||
|
||||
emitter.whitespace = true
|
||||
//emitter.indention = true
|
||||
return true
|
||||
}
|
282
vendor/gopkg.in/yaml.v3/encode.go → vendor/gopkg.in/yaml.v2/encode.go
generated
vendored
@ -1,18 +1,3 @@
|
||||
//
|
||||
// Copyright (c) 2011-2019 Canonical Ltd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package yaml
|
||||
|
||||
import (
|
||||
@ -29,11 +14,12 @@ import (
|
||||
)
|
||||
|
||||
type encoder struct {
|
||||
emitter yaml_emitter_t
|
||||
event yaml_event_t
|
||||
out []byte
|
||||
flow bool
|
||||
indent int
|
||||
emitter yaml_emitter_t
|
||||
event yaml_event_t
|
||||
out []byte
|
||||
flow bool
|
||||
// doneInit holds whether the initial stream_start_event has been
|
||||
// emitted.
|
||||
doneInit bool
|
||||
}
|
||||
|
||||
@ -57,10 +43,6 @@ func (e *encoder) init() {
|
||||
if e.doneInit {
|
||||
return
|
||||
}
|
||||
if e.indent == 0 {
|
||||
e.indent = 4
|
||||
}
|
||||
e.emitter.best_indent = e.indent
|
||||
yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING)
|
||||
e.emit()
|
||||
e.doneInit = true
|
||||
@ -93,43 +75,27 @@ func (e *encoder) must(ok bool) {
|
||||
|
||||
func (e *encoder) marshalDoc(tag string, in reflect.Value) {
|
||||
e.init()
|
||||
var node *Node
|
||||
if in.IsValid() {
|
||||
node, _ = in.Interface().(*Node)
|
||||
}
|
||||
if node != nil && node.Kind == DocumentNode {
|
||||
e.nodev(in)
|
||||
} else {
|
||||
yaml_document_start_event_initialize(&e.event, nil, nil, true)
|
||||
e.emit()
|
||||
e.marshal(tag, in)
|
||||
yaml_document_end_event_initialize(&e.event, true)
|
||||
e.emit()
|
||||
}
|
||||
yaml_document_start_event_initialize(&e.event, nil, nil, true)
|
||||
e.emit()
|
||||
e.marshal(tag, in)
|
||||
yaml_document_end_event_initialize(&e.event, true)
|
||||
e.emit()
|
||||
}
|
||||
|
||||
func (e *encoder) marshal(tag string, in reflect.Value) {
|
||||
tag = shortTag(tag)
|
||||
if !in.IsValid() || in.Kind() == reflect.Ptr && in.IsNil() {
|
||||
e.nilv()
|
||||
return
|
||||
}
|
||||
iface := in.Interface()
|
||||
switch value := iface.(type) {
|
||||
case *Node:
|
||||
e.nodev(in)
|
||||
return
|
||||
case time.Time:
|
||||
e.timev(tag, in)
|
||||
return
|
||||
case *time.Time:
|
||||
e.timev(tag, in.Elem())
|
||||
return
|
||||
case time.Duration:
|
||||
e.stringv(tag, reflect.ValueOf(value.String()))
|
||||
return
|
||||
switch m := iface.(type) {
|
||||
case time.Time, *time.Time:
|
||||
// Although time.Time implements TextMarshaler,
|
||||
// we don't want to treat it as a string for YAML
|
||||
// purposes because YAML has special support for
|
||||
// timestamps.
|
||||
case Marshaler:
|
||||
v, err := value.MarshalYAML()
|
||||
v, err := m.MarshalYAML()
|
||||
if err != nil {
|
||||
fail(err)
|
||||
}
|
||||
@ -137,10 +103,9 @@ func (e *encoder) marshal(tag string, in reflect.Value) {
|
||||
e.nilv()
|
||||
return
|
||||
}
|
||||
e.marshal(tag, reflect.ValueOf(v))
|
||||
return
|
||||
in = reflect.ValueOf(v)
|
||||
case encoding.TextMarshaler:
|
||||
text, err := value.MarshalText()
|
||||
text, err := m.MarshalText()
|
||||
if err != nil {
|
||||
fail(err)
|
||||
}
|
||||
@ -155,15 +120,31 @@ func (e *encoder) marshal(tag string, in reflect.Value) {
|
||||
case reflect.Map:
|
||||
e.mapv(tag, in)
|
||||
case reflect.Ptr:
|
||||
e.marshal(tag, in.Elem())
|
||||
if in.Type() == ptrTimeType {
|
||||
e.timev(tag, in.Elem())
|
||||
} else {
|
||||
e.marshal(tag, in.Elem())
|
||||
}
|
||||
case reflect.Struct:
|
||||
e.structv(tag, in)
|
||||
if in.Type() == timeType {
|
||||
e.timev(tag, in)
|
||||
} else {
|
||||
e.structv(tag, in)
|
||||
}
|
||||
case reflect.Slice, reflect.Array:
|
||||
e.slicev(tag, in)
|
||||
if in.Type().Elem() == mapItemType {
|
||||
e.itemsv(tag, in)
|
||||
} else {
|
||||
e.slicev(tag, in)
|
||||
}
|
||||
case reflect.String:
|
||||
e.stringv(tag, in)
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
e.intv(tag, in)
|
||||
if in.Type() == durationType {
|
||||
e.stringv(tag, reflect.ValueOf(iface.(time.Duration).String()))
|
||||
} else {
|
||||
e.intv(tag, in)
|
||||
}
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
e.uintv(tag, in)
|
||||
case reflect.Float32, reflect.Float64:
|
||||
@ -186,21 +167,14 @@ func (e *encoder) mapv(tag string, in reflect.Value) {
|
||||
})
|
||||
}
|
||||
|
||||
func (e *encoder) fieldByIndex(v reflect.Value, index []int) (field reflect.Value) {
|
||||
for _, num := range index {
|
||||
for {
|
||||
if v.Kind() == reflect.Ptr {
|
||||
if v.IsNil() {
|
||||
return reflect.Value{}
|
||||
}
|
||||
v = v.Elem()
|
||||
continue
|
||||
}
|
||||
break
|
||||
func (e *encoder) itemsv(tag string, in reflect.Value) {
|
||||
e.mappingv(tag, func() {
|
||||
slice := in.Convert(reflect.TypeOf([]MapItem{})).Interface().([]MapItem)
|
||||
for _, item := range slice {
|
||||
e.marshal("", reflect.ValueOf(item.Key))
|
||||
e.marshal("", reflect.ValueOf(item.Value))
|
||||
}
|
||||
v = v.Field(num)
|
||||
}
|
||||
return v
|
||||
})
|
||||
}
|
||||
|
||||
func (e *encoder) structv(tag string, in reflect.Value) {
|
||||
@ -214,10 +188,7 @@ func (e *encoder) structv(tag string, in reflect.Value) {
|
||||
if info.Inline == nil {
|
||||
value = in.Field(info.Num)
|
||||
} else {
|
||||
value = e.fieldByIndex(in, info.Inline)
|
||||
if !value.IsValid() {
|
||||
continue
|
||||
}
|
||||
value = in.FieldByIndex(info.Inline)
|
||||
}
|
||||
if info.OmitEmpty && isZero(value) {
|
||||
continue
|
||||
@ -234,7 +205,7 @@ func (e *encoder) structv(tag string, in reflect.Value) {
|
||||
sort.Sort(keys)
|
||||
for _, k := range keys {
|
||||
if _, found := sinfo.FieldsMap[k.String()]; found {
|
||||
panic(fmt.Sprintf("cannot have key %q in inlined map: conflicts with struct field", k.String()))
|
||||
panic(fmt.Sprintf("Can't have key %q in inlined map; conflicts with struct field", k.String()))
|
||||
}
|
||||
e.marshal("", k)
|
||||
e.flow = false
|
||||
@ -304,7 +275,7 @@ func (e *encoder) stringv(tag string, in reflect.Value) {
|
||||
canUsePlain := true
|
||||
switch {
|
||||
case !utf8.ValidString(s):
|
||||
if tag == binaryTag {
|
||||
if tag == yaml_BINARY_TAG {
|
||||
failf("explicitly tagged !!binary data must be base64-encoded")
|
||||
}
|
||||
if tag != "" {
|
||||
@ -312,14 +283,14 @@ func (e *encoder) stringv(tag string, in reflect.Value) {
|
||||
}
|
||||
// It can't be encoded directly as YAML so use a binary tag
|
||||
// and encode it as base64.
|
||||
tag = binaryTag
|
||||
tag = yaml_BINARY_TAG
|
||||
s = encodeBase64(s)
|
||||
case tag == "":
|
||||
// Check to see if it would resolve to a specific
|
||||
// tag when encoded unquoted. If it doesn't,
|
||||
// there's no need to quote it.
|
||||
rtag, _ := resolve("", s)
|
||||
canUsePlain = rtag == strTag && !isBase60Float(s)
|
||||
canUsePlain = rtag == yaml_STR_TAG && !isBase60Float(s)
|
||||
}
|
||||
// Note: it's possible for user code to emit invalid YAML
|
||||
// if they explicitly specify a tag and a string containing
|
||||
@ -332,7 +303,7 @@ func (e *encoder) stringv(tag string, in reflect.Value) {
|
||||
default:
|
||||
style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
|
||||
}
|
||||
e.emitScalar(s, "", tag, style, nil, nil, nil)
|
||||
e.emitScalar(s, "", tag, style)
|
||||
}
|
||||
|
||||
func (e *encoder) boolv(tag string, in reflect.Value) {
|
||||
@ -342,23 +313,23 @@ func (e *encoder) boolv(tag string, in reflect.Value) {
|
||||
} else {
|
||||
s = "false"
|
||||
}
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil)
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||
}
|
||||
|
||||
func (e *encoder) intv(tag string, in reflect.Value) {
|
||||
s := strconv.FormatInt(in.Int(), 10)
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil)
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||
}
|
||||
|
||||
func (e *encoder) uintv(tag string, in reflect.Value) {
|
||||
s := strconv.FormatUint(in.Uint(), 10)
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil)
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||
}
|
||||
|
||||
func (e *encoder) timev(tag string, in reflect.Value) {
|
||||
t := in.Interface().(time.Time)
|
||||
s := t.Format(time.RFC3339Nano)
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil)
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||
}
|
||||
|
||||
func (e *encoder) floatv(tag string, in reflect.Value) {
|
||||
@ -377,148 +348,15 @@ func (e *encoder) floatv(tag string, in reflect.Value) {
|
||||
case "NaN":
|
||||
s = ".nan"
|
||||
}
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil)
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||
}
|
||||
|
||||
func (e *encoder) nilv() {
|
||||
e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE, nil, nil, nil)
|
||||
e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE)
|
||||
}
|
||||
|
||||
func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t, head, line, foot []byte) {
|
||||
// TODO Kill this function. Replace all initialize calls by their underlining Go literals.
|
||||
func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t) {
|
||||
implicit := tag == ""
|
||||
if !implicit {
|
||||
tag = longTag(tag)
|
||||
}
|
||||
e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style))
|
||||
e.event.head_comment = head
|
||||
e.event.line_comment = line
|
||||
e.event.foot_comment = foot
|
||||
e.emit()
|
||||
}
|
||||
|
||||
func (e *encoder) nodev(in reflect.Value) {
|
||||
e.node(in.Interface().(*Node))
|
||||
}
|
||||
|
||||
func (e *encoder) node(node *Node) {
|
||||
// If the tag was not explicitly requested, and dropping it won't change the
|
||||
// implicit tag of the value, don't include it in the presentation.
|
||||
var tag = node.Tag
|
||||
var stag = shortTag(tag)
|
||||
var rtag string
|
||||
var forceQuoting bool
|
||||
if tag != "" && node.Style&TaggedStyle == 0 {
|
||||
if node.Kind == ScalarNode {
|
||||
if stag == strTag && node.Style&(SingleQuotedStyle|DoubleQuotedStyle|LiteralStyle|FoldedStyle) != 0 {
|
||||
tag = ""
|
||||
} else {
|
||||
rtag, _ = resolve("", node.Value)
|
||||
if rtag == stag {
|
||||
tag = ""
|
||||
} else if stag == strTag {
|
||||
tag = ""
|
||||
forceQuoting = true
|
||||
}
|
||||
}
|
||||
} else {
|
||||
switch node.Kind {
|
||||
case MappingNode:
|
||||
rtag = mapTag
|
||||
case SequenceNode:
|
||||
rtag = seqTag
|
||||
}
|
||||
if rtag == stag {
|
||||
tag = ""
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
switch node.Kind {
|
||||
case DocumentNode:
|
||||
yaml_document_start_event_initialize(&e.event, nil, nil, true)
|
||||
e.event.head_comment = []byte(node.HeadComment)
|
||||
e.emit()
|
||||
for _, node := range node.Content {
|
||||
e.node(node)
|
||||
}
|
||||
yaml_document_end_event_initialize(&e.event, true)
|
||||
e.event.foot_comment = []byte(node.FootComment)
|
||||
e.emit()
|
||||
|
||||
case SequenceNode:
|
||||
style := yaml_BLOCK_SEQUENCE_STYLE
|
||||
if node.Style&FlowStyle != 0 {
|
||||
style = yaml_FLOW_SEQUENCE_STYLE
|
||||
}
|
||||
e.must(yaml_sequence_start_event_initialize(&e.event, []byte(node.Anchor), []byte(tag), tag == "", style))
|
||||
e.event.head_comment = []byte(node.HeadComment)
|
||||
e.emit()
|
||||
for _, node := range node.Content {
|
||||
e.node(node)
|
||||
}
|
||||
e.must(yaml_sequence_end_event_initialize(&e.event))
|
||||
e.event.line_comment = []byte(node.LineComment)
|
||||
e.event.foot_comment = []byte(node.FootComment)
|
||||
e.emit()
|
||||
|
||||
case MappingNode:
|
||||
style := yaml_BLOCK_MAPPING_STYLE
|
||||
if node.Style&FlowStyle != 0 {
|
||||
style = yaml_FLOW_MAPPING_STYLE
|
||||
}
|
||||
yaml_mapping_start_event_initialize(&e.event, []byte(node.Anchor), []byte(tag), tag == "", style)
|
||||
e.event.head_comment = []byte(node.HeadComment)
|
||||
e.emit()
|
||||
|
||||
for i := 0; i+1 < len(node.Content); i += 2 {
|
||||
e.node(node.Content[i])
|
||||
e.node(node.Content[i+1])
|
||||
}
|
||||
|
||||
yaml_mapping_end_event_initialize(&e.event)
|
||||
e.event.line_comment = []byte(node.LineComment)
|
||||
e.event.foot_comment = []byte(node.FootComment)
|
||||
e.emit()
|
||||
|
||||
case AliasNode:
|
||||
yaml_alias_event_initialize(&e.event, []byte(node.Value))
|
||||
e.event.head_comment = []byte(node.HeadComment)
|
||||
e.event.line_comment = []byte(node.LineComment)
|
||||
e.event.foot_comment = []byte(node.FootComment)
|
||||
e.emit()
|
||||
|
||||
case ScalarNode:
|
||||
value := node.Value
|
||||
if !utf8.ValidString(value) {
|
||||
if tag == binaryTag {
|
||||
failf("explicitly tagged !!binary data must be base64-encoded")
|
||||
}
|
||||
if tag != "" {
|
||||
failf("cannot marshal invalid UTF-8 data as %s", shortTag(tag))
|
||||
}
|
||||
// It can't be encoded directly as YAML so use a binary tag
|
||||
// and encode it as base64.
|
||||
tag = binaryTag
|
||||
value = encodeBase64(value)
|
||||
}
|
||||
|
||||
style := yaml_PLAIN_SCALAR_STYLE
|
||||
switch {
|
||||
case node.Style&DoubleQuotedStyle != 0:
|
||||
style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
|
||||
case node.Style&SingleQuotedStyle != 0:
|
||||
style = yaml_SINGLE_QUOTED_SCALAR_STYLE
|
||||
case node.Style&LiteralStyle != 0:
|
||||
style = yaml_LITERAL_SCALAR_STYLE
|
||||
case node.Style&FoldedStyle != 0:
|
||||
style = yaml_FOLDED_SCALAR_STYLE
|
||||
case strings.Contains(value, "\n"):
|
||||
style = yaml_LITERAL_SCALAR_STYLE
|
||||
case forceQuoting:
|
||||
style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
|
||||
}
|
||||
|
||||
e.emitScalar(value, node.Anchor, tag, style, []byte(node.HeadComment), []byte(node.LineComment), []byte(node.FootComment))
|
||||
}
|
||||
}
|
2
vendor/gopkg.in/yaml.v3/go.mod → vendor/gopkg.in/yaml.v2/go.mod
generated
vendored
@ -1,4 +1,4 @@
module "gopkg.in/yaml.v3"
module "gopkg.in/yaml.v2"

require (
"gopkg.in/check.v1" v0.0.0-20161208181325-20d25e280405
95
vendor/gopkg.in/yaml.v3/parserc.go → vendor/gopkg.in/yaml.v2/parserc.go
generated
vendored
@ -1,25 +1,3 @@
|
||||
//
|
||||
// Copyright (c) 2011-2019 Canonical Ltd
|
||||
// Copyright (c) 2006-2010 Kirill Simonov
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
// this software and associated documentation files (the "Software"), to deal in
|
||||
// the Software without restriction, including without limitation the rights to
|
||||
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
// of the Software, and to permit persons to whom the Software is furnished to do
|
||||
// so, subject to the following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package yaml
|
||||
|
||||
import (
|
||||
@ -67,42 +45,11 @@ import (
|
||||
// Peek the next token in the token queue.
|
||||
func peek_token(parser *yaml_parser_t) *yaml_token_t {
|
||||
if parser.token_available || yaml_parser_fetch_more_tokens(parser) {
|
||||
token := &parser.tokens[parser.tokens_head]
|
||||
yaml_parser_unfold_comments(parser, token)
|
||||
return token
|
||||
return &parser.tokens[parser.tokens_head]
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// yaml_parser_unfold_comments walks through the comments queue and joins all
|
||||
// comments behind the position of the provided token into the respective
|
||||
// top-level comment slices in the parser.
|
||||
func yaml_parser_unfold_comments(parser *yaml_parser_t, token *yaml_token_t) {
|
||||
for parser.comments_head < len(parser.comments) && token.start_mark.index >= parser.comments[parser.comments_head].after.index {
|
||||
comment := &parser.comments[parser.comments_head]
|
||||
if len(comment.head) > 0 {
|
||||
if len(parser.head_comment) > 0 {
|
||||
parser.head_comment = append(parser.head_comment, '\n')
|
||||
}
|
||||
parser.head_comment = append(parser.head_comment, comment.head...)
|
||||
}
|
||||
if len(comment.foot) > 0 {
|
||||
if len(parser.foot_comment) > 0 {
|
||||
parser.foot_comment = append(parser.foot_comment, '\n')
|
||||
}
|
||||
parser.foot_comment = append(parser.foot_comment, comment.foot...)
|
||||
}
|
||||
if len(comment.line) > 0 {
|
||||
if len(parser.line_comment) > 0 {
|
||||
parser.line_comment = append(parser.line_comment, '\n')
|
||||
}
|
||||
parser.line_comment = append(parser.line_comment, comment.line...)
|
||||
}
|
||||
*comment = yaml_comment_t{}
|
||||
parser.comments_head++
|
||||
}
|
||||
}
|
||||
|
||||
// Remove the next token from the queue (must be called after peek_token).
|
||||
func skip_token(parser *yaml_parser_t) {
|
||||
parser.token_available = false
|
||||
@ -277,32 +224,10 @@ func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t
|
||||
parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
|
||||
parser.state = yaml_PARSE_BLOCK_NODE_STATE
|
||||
|
||||
var head_comment []byte
|
||||
if len(parser.head_comment) > 0 {
|
||||
// [Go] Scan the header comment backwards, and if an empty line is found, break
|
||||
// the header so the part before the last empty line goes into the
|
||||
// document header, while the bottom of it goes into a follow up event.
|
||||
for i := len(parser.head_comment) - 1; i > 0; i-- {
|
||||
if parser.head_comment[i] == '\n' {
|
||||
if i == len(parser.head_comment)-1 {
|
||||
head_comment = parser.head_comment[:i]
|
||||
parser.head_comment = parser.head_comment[i+1:]
|
||||
break
|
||||
} else if parser.head_comment[i-1] == '\n' {
|
||||
head_comment = parser.head_comment[:i-1]
|
||||
parser.head_comment = parser.head_comment[i+1:]
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_DOCUMENT_START_EVENT,
|
||||
start_mark: token.start_mark,
|
||||
end_mark: token.end_mark,
|
||||
|
||||
head_comment: head_comment,
|
||||
}
|
||||
|
||||
} else if token.typ != yaml_STREAM_END_TOKEN {
|
||||
@ -401,22 +326,10 @@ func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t)
|
||||
start_mark: start_mark,
|
||||
end_mark: end_mark,
|
||||
implicit: implicit,
|
||||
|
||||
foot_comment: parser.head_comment,
|
||||
}
|
||||
parser.head_comment = nil
|
||||
return true
|
||||
}
|
||||
|
||||
func yaml_parser_set_event_comments(parser *yaml_parser_t, event *yaml_event_t) {
|
||||
event.head_comment = parser.head_comment
|
||||
event.line_comment = parser.line_comment
|
||||
event.foot_comment = parser.foot_comment
|
||||
parser.head_comment = nil
|
||||
parser.line_comment = nil
|
||||
parser.foot_comment = nil
|
||||
}
|
||||
|
||||
// Parse the productions:
|
||||
// block_node_or_indentless_sequence ::=
|
||||
// ALIAS
|
||||
@ -460,7 +373,6 @@ func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, i
|
||||
end_mark: token.end_mark,
|
||||
anchor: token.value,
|
||||
}
|
||||
yaml_parser_set_event_comments(parser, event)
|
||||
skip_token(parser)
|
||||
return true
|
||||
}
|
||||
@ -574,7 +486,6 @@ func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, i
|
||||
quoted_implicit: quoted_implicit,
|
||||
style: yaml_style_t(token.style),
|
||||
}
|
||||
yaml_parser_set_event_comments(parser, event)
|
||||
skip_token(parser)
|
||||
return true
|
||||
}
|
||||
@ -591,7 +502,6 @@ func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, i
|
||||
implicit: implicit,
|
||||
style: yaml_style_t(yaml_FLOW_SEQUENCE_STYLE),
|
||||
}
|
||||
yaml_parser_set_event_comments(parser, event)
|
||||
return true
|
||||
}
|
||||
if token.typ == yaml_FLOW_MAPPING_START_TOKEN {
|
||||
@ -606,7 +516,6 @@ func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, i
|
||||
implicit: implicit,
|
||||
style: yaml_style_t(yaml_FLOW_MAPPING_STYLE),
|
||||
}
|
||||
yaml_parser_set_event_comments(parser, event)
|
||||
return true
|
||||
}
|
||||
if block && token.typ == yaml_BLOCK_SEQUENCE_START_TOKEN {
|
||||
@ -911,7 +820,6 @@ func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, event *yaml_ev
|
||||
start_mark: token.start_mark,
|
||||
end_mark: token.end_mark,
|
||||
}
|
||||
yaml_parser_set_event_comments(parser, event)
|
||||
|
||||
skip_token(parser)
|
||||
return true
|
||||
@ -1051,7 +959,6 @@ func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, event *yaml_event
|
||||
start_mark: token.start_mark,
|
||||
end_mark: token.end_mark,
|
||||
}
|
||||
yaml_parser_set_event_comments(parser, event)
|
||||
skip_token(parser)
|
||||
return true
|
||||
}
|
24
vendor/gopkg.in/yaml.v3/readerc.go → vendor/gopkg.in/yaml.v2/readerc.go
generated
vendored
@ -1,25 +1,3 @@
|
||||
//
|
||||
// Copyright (c) 2011-2019 Canonical Ltd
|
||||
// Copyright (c) 2006-2010 Kirill Simonov
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
// this software and associated documentation files (the "Software"), to deal in
|
||||
// the Software without restriction, including without limitation the rights to
|
||||
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
// of the Software, and to permit persons to whom the Software is furnished to do
|
||||
// so, subject to the following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package yaml
|
||||
|
||||
import (
|
||||
@ -117,7 +95,7 @@ func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool {
|
||||
|
||||
// [Go] This function was changed to guarantee the requested length size at EOF.
|
||||
// The fact we need to do this is pretty awful, but the description above implies
|
||||
// for that to be the case, and there are tests
|
||||
// for that to be the case, and there are tests
|
||||
|
||||
// If the EOF flag is set and the raw buffer is empty, do nothing.
|
||||
if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) {
|
140
vendor/gopkg.in/yaml.v3/resolve.go → vendor/gopkg.in/yaml.v2/resolve.go
generated
vendored
@ -1,18 +1,3 @@
|
||||
//
|
||||
// Copyright (c) 2011-2019 Canonical Ltd
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package yaml
|
||||
|
||||
import (
|
||||
@ -49,14 +34,18 @@ func init() {
|
||||
tag string
|
||||
l []string
|
||||
}{
|
||||
{true, boolTag, []string{"true", "True", "TRUE"}},
|
||||
{false, boolTag, []string{"false", "False", "FALSE"}},
|
||||
{nil, nullTag, []string{"", "~", "null", "Null", "NULL"}},
|
||||
{math.NaN(), floatTag, []string{".nan", ".NaN", ".NAN"}},
|
||||
{math.Inf(+1), floatTag, []string{".inf", ".Inf", ".INF"}},
|
||||
{math.Inf(+1), floatTag, []string{"+.inf", "+.Inf", "+.INF"}},
|
||||
{math.Inf(-1), floatTag, []string{"-.inf", "-.Inf", "-.INF"}},
|
||||
{"<<", mergeTag, []string{"<<"}},
|
||||
{true, yaml_BOOL_TAG, []string{"y", "Y", "yes", "Yes", "YES"}},
|
||||
{true, yaml_BOOL_TAG, []string{"true", "True", "TRUE"}},
|
||||
{true, yaml_BOOL_TAG, []string{"on", "On", "ON"}},
|
||||
{false, yaml_BOOL_TAG, []string{"n", "N", "no", "No", "NO"}},
|
||||
{false, yaml_BOOL_TAG, []string{"false", "False", "FALSE"}},
|
||||
{false, yaml_BOOL_TAG, []string{"off", "Off", "OFF"}},
|
||||
{nil, yaml_NULL_TAG, []string{"", "~", "null", "Null", "NULL"}},
|
||||
{math.NaN(), yaml_FLOAT_TAG, []string{".nan", ".NaN", ".NAN"}},
|
||||
{math.Inf(+1), yaml_FLOAT_TAG, []string{".inf", ".Inf", ".INF"}},
|
||||
{math.Inf(+1), yaml_FLOAT_TAG, []string{"+.inf", "+.Inf", "+.INF"}},
|
||||
{math.Inf(-1), yaml_FLOAT_TAG, []string{"-.inf", "-.Inf", "-.INF"}},
|
||||
{"<<", yaml_MERGE_TAG, []string{"<<"}},
|
||||
}
|
||||
|
||||
m := resolveMap
|
||||
@ -67,37 +56,11 @@ func init() {
|
||||
}
|
||||
}
|
||||
|
||||
const (
|
||||
nullTag = "!!null"
|
||||
boolTag = "!!bool"
|
||||
strTag = "!!str"
|
||||
intTag = "!!int"
|
||||
floatTag = "!!float"
|
||||
timestampTag = "!!timestamp"
|
||||
seqTag = "!!seq"
|
||||
mapTag = "!!map"
|
||||
binaryTag = "!!binary"
|
||||
mergeTag = "!!merge"
|
||||
)
|
||||
|
||||
var longTags = make(map[string]string)
|
||||
var shortTags = make(map[string]string)
|
||||
|
||||
func init() {
|
||||
for _, stag := range []string{nullTag, boolTag, strTag, intTag, floatTag, timestampTag, seqTag, mapTag, binaryTag, mergeTag} {
|
||||
ltag := longTag(stag)
|
||||
longTags[stag] = ltag
|
||||
shortTags[ltag] = stag
|
||||
}
|
||||
}
|
||||
|
||||
const longTagPrefix = "tag:yaml.org,2002:"
|
||||
|
||||
func shortTag(tag string) string {
|
||||
// TODO This can easily be made faster and produce less garbage.
|
||||
if strings.HasPrefix(tag, longTagPrefix) {
|
||||
if stag, ok := shortTags[tag]; ok {
|
||||
return stag
|
||||
}
|
||||
return "!!" + tag[len(longTagPrefix):]
|
||||
}
|
||||
return tag
|
||||
@ -105,9 +68,6 @@ func shortTag(tag string) string {
|
||||
|
||||
func longTag(tag string) string {
|
||||
if strings.HasPrefix(tag, "!!") {
|
||||
if ltag, ok := longTags[tag]; ok {
|
||||
return ltag
|
||||
}
|
||||
return longTagPrefix + tag[2:]
|
||||
}
|
||||
return tag
|
||||
@ -115,33 +75,32 @@ func longTag(tag string) string {
|
||||
|
||||
func resolvableTag(tag string) bool {
|
||||
switch tag {
|
||||
case "", strTag, boolTag, intTag, floatTag, nullTag, timestampTag:
|
||||
case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG, yaml_TIMESTAMP_TAG:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
var yamlStyleFloat = regexp.MustCompile(`^[-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?$`)
|
||||
var yamlStyleFloat = regexp.MustCompile(`^[-+]?[0-9]*\.?[0-9]+([eE][-+][0-9]+)?$`)
|
||||
|
||||
func resolve(tag string, in string) (rtag string, out interface{}) {
|
||||
tag = shortTag(tag)
|
||||
if !resolvableTag(tag) {
|
||||
return tag, in
|
||||
}
|
||||
|
||||
defer func() {
|
||||
switch tag {
|
||||
case "", rtag, strTag, binaryTag:
|
||||
case "", rtag, yaml_STR_TAG, yaml_BINARY_TAG:
|
||||
return
|
||||
case floatTag:
|
||||
if rtag == intTag {
|
||||
case yaml_FLOAT_TAG:
|
||||
if rtag == yaml_INT_TAG {
|
||||
switch v := out.(type) {
|
||||
case int64:
|
||||
rtag = floatTag
|
||||
rtag = yaml_FLOAT_TAG
|
||||
out = float64(v)
|
||||
return
|
||||
case int:
|
||||
rtag = floatTag
|
||||
rtag = yaml_FLOAT_TAG
|
||||
out = float64(v)
|
||||
return
|
||||
}
|
||||
@ -156,7 +115,7 @@ func resolve(tag string, in string) (rtag string, out interface{}) {
|
||||
if in != "" {
|
||||
hint = resolveTable[in[0]]
|
||||
}
|
||||
if hint != 0 && tag != strTag && tag != binaryTag {
|
||||
if hint != 0 && tag != yaml_STR_TAG && tag != yaml_BINARY_TAG {
|
||||
// Handle things we can lookup in a map.
|
||||
if item, ok := resolveMap[in]; ok {
|
||||
return item.tag, item.value
|
||||
@ -174,17 +133,17 @@ func resolve(tag string, in string) (rtag string, out interface{}) {
|
||||
// Not in the map, so maybe a normal float.
|
||||
floatv, err := strconv.ParseFloat(in, 64)
|
||||
if err == nil {
|
||||
return floatTag, floatv
|
||||
return yaml_FLOAT_TAG, floatv
|
||||
}
|
||||
|
||||
case 'D', 'S':
|
||||
// Int, float, or timestamp.
|
||||
// Only try values as a timestamp if the value is unquoted or there's an explicit
|
||||
// !!timestamp tag.
|
||||
if tag == "" || tag == timestampTag {
|
||||
if tag == "" || tag == yaml_TIMESTAMP_TAG {
|
||||
t, ok := parseTimestamp(in)
|
||||
if ok {
|
||||
return timestampTag, t
|
||||
return yaml_TIMESTAMP_TAG, t
|
||||
}
|
||||
}
|
||||
|
||||
@ -192,76 +151,49 @@ func resolve(tag string, in string) (rtag string, out interface{}) {
|
||||
intv, err := strconv.ParseInt(plain, 0, 64)
|
||||
if err == nil {
|
||||
if intv == int64(int(intv)) {
|
||||
return intTag, int(intv)
|
||||
return yaml_INT_TAG, int(intv)
|
||||
} else {
|
||||
return intTag, intv
|
||||
return yaml_INT_TAG, intv
|
||||
}
|
||||
}
|
||||
uintv, err := strconv.ParseUint(plain, 0, 64)
|
||||
if err == nil {
|
||||
return intTag, uintv
|
||||
return yaml_INT_TAG, uintv
|
||||
}
|
||||
if yamlStyleFloat.MatchString(plain) {
|
||||
floatv, err := strconv.ParseFloat(plain, 64)
|
||||
if err == nil {
|
||||
return floatTag, floatv
|
||||
return yaml_FLOAT_TAG, floatv
|
||||
}
|
||||
}
|
||||
if strings.HasPrefix(plain, "0b") {
|
||||
intv, err := strconv.ParseInt(plain[2:], 2, 64)
|
||||
if err == nil {
|
||||
if intv == int64(int(intv)) {
|
||||
return intTag, int(intv)
|
||||
return yaml_INT_TAG, int(intv)
|
||||
} else {
|
||||
return intTag, intv
|
||||
return yaml_INT_TAG, intv
|
||||
}
|
||||
}
|
||||
uintv, err := strconv.ParseUint(plain[2:], 2, 64)
|
||||
if err == nil {
|
||||
return intTag, uintv
|
||||
return yaml_INT_TAG, uintv
|
||||
}
|
||||
} else if strings.HasPrefix(plain, "-0b") {
|
||||
intv, err := strconv.ParseInt("-"+plain[3:], 2, 64)
|
||||
intv, err := strconv.ParseInt("-" + plain[3:], 2, 64)
|
||||
if err == nil {
|
||||
if true || intv == int64(int(intv)) {
|
||||
return intTag, int(intv)
|
||||
return yaml_INT_TAG, int(intv)
|
||||
} else {
|
||||
return intTag, intv
|
||||
}
|
||||
}
|
||||
}
|
||||
// Octals as introduced in version 1.2 of the spec.
|
||||
// Octals from the 1.1 spec, spelled as 0777, are still
|
||||
// decoded by default in v3 as well for compatibility.
|
||||
// May be dropped in v4 depending on how usage evolves.
|
||||
if strings.HasPrefix(plain, "0o") {
|
||||
intv, err := strconv.ParseInt(plain[2:], 8, 64)
|
||||
if err == nil {
|
||||
if intv == int64(int(intv)) {
|
||||
return intTag, int(intv)
|
||||
} else {
|
||||
return intTag, intv
|
||||
}
|
||||
}
|
||||
uintv, err := strconv.ParseUint(plain[2:], 8, 64)
|
||||
if err == nil {
|
||||
return intTag, uintv
|
||||
}
|
||||
} else if strings.HasPrefix(plain, "-0o") {
|
||||
intv, err := strconv.ParseInt("-"+plain[3:], 8, 64)
|
||||
if err == nil {
|
||||
if true || intv == int64(int(intv)) {
|
||||
return intTag, int(intv)
|
||||
} else {
|
||||
return intTag, intv
|
||||
return yaml_INT_TAG, intv
|
||||
}
|
||||
}
|
||||
}
|
||||
default:
|
||||
panic("internal error: missing handler for resolver table: " + string(rune(hint)) + " (with " + in + ")")
|
||||
panic("resolveTable item not yet handled: " + string(rune(hint)) + " (with " + in + ")")
|
||||
}
|
||||
}
|
||||
return strTag, in
|
||||
return yaml_STR_TAG, in
|
||||
}
|
||||
|
||||
// encodeBase64 encodes s as base64 that is broken up into multiple lines
|
229
vendor/gopkg.in/yaml.v3/scannerc.go → vendor/gopkg.in/yaml.v2/scannerc.go
generated
vendored
@ -1,25 +1,3 @@
|
||||
//
|
||||
// Copyright (c) 2011-2019 Canonical Ltd
|
||||
// Copyright (c) 2006-2010 Kirill Simonov
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
// this software and associated documentation files (the "Software"), to deal in
|
||||
// the Software without restriction, including without limitation the rights to
|
||||
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
// of the Software, and to permit persons to whom the Software is furnished to do
|
||||
// so, subject to the following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
package yaml
|
||||
|
||||
import (
|
||||
@ -651,11 +629,8 @@ func yaml_parser_fetch_more_tokens(parser *yaml_parser_t) bool {
|
||||
// Check if we really need to fetch more tokens.
|
||||
need_more_tokens := false
|
||||
|
||||
// [Go] When parsing flow items, force the queue to have at least
|
||||
// two items so that comments after commas may be associated
|
||||
// with the value being parsed before them.
|
||||
if parser.tokens_head == len(parser.tokens) || parser.flow_level > 0 && parser.tokens_head >= len(parser.tokens)-1 {
|
||||
// Queue is empty or has just one element inside a flow context.
|
||||
if parser.tokens_head == len(parser.tokens) {
|
||||
// Queue is empty.
|
||||
need_more_tokens = true
|
||||
} else {
|
||||
// Check if any potential simple key may occupy the head position.
|
||||
@ -687,7 +662,7 @@ func yaml_parser_fetch_more_tokens(parser *yaml_parser_t) bool {
|
||||
}
|
||||
|
||||
// The dispatcher for token fetchers.
|
||||
func yaml_parser_fetch_next_token(parser *yaml_parser_t) (ok bool) {
|
||||
func yaml_parser_fetch_next_token(parser *yaml_parser_t) bool {
|
||||
// Ensure that the buffer is initialized.
|
||||
if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
|
||||
return false
|
||||
@ -742,25 +717,6 @@ func yaml_parser_fetch_next_token(parser *yaml_parser_t) (ok bool) {
|
||||
return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_END_TOKEN)
|
||||
}
|
||||
|
||||
comment_mark := parser.mark
|
||||
if parser.flow_level > 0 && buf[pos] == ',' && len(parser.tokens) > 0 {
|
||||
// Associate any following comments with the prior token.
|
||||
comment_mark = parser.tokens[len(parser.tokens)-1].start_mark
|
||||
}
|
||||
defer func() {
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
if !yaml_parser_scan_line_comment(parser, comment_mark) {
|
||||
ok = false
|
||||
return
|
||||
}
|
||||
if !yaml_parser_scan_foot_comment(parser, comment_mark) {
|
||||
ok = false
|
||||
return
|
||||
}
|
||||
}()
|
||||
|
||||
// Is it the flow sequence start indicator?
|
||||
if buf[pos] == '[' {
|
||||
return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_SEQUENCE_START_TOKEN)
|
||||
@ -854,7 +810,7 @@ func yaml_parser_fetch_next_token(parser *yaml_parser_t) (ok bool) {
|
||||
// if it is followed by a non-space character.
|
||||
//
|
||||
// The last rule is more restrictive than the specification requires.
|
||||
// [Go] TODO Make this logic more reasonable.
|
||||
// [Go] Make this logic more reasonable.
|
||||
//switch parser.buffer[parser.buffer_pos] {
|
||||
//case '-', '?', ':', ',', '?', '-', ',', ':', ']', '[', '}', '{', '&', '#', '!', '*', '>', '|', '"', '\'', '@', '%', '-', '`':
|
||||
//}
|
||||
@ -1141,7 +1097,6 @@ func yaml_parser_fetch_document_indicator(parser *yaml_parser_t, typ yaml_token_
|
||||
|
||||
// Produce the FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
|
||||
func yaml_parser_fetch_flow_collection_start(parser *yaml_parser_t, typ yaml_token_type_t) bool {
|
||||
|
||||
// The indicators '[' and '{' may start a simple key.
|
||||
if !yaml_parser_save_simple_key(parser) {
|
||||
return false
|
||||
@ -1500,8 +1455,11 @@ func yaml_parser_scan_to_next_token(parser *yaml_parser_t) bool {
|
||||
|
||||
// Eat a comment until a line break.
|
||||
if parser.buffer[parser.buffer_pos] == '#' {
|
||||
if !yaml_parser_scan_head_comment(parser, parser.mark) {
|
||||
return false
|
||||
for !is_breakz(parser.buffer, parser.buffer_pos) {
|
||||
skip(parser)
|
||||
if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -1599,10 +1557,6 @@ func yaml_parser_scan_directive(parser *yaml_parser_t, token *yaml_token_t) bool
|
||||
}
|
||||
|
||||
if parser.buffer[parser.buffer_pos] == '#' {
|
||||
// [Go] Discard this inline comment for the time being.
|
||||
//if !yaml_parser_scan_line_comment(parser, start_mark) {
|
||||
// return false
|
||||
//}
|
||||
for !is_breakz(parser.buffer, parser.buffer_pos) {
|
||||
skip(parser)
|
||||
if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
|
||||
@ -2018,7 +1972,7 @@ func yaml_parser_scan_tag_uri(parser *yaml_parser_t, directive bool, head []byte
|
||||
// '0'-'9', 'A'-'Z', 'a'-'z', '_', '-', ';', '/', '?', ':', '@', '&',
|
||||
// '=', '+', '$', ',', '.', '!', '~', '*', '\'', '(', ')', '[', ']',
|
||||
// '%'.
|
||||
// [Go] TODO Convert this into more reasonable logic.
|
||||
// [Go] Convert this into more reasonable logic.
|
||||
for is_alpha(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == ';' ||
|
||||
parser.buffer[parser.buffer_pos] == '/' || parser.buffer[parser.buffer_pos] == '?' ||
|
||||
parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == '@' ||
|
||||
@ -2173,8 +2127,11 @@ func yaml_parser_scan_block_scalar(parser *yaml_parser_t, token *yaml_token_t, l
|
||||
}
|
||||
}
|
||||
if parser.buffer[parser.buffer_pos] == '#' {
|
||||
if !yaml_parser_scan_line_comment(parser, start_mark) {
|
||||
return false
|
||||
for !is_breakz(parser.buffer, parser.buffer_pos) {
|
||||
skip(parser)
|
||||
if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -2737,159 +2694,3 @@ func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) b
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func yaml_parser_scan_line_comment(parser *yaml_parser_t, after yaml_mark_t) bool {
|
||||
if parser.mark.column == 0 {
|
||||
return true
|
||||
}
|
||||
|
||||
parser.comments = append(parser.comments, yaml_comment_t{after: after})
|
||||
comment := &parser.comments[len(parser.comments)-1].line
|
||||
|
	for peek := 0; peek < 512; peek++ {
		if parser.unread < peek+1 && !yaml_parser_update_buffer(parser, peek+1) {
			break
		}
		if is_blank(parser.buffer, parser.buffer_pos+peek) {
			continue
		}
		if parser.buffer[parser.buffer_pos+peek] == '#' {
			if len(*comment) > 0 {
				*comment = append(*comment, '\n')
			}
			for !is_breakz(parser.buffer, parser.buffer_pos+peek) {
				*comment = append(*comment, parser.buffer[parser.buffer_pos+peek])
				peek++
				if parser.unread < peek+1 && !yaml_parser_update_buffer(parser, peek+1) {
					return false
				}
			}

			// Skip until after the consumed comment line.
			until := parser.buffer_pos + peek
			for parser.buffer_pos < until {
				if is_break(parser.buffer, parser.buffer_pos) {
					//break // Leave the break in the buffer so calling this function twice is safe.
					if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
						return false
					}
					skip_line(parser)
				} else {
					skip(parser)
				}
			}
		}
		break
	}
	return true
}

func yaml_parser_scan_head_comment(parser *yaml_parser_t, after yaml_mark_t) bool {
	parser.comments = append(parser.comments, yaml_comment_t{after: after})
	comment := &parser.comments[len(parser.comments)-1].head
	breaks := false
	for peek := 0; peek < 512; peek++ {
		if parser.unread < peek+1 && !yaml_parser_update_buffer(parser, peek+1) {
			break
		}
		if parser.buffer[parser.buffer_pos+peek] == 0 {
			break
		}
		if is_blank(parser.buffer, parser.buffer_pos+peek) {
			continue
		}
		if is_break(parser.buffer, parser.buffer_pos+peek) {
			if !breaks {
				*comment = append(*comment, '\n')
			}
			breaks = true
		} else if parser.buffer[parser.buffer_pos+peek] == '#' {
			if len(*comment) > 0 {
				*comment = append(*comment, '\n')
			}
			breaks = false
			for !is_breakz(parser.buffer, parser.buffer_pos+peek) {
				*comment = append(*comment, parser.buffer[parser.buffer_pos+peek])
				peek++
				if parser.unread < peek+1 && !yaml_parser_update_buffer(parser, peek+1) {
					return false
				}
			}

			// Skip until after the consumed comment line.
			until := parser.buffer_pos + peek
			for parser.buffer_pos < until {
				if is_break(parser.buffer, parser.buffer_pos) {
					if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
						return false
					}
					skip_line(parser)
				} else {
					skip(parser)
				}
			}
			peek = 0
		} else {
			break
		}
	}
	return true
}

func yaml_parser_scan_foot_comment(parser *yaml_parser_t, after yaml_mark_t) bool {
	parser.comments = append(parser.comments, yaml_comment_t{after: after})
	comment := &parser.comments[len(parser.comments)-1].foot
	original := *comment
	breaks := false
	peek := 0
	for ; peek < 32768; peek++ {
		if parser.unread < peek+1 && !yaml_parser_update_buffer(parser, peek+1) {
			break
		}
		c := parser.buffer[parser.buffer_pos+peek]
		if c == 0 {
			break
		}
		if is_blank(parser.buffer, parser.buffer_pos+peek) {
			continue
		}
		if is_break(parser.buffer, parser.buffer_pos+peek) {
			if breaks {
				break
			}
			breaks = true
		} else if c == '#' {
			if len(*comment) > 0 {
				*comment = append(*comment, '\n')
			}
			for !is_breakz(parser.buffer, parser.buffer_pos+peek) {
				*comment = append(*comment, parser.buffer[parser.buffer_pos+peek])
				peek++
				if parser.unread < peek+1 && !yaml_parser_update_buffer(parser, peek+1) {
					return false
				}
			}
			breaks = true
		} else if c == ']' || c == '}' {
			break
		} else {
			// Abort and allow that next line to have the comment as its header.
			*comment = original
			return true
		}
	}

	// Skip until after the consumed comment lines.
	until := parser.buffer_pos + peek
	for parser.buffer_pos < until {
		if is_break(parser.buffer, parser.buffer_pos) {
			if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
				return false
			}
			skip_line(parser)
		} else {
			skip(parser)
		}
	}
	return true
}
25 vendor/gopkg.in/yaml.v3/sorter.go → vendor/gopkg.in/yaml.v2/sorter.go generated vendored
@@ -1,18 +1,3 @@
//
// Copyright (c) 2011-2019 Canonical Ltd
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package yaml

import (
@@ -52,10 +37,8 @@ func (l keyList) Less(i, j int) bool {
return ak < bk
}
ar, br := []rune(a.String()), []rune(b.String())
digits := false
for i := 0; i < len(ar) && i < len(br); i++ {
if ar[i] == br[i] {
digits = unicode.IsDigit(ar[i])
continue
}
al := unicode.IsLetter(ar[i])
@@ -64,16 +47,12 @@ func (l keyList) Less(i, j int) bool {
return ar[i] < br[i]
}
if al || bl {
if digits {
return al
} else {
return bl
}
return bl
}
var ai, bi int
var an, bn int64
if ar[i] == '0' || br[i] == '0' {
for j := i - 1; j >= 0 && unicode.IsDigit(ar[j]); j-- {
for j := i-1; j >= 0 && unicode.IsDigit(ar[j]); j-- {
if ar[j] != '0' {
an = 1
bn = 1
26 vendor/gopkg.in/yaml.v2/writerc.go generated vendored Normal file
@@ -0,0 +1,26 @@
package yaml

// Set the writer error and return false.
func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool {
	emitter.error = yaml_WRITER_ERROR
	emitter.problem = problem
	return false
}

// Flush the output buffer.
func yaml_emitter_flush(emitter *yaml_emitter_t) bool {
	if emitter.write_handler == nil {
		panic("write handler not set")
	}

	// Check if the buffer is empty.
	if emitter.buffer_pos == 0 {
		return true
	}

	if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil {
		return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error())
	}
	emitter.buffer_pos = 0
	return true
}
302 vendor/gopkg.in/yaml.v3/yaml.go → vendor/gopkg.in/yaml.v2/yaml.go generated vendored
@@ -1,18 +1,3 @@
//
// Copyright (c) 2011-2019 Canonical Ltd
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package yaml implements YAML support for the Go language.
//
// Source code and other details for the project are available at GitHub:
@@ -28,16 +13,23 @@ import (
"reflect"
"strings"
"sync"
"unicode/utf8"
)

// The Unmarshaler interface may be implemented by types to customize their
// behavior when being unmarshaled from a YAML document.
type Unmarshaler interface {
UnmarshalYAML(value *Node) error
// MapSlice encodes and decodes as a YAML map.
// The order of keys is preserved when encoding and decoding.
type MapSlice []MapItem

// MapItem is an item in a MapSlice.
type MapItem struct {
Key, Value interface{}
}

type obsoleteUnmarshaler interface {
// The Unmarshaler interface may be implemented by types to customize their
// behavior when being unmarshaled from a YAML document. The UnmarshalYAML
// method receives a function that may be called to unmarshal the original
// YAML value into a field or variable. It is safe to call the unmarshal
// function parameter more than once if necessary.
type Unmarshaler interface {
UnmarshalYAML(unmarshal func(interface{}) error) error
}

@@ -89,10 +81,18 @@ func Unmarshal(in []byte, out interface{}) (err error) {
return unmarshal(in, out, false)
}

// UnmarshalStrict is like Unmarshal except that any fields that are found
// in the data that do not have corresponding struct members, or mapping
// keys that are duplicates, will result in
// an error.
func UnmarshalStrict(in []byte, out interface{}) (err error) {
return unmarshal(in, out, true)
}

// A Decorder reads and decodes YAML values from an input stream.
type Decoder struct {
parser *parser
knownFields bool
strict bool
parser *parser
}

// NewDecoder returns a new decoder that reads from r.
@@ -105,10 +105,10 @@ func NewDecoder(r io.Reader) *Decoder {
}
}

// KnownFields ensures that the keys in decoded mappings to
// exist as fields in the struct being decoded into.
func (dec *Decoder) KnownFields(enable bool) {
dec.knownFields = enable
// SetStrict sets whether strict decoding behaviour is enabled when
// decoding items in the data (see UnmarshalStrict). By default, decoding is not strict.
func (dec *Decoder) SetStrict(strict bool) {
dec.strict = strict
}

// Decode reads the next YAML-encoded value from its input
@@ -117,8 +117,7 @@ func (dec *Decoder) KnownFields(enable bool) {
// See the documentation for Unmarshal for details about the
// conversion of YAML into a Go value.
func (dec *Decoder) Decode(v interface{}) (err error) {
d := newDecoder()
d.knownFields = dec.knownFields
d := newDecoder(dec.strict)
defer handleErr(&err)
node := dec.parser.parse()
if node == nil {
@@ -135,27 +134,9 @@ func (dec *Decoder) Decode(v interface{}) (err error) {
return nil
}

// Decode decodes the node and stores its data into the value pointed to by v.
//
// See the documentation for Unmarshal for details about the
// conversion of YAML into a Go value.
func (n *Node) Decode(v interface{}) (err error) {
d := newDecoder()
defer handleErr(&err)
out := reflect.ValueOf(v)
if out.Kind() == reflect.Ptr && !out.IsNil() {
out = out.Elem()
}
d.unmarshal(n, out)
if len(d.terrors) > 0 {
return &TypeError{d.terrors}
}
return nil
}

func unmarshal(in []byte, out interface{}, strict bool) (err error) {
defer handleErr(&err)
d := newDecoder()
d := newDecoder(strict)
p := newParser(in)
defer p.destroy()
node := p.parse()
@@ -252,14 +233,6 @@ func (e *Encoder) Encode(v interface{}) (err error) {
return nil
}

// SetIndent changes the used indentation used when encoding.
func (e *Encoder) SetIndent(spaces int) {
if spaces < 0 {
panic("yaml: cannot indent to a negative number of spaces")
}
e.encoder.indent = spaces
}

// Close closes the encoder by writing any remaining data.
// It does not write a stream terminating string "...".
func (e *Encoder) Close() (err error) {
@@ -302,150 +275,6 @@ func (e *TypeError) Error() string {
return fmt.Sprintf("yaml: unmarshal errors:\n  %s", strings.Join(e.Errors, "\n  "))
}

type Kind uint32

const (
DocumentNode Kind = 1 << iota
SequenceNode
MappingNode
ScalarNode
AliasNode
)

type Style uint32

const (
TaggedStyle Style = 1 << iota
DoubleQuotedStyle
SingleQuotedStyle
LiteralStyle
FoldedStyle
FlowStyle
)

// Node represents an element in the YAML document hierarchy. While documents
// are typically encoded and decoded into higher level types, such as structs
// and maps, Node is an intermediate representation that allows detailed
// control over the content being decoded or encoded.
//
// Values that make use of the Node type interact with the yaml package in the
// same way any other type would do, by encoding and decoding yaml data
// directly or indirectly into them.
//
// For example:
//
// var person struct {
// Name string
// Address yaml.Node
// }
// err := yaml.Unmarshal(data, &person)
//
// Or by itself:
//
// var person Node
// err := yaml.Unmarshal(data, &person)
//
type Node struct {
// Kind defines whether the node is a document, a mapping, a sequence,
// a scalar value, or an alias to another node. The specific data type of
// scalar nodes may be obtained via the ShortTag and LongTag methods.
Kind Kind

// Style allows customizing the apperance of the node in the tree.
Style Style

// Tag holds the YAML tag defining the data type for the value.
// When decoding, this field will always be set to the resolved tag,
// even when it wasn't explicitly provided in the YAML content.
// When encoding, if this field is unset the value type will be
// implied from the node properties, and if it is set, it will only
// be serialized into the representation if TaggedStyle is used or
// the implicit tag diverges from the provided one.
Tag string

// Value holds the unescaped and unquoted represenation of the value.
Value string

// Anchor holds the anchor name for this node, which allows aliases to point to it.
Anchor string

// Alias holds the node that this alias points to. Only valid when Kind is AliasNode.
Alias *Node

// Content holds contained nodes for documents, mappings, and sequences.
Content []*Node

// HeadComment holds any comments in the lines preceding the node and
// not separated by an empty line.
HeadComment string

// LineComment holds any comments at the end of the line where the node is in.
LineComment string

// FootComment holds any comments following the node and before empty lines.
FootComment string

// Line and Column hold the node position in the decoded YAML text.
// These fields are not respected when encoding the node.
Line int
Column int
}

// LongTag returns the long form of the tag that indicates the data type for
// the node. If the Tag field isn't explicitly defined, one will be computed
// based on the node properties.
func (n *Node) LongTag() string {
return longTag(n.ShortTag())
}

// ShortTag returns the short form of the YAML tag that indicates data type for
// the node. If the Tag field isn't explicitly defined, one will be computed
// based on the node properties.
func (n *Node) ShortTag() string {
if n.indicatedString() {
return strTag
}
if n.Tag == "" || n.Tag == "!" {
switch n.Kind {
case MappingNode:
return mapTag
case SequenceNode:
return seqTag
case AliasNode:
if n.Alias != nil {
return n.Alias.ShortTag()
}
case ScalarNode:
tag, _ := resolve("", n.Value)
return tag
}
return ""
}
return shortTag(n.Tag)
}

func (n *Node) indicatedString() bool {
return n.Kind == ScalarNode &&
(shortTag(n.Tag) == strTag ||
(n.Tag == "" || n.Tag == "!") && n.Style&(SingleQuotedStyle|DoubleQuotedStyle|LiteralStyle|FoldedStyle) != 0)
}

// SetString is a convenience function that sets the node to a string value
// and defines its style in a pleasant way depending on its content.
func (n *Node) SetString(s string) {
n.Kind = ScalarNode
if utf8.ValidString(s) {
n.Value = s
n.Tag = strTag
} else {
n.Value = encodeBase64(s)
n.Tag = binaryTag
}
if strings.Contains(n.Value, "\n") {
n.Style = LiteralStyle
}
}

// --------------------------------------------------------------------------
// Maintain a mapping of keys to structure field indexes

@@ -460,10 +289,6 @@ type structInfo struct {
// InlineMap is the number of the field in the struct that
// contains an ,inline map, or -1 if there's none.
InlineMap int

// InlineUnmarshalers holds indexes to inlined fields that
// contain unmarshaler values.
InlineUnmarshalers [][]int
}

type fieldInfo struct {
@@ -481,12 +306,6 @@ type fieldInfo struct {

var structMap = make(map[reflect.Type]*structInfo)
var fieldMapMutex sync.RWMutex
var unmarshalerType reflect.Type

func init() {
var v Unmarshaler
unmarshalerType = reflect.ValueOf(&v).Elem().Type()
}

func getStructInfo(st reflect.Type) (*structInfo, error) {
fieldMapMutex.RLock()
@@ -500,7 +319,6 @@ func getStructInfo(st reflect.Type) (*structInfo, error) {
fieldsMap := make(map[string]fieldInfo)
fieldsList := make([]fieldInfo, 0, n)
inlineMap := -1
inlineUnmarshalers := [][]int(nil)
for i := 0; i != n; i++ {
field := st.Field(i)
if field.PkgPath != "" && !field.Anonymous {
@@ -529,7 +347,7 @@ func getStructInfo(st reflect.Type) (*structInfo, error) {
case "inline":
inline = true
default:
return nil, errors.New(fmt.Sprintf("unsupported flag %q in tag %q of type %s", flag, tag, st))
return nil, errors.New(fmt.Sprintf("Unsupported flag %q in tag %q of type %s", flag, tag, st))
}
}
tag = fields[0]
@@ -539,47 +357,34 @@ func getStructInfo(st reflect.Type) (*structInfo, error) {
switch field.Type.Kind() {
case reflect.Map:
if inlineMap >= 0 {
return nil, errors.New("multiple ,inline maps in struct " + st.String())
return nil, errors.New("Multiple ,inline maps in struct " + st.String())
}
if field.Type.Key() != reflect.TypeOf("") {
return nil, errors.New("option ,inline needs a map with string keys in struct " + st.String())
return nil, errors.New("Option ,inline needs a map with string keys in struct " + st.String())
}
inlineMap = info.Num
case reflect.Struct, reflect.Ptr:
ftype := field.Type
for ftype.Kind() == reflect.Ptr {
ftype = ftype.Elem()
case reflect.Struct:
sinfo, err := getStructInfo(field.Type)
if err != nil {
return nil, err
}
if ftype.Kind() != reflect.Struct {
return nil, errors.New("option ,inline may only be used on a struct or map field")
}
if reflect.PtrTo(ftype).Implements(unmarshalerType) {
inlineUnmarshalers = append(inlineUnmarshalers, []int{i})
} else {
sinfo, err := getStructInfo(ftype)
if err != nil {
return nil, err
for _, finfo := range sinfo.FieldsList {
if _, found := fieldsMap[finfo.Key]; found {
msg := "Duplicated key '" + finfo.Key + "' in struct " + st.String()
return nil, errors.New(msg)
}
for _, index := range sinfo.InlineUnmarshalers {
inlineUnmarshalers = append(inlineUnmarshalers, append([]int{i}, index...))
}
for _, finfo := range sinfo.FieldsList {
if _, found := fieldsMap[finfo.Key]; found {
msg := "duplicated key '" + finfo.Key + "' in struct " + st.String()
return nil, errors.New(msg)
}
if finfo.Inline == nil {
finfo.Inline = []int{i, finfo.Num}
} else {
finfo.Inline = append([]int{i}, finfo.Inline...)
}
finfo.Id = len(fieldsList)
fieldsMap[finfo.Key] = finfo
fieldsList = append(fieldsList, finfo)
if finfo.Inline == nil {
finfo.Inline = []int{i, finfo.Num}
} else {
finfo.Inline = append([]int{i}, finfo.Inline...)
}
finfo.Id = len(fieldsList)
fieldsMap[finfo.Key] = finfo
fieldsList = append(fieldsList, finfo)
}
default:
return nil, errors.New("option ,inline may only be used on a struct or map field")
//return nil, errors.New("Option ,inline needs a struct value or map field")
return nil, errors.New("Option ,inline needs a struct value field")
}
continue
}
@@ -591,7 +396,7 @@ func getStructInfo(st reflect.Type) (*structInfo, error) {
}

if _, found = fieldsMap[info.Key]; found {
msg := "duplicated key '" + info.Key + "' in struct " + st.String()
msg := "Duplicated key '" + info.Key + "' in struct " + st.String()
return nil, errors.New(msg)
}

@@ -601,10 +406,9 @@ func getStructInfo(st reflect.Type) (*structInfo, error) {
}

sinfo = &structInfo{
FieldsMap:          fieldsMap,
FieldsList:         fieldsList,
InlineMap:          inlineMap,
InlineUnmarshalers: inlineUnmarshalers,
FieldsMap:  fieldsMap,
FieldsList: fieldsList,
InlineMap:  inlineMap,
}

fieldMapMutex.Lock()
64 vendor/gopkg.in/yaml.v3/yamlh.go → vendor/gopkg.in/yaml.v2/yamlh.go generated vendored
@@ -1,25 +1,3 @@
//
// Copyright (c) 2011-2019 Canonical Ltd
// Copyright (c) 2006-2010 Kirill Simonov
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
// of the Software, and to permit persons to whom the Software is furnished to do
// so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package yaml

import (
@@ -95,13 +73,13 @@ type yaml_scalar_style_t yaml_style_t
// Scalar styles.
const (
// Let the emitter choose the style.
yaml_ANY_SCALAR_STYLE yaml_scalar_style_t = 0
yaml_ANY_SCALAR_STYLE yaml_scalar_style_t = iota

yaml_PLAIN_SCALAR_STYLE yaml_scalar_style_t = 1 << iota // The plain scalar style.
yaml_SINGLE_QUOTED_SCALAR_STYLE // The single-quoted scalar style.
yaml_DOUBLE_QUOTED_SCALAR_STYLE // The double-quoted scalar style.
yaml_LITERAL_SCALAR_STYLE // The literal scalar style.
yaml_FOLDED_SCALAR_STYLE // The folded scalar style.
yaml_PLAIN_SCALAR_STYLE // The plain scalar style.
yaml_SINGLE_QUOTED_SCALAR_STYLE // The single-quoted scalar style.
yaml_DOUBLE_QUOTED_SCALAR_STYLE // The double-quoted scalar style.
yaml_LITERAL_SCALAR_STYLE // The literal scalar style.
yaml_FOLDED_SCALAR_STYLE // The folded scalar style.
)

type yaml_sequence_style_t yaml_style_t
@@ -301,11 +279,6 @@ type yaml_event_t struct {
// The list of tag directives (for yaml_DOCUMENT_START_EVENT).
tag_directives []yaml_tag_directive_t

// The comments
head_comment []byte
line_comment []byte
foot_comment []byte

// The anchor (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_ALIAS_EVENT).
anchor []byte

@@ -589,15 +562,6 @@ type yaml_parser_t struct {
offset int // The offset of the current position (in bytes).
mark yaml_mark_t // The mark of the current position.

// Comments

head_comment []byte // The current head comments
line_comment []byte // The current line comments
foot_comment []byte // The current foot comments

comments []yaml_comment_t // The folded comments for all parsed tokens
comments_head int

// Scanner stuff

stream_start_produced bool // Have we started to scan the input stream?
@@ -630,13 +594,6 @@ type yaml_parser_t struct {
document *yaml_document_t // The currently parsed document.
}

type yaml_comment_t struct {
after yaml_mark_t
head []byte
line []byte
foot []byte
}

// Emitter Definitions

// The prototype of a write handler.
@@ -667,10 +624,8 @@ const (
yaml_EMIT_DOCUMENT_CONTENT_STATE // Expect the content of a document.
yaml_EMIT_DOCUMENT_END_STATE // Expect DOCUMENT-END.
yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a flow sequence.
yaml_EMIT_FLOW_SEQUENCE_TRAIL_ITEM_STATE // Expect the next item of a flow sequence, with the comma already written out
yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE // Expect an item of a flow sequence.
yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping.
yaml_EMIT_FLOW_MAPPING_TRAIL_KEY_STATE // Expect the next key of a flow mapping, with the comma already written out
yaml_EMIT_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping.
yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a flow mapping.
yaml_EMIT_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping.
@@ -742,8 +697,6 @@ type yaml_emitter_t struct {
indention bool // If the last character was an indentation character (' ', '-', '?', ':')?
open_ended bool // If an explicit document end is required?

space_above bool // If there's an empty line right above?

// Anchor analysis.
anchor_data struct {
anchor []byte // The anchor value.
@@ -767,11 +720,6 @@ type yaml_emitter_t struct {
style yaml_scalar_style_t // The output style.
}

// Comments
head_comment []byte
line_comment []byte
foot_comment []byte

// Dumper stuff

opened bool // If the stream was already opened?
22 vendor/gopkg.in/yaml.v3/yamlprivateh.go → vendor/gopkg.in/yaml.v2/yamlprivateh.go generated vendored
@@ -1,25 +1,3 @@
//
// Copyright (c) 2011-2019 Canonical Ltd
// Copyright (c) 2006-2010 Kirill Simonov
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
// of the Software, and to permit persons to whom the Software is furnished to do
// so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package yaml

const (
15 vendor/gopkg.in/yaml.v3/.travis.yml generated vendored
@@ -1,15 +0,0 @@
language: go

go:
- "1.4"
- "1.5"
- "1.6"
- "1.7"
- "1.8"
- "1.9"
- "1.10"
- "1.11"
- "1.12"
- tip

go_import_path: gopkg.in/yaml.v2
48 vendor/gopkg.in/yaml.v3/writerc.go generated vendored
@@ -1,48 +0,0 @@
//
// Copyright (c) 2011-2019 Canonical Ltd
// Copyright (c) 2006-2010 Kirill Simonov
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
// of the Software, and to permit persons to whom the Software is furnished to do
// so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package yaml

// Set the writer error and return false.
func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool {
	emitter.error = yaml_WRITER_ERROR
	emitter.problem = problem
	return false
}

// Flush the output buffer.
func yaml_emitter_flush(emitter *yaml_emitter_t) bool {
	if emitter.write_handler == nil {
		panic("write handler not set")
	}

	// Check if the buffer is empty.
	if emitter.buffer_pos == 0 {
		return true
	}

	if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil {
		return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error())
	}
	emitter.buffer_pos = 0
	return true
}
4 vendor/modules.txt vendored
@@ -36,8 +36,8 @@ golang.org/x/sync/errgroup
# golang.org/x/sys v0.0.0-20180831094639-fa5fdf94c789
golang.org/x/sys/unix
golang.org/x/sys/windows
# gopkg.in/yaml.v3 v3.0.0-20190409140830-cdc409dda467
gopkg.in/yaml.v3
# gopkg.in/yaml.v2 v2.2.1
gopkg.in/yaml.v2
# mvdan.cc/sh v2.6.4+incompatible
mvdan.cc/sh/expand
mvdan.cc/sh/interp