summaryrefslogtreecommitdiffstats
path: root/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser
diff options
context:
space:
mode:
Diffstat (limited to 'dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser')
-rw-r--r--dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/context.go199
-rw-r--r--dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/parser.go671
-rw-r--r--dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/parser_test.go868
-rw-r--r--dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/testdata/cr.yml1
-rw-r--r--dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/testdata/crlf.yml3
-rw-r--r--dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/testdata/lf.yml3
6 files changed, 1745 insertions, 0 deletions
diff --git a/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/context.go b/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/context.go
new file mode 100644
index 0000000..99f18b1
--- /dev/null
+++ b/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/context.go
@@ -0,0 +1,199 @@
+package parser
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/goccy/go-yaml/token"
+)
+
// context holds the parser's current position and view of the token stream
// while parsing.
type context struct {
	parent *context     // enclosing context this one was copied from, if any
	idx    int          // index of the current token in tokens
	size   int          // cached len(tokens)
	tokens token.Tokens // tokens visible to this context
	mode   Mode         // parse mode flags (e.g. ParseComments)
	path   string       // YAMLPath of the node currently being parsed (root is "$")
}
+
// pathSpecialChars lists the characters that carry special meaning in a
// YAMLPath expression and therefore force quoting of a path segment.
var pathSpecialChars = []string{
	"$", "*", ".", "[", "]",
}

// containsPathSpecialChar reports whether path contains any character that
// is special in YAMLPath syntax.
func containsPathSpecialChar(path string) bool {
	for _, special := range pathSpecialChars {
		if strings.Contains(path, special) {
			return true
		}
	}
	return false
}

// normalizePath wraps path in single quotes when it contains YAMLPath
// special characters, so it can be used verbatim as a path segment.
func normalizePath(path string) string {
	if containsPathSpecialChar(path) {
		return "'" + path + "'"
	}
	return path
}
+
// withChild returns a copy of the context whose path descends into the map
// key `path` (quoted first if it contains YAMLPath special characters).
func (c *context) withChild(path string) *context {
	ctx := c.copy()
	path = normalizePath(path)
	ctx.path += fmt.Sprintf(".%s", path)
	return ctx
}

// withIndex returns a copy of the context whose path descends into the
// sequence element at idx, e.g. "$.a[0]".
func (c *context) withIndex(idx uint) *context {
	ctx := c.copy()
	ctx.path += fmt.Sprintf("[%d]", idx)
	return ctx
}

// copy returns a child context at the same position with its own token
// slice; c is recorded as parent so mutations such as insertToken and
// progressIgnoreComment can be propagated back up the chain.
func (c *context) copy() *context {
	return &context{
		parent: c,
		idx:    c.idx,
		size:   c.size,
		tokens: append(token.Tokens{}, c.tokens...),
		mode:   c.mode,
		path:   c.path,
	}
}
+
// next reports whether a current token exists (the index is still in range).
func (c *context) next() bool {
	return c.idx < c.size
}

// previousToken returns the token just before the current position, or nil
// at the start of the stream.
func (c *context) previousToken() *token.Token {
	if c.idx > 0 {
		return c.tokens[c.idx-1]
	}
	return nil
}
+
+func (c *context) insertToken(idx int, tk *token.Token) {
+ if c.parent != nil {
+ c.parent.insertToken(idx, tk)
+ }
+ if c.size < idx {
+ return
+ }
+ if c.size == idx {
+ curToken := c.tokens[c.size-1]
+ tk.Next = curToken
+ curToken.Prev = tk
+
+ c.tokens = append(c.tokens, tk)
+ c.size = len(c.tokens)
+ return
+ }
+
+ curToken := c.tokens[idx]
+ tk.Next = curToken
+ curToken.Prev = tk
+
+ c.tokens = append(c.tokens[:idx+1], c.tokens[idx:]...)
+ c.tokens[idx] = tk
+ c.size = len(c.tokens)
+}
+
// currentToken returns the token at the current position, or nil when the
// stream is exhausted.
func (c *context) currentToken() *token.Token {
	if c.idx >= c.size {
		return nil
	}
	return c.tokens[c.idx]
}

// nextToken returns the token one position ahead, or nil if none exists.
func (c *context) nextToken() *token.Token {
	if c.idx+1 >= c.size {
		return nil
	}
	return c.tokens[c.idx+1]
}

// afterNextToken returns the token two positions ahead, or nil if none exists.
func (c *context) afterNextToken() *token.Token {
	if c.idx+2 >= c.size {
		return nil
	}
	return c.tokens[c.idx+2]
}
+
// nextNotCommentToken returns the first token after the current position
// that is not a comment, or nil if none remains.
func (c *context) nextNotCommentToken() *token.Token {
	for i := c.idx + 1; i < c.size; i++ {
		tk := c.tokens[i]
		if tk.Type == token.CommentType {
			continue
		}
		return tk
	}
	return nil
}

// afterNextNotCommentToken returns the second non-comment token after the
// current position, or nil if fewer than two remain.
func (c *context) afterNextNotCommentToken() *token.Token {
	notCommentTokenCount := 0
	for i := c.idx + 1; i < c.size; i++ {
		tk := c.tokens[i]
		if tk.Type == token.CommentType {
			continue
		}
		notCommentTokenCount++
		if notCommentTokenCount == 2 {
			return tk
		}
	}
	return nil
}
+
// enabledComment reports whether comment parsing was requested via the
// ParseComments mode flag.
func (c *context) enabledComment() bool {
	return c.mode&ParseComments != 0
}

// isCurrentCommentToken reports whether the current token is a comment.
func (c *context) isCurrentCommentToken() bool {
	tk := c.currentToken()
	if tk == nil {
		return false
	}
	return tk.Type == token.CommentType
}

// progressIgnoreComment advances the position by num tokens (clamped to the
// end of the stream), advancing the parent context as well so both views
// stay in sync. Unlike progress, it moves even when sitting on a comment.
func (c *context) progressIgnoreComment(num int) {
	if c.parent != nil {
		c.parent.progressIgnoreComment(num)
	}
	if c.size <= c.idx+num {
		c.idx = c.size
	} else {
		c.idx += num
	}
}

// progress advances the position by num tokens, but stays put when the
// current token is a comment so that comment handling can consume it first.
func (c *context) progress(num int) {
	if c.isCurrentCommentToken() {
		return
	}
	c.progressIgnoreComment(num)
}
+
// newContext builds the root parsing context for tokens, starting at path
// "$". When comments are not being parsed, comment tokens are filtered out
// of the working slice; their Prev/Next links are intentionally preserved.
func newContext(tokens token.Tokens, mode Mode) *context {
	filteredTokens := []*token.Token{}
	if mode&ParseComments != 0 {
		filteredTokens = tokens
	} else {
		for _, tk := range tokens {
			if tk.Type == token.CommentType {
				continue
			}
			// keep prev/next reference between tokens containing comments
			// https://github.com/goccy/go-yaml/issues/254
			filteredTokens = append(filteredTokens, tk)
		}
	}
	return &context{
		idx:    0,
		size:   len(filteredTokens),
		tokens: token.Tokens(filteredTokens),
		mode:   mode,
		path:   "$",
	}
}
diff --git a/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/parser.go b/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/parser.go
new file mode 100644
index 0000000..70937c9
--- /dev/null
+++ b/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/parser.go
@@ -0,0 +1,671 @@
+package parser
+
+import (
+ "fmt"
+ "io/ioutil"
+ "strings"
+
+ "github.com/goccy/go-yaml/ast"
+ "github.com/goccy/go-yaml/internal/errors"
+ "github.com/goccy/go-yaml/lexer"
+ "github.com/goccy/go-yaml/token"
+ "golang.org/x/xerrors"
+)
+
// parser converts a lexed token stream into a YAML AST.
type parser struct{}

// parseMapping parses a flow mapping ("{ a: b, c: d }") starting at the
// MappingStart token. It errors if the closing '}' is missing.
func (p *parser) parseMapping(ctx *context) (*ast.MappingNode, error) {
	mapTk := ctx.currentToken()
	node := ast.Mapping(mapTk, true)
	node.SetPath(ctx.path)
	ctx.progress(1) // skip MappingStart token
	for ctx.next() {
		tk := ctx.currentToken()
		if tk.Type == token.MappingEndType {
			node.End = tk
			return node, nil
		} else if tk.Type == token.CollectEntryType {
			// ',' separator between entries
			ctx.progress(1)
			continue
		}

		value, err := p.parseMappingValue(ctx)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse mapping value in mapping node")
		}
		mvnode, ok := value.(*ast.MappingValueNode)
		if !ok {
			return nil, errors.ErrSyntax("failed to parse flow mapping node", value.GetToken())
		}
		node.Values = append(node.Values, mvnode)
		ctx.progress(1)
	}
	// ran out of tokens without seeing '}'
	return nil, errors.ErrSyntax("unterminated flow mapping", node.GetToken())
}
+
// parseSequence parses a flow sequence ("[ a, b ]") starting at the
// SequenceStart token.
func (p *parser) parseSequence(ctx *context) (*ast.SequenceNode, error) {
	node := ast.Sequence(ctx.currentToken(), true)
	node.SetPath(ctx.path)
	ctx.progress(1) // skip SequenceStart token
	for ctx.next() {
		tk := ctx.currentToken()
		if tk.Type == token.SequenceEndType {
			node.End = tk
			break
		} else if tk.Type == token.CollectEntryType {
			// ',' separator between elements
			ctx.progress(1)
			continue
		}

		// each element's path is suffixed with its index, e.g. "$.a[0]"
		value, err := p.parseToken(ctx.withIndex(uint(len(node.Values))), tk)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse sequence value in flow sequence node")
		}
		node.Values = append(node.Values, value)
		ctx.progress(1)
	}
	return node, nil
}
+
// parseTag parses a "!!tag value" pair. Reserved tags dispatch to the parse
// routine matching the tagged type; unknown (custom) tags fall back to
// generic token parsing. Sequence/set tags are currently unsupported.
func (p *parser) parseTag(ctx *context) (*ast.TagNode, error) {
	tagToken := ctx.currentToken()
	node := ast.Tag(tagToken)
	node.SetPath(ctx.path)
	ctx.progress(1) // skip tag token
	var (
		value ast.Node
		err   error
	)
	switch token.ReservedTagKeyword(tagToken.Value) {
	case token.MappingTag,
		token.OrderedMapTag:
		value, err = p.parseMapping(ctx)
	case token.IntegerTag,
		token.FloatTag,
		token.StringTag,
		token.BinaryTag,
		token.TimestampTag,
		token.NullTag:
		typ := ctx.currentToken().Type
		if typ == token.LiteralType || typ == token.FoldedType {
			// e.g. "!!binary |" — scalar given as a block literal
			value, err = p.parseLiteral(ctx)
		} else {
			value = p.parseScalarValue(ctx.currentToken())
		}
	case token.SequenceTag,
		token.SetTag:
		err = errors.ErrSyntax(fmt.Sprintf("sorry, currently not supported %s tag", tagToken.Value), tagToken)
	default:
		// custom tag
		value, err = p.parseToken(ctx, ctx.currentToken())
	}
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse tag value")
	}
	node.Value = value
	return node, nil
}
+
+func (p *parser) removeLeftSideNewLineCharacter(src string) string {
+ // CR or LF or CRLF
+ return strings.TrimLeft(strings.TrimLeft(strings.TrimLeft(src, "\r"), "\n"), "\r\n")
+}
+
+func (p *parser) existsNewLineCharacter(src string) bool {
+ if strings.Index(src, "\n") > 0 {
+ return true
+ }
+ if strings.Index(src, "\r") > 0 {
+ return true
+ }
+ return false
+}
+
// validateMapKey rejects a string map key whose raw origin still contains a
// newline after leading newlines are stripped: such a key spans multiple
// source lines and is malformed.
func (p *parser) validateMapKey(tk *token.Token) error {
	if tk.Type != token.StringType {
		return nil
	}
	origin := p.removeLeftSideNewLineCharacter(tk.Origin)
	if p.existsNewLineCharacter(origin) {
		return errors.ErrSyntax("unexpected key name", tk)
	}
	return nil
}

// createNullToken synthesizes a "null" token positioned one column after
// base (typically the ':' token), used for entries whose value is omitted.
func (p *parser) createNullToken(base *token.Token) *token.Token {
	pos := *(base.Position)
	pos.Column++
	return token.New("null", "null", &pos)
}

// parseMapValue parses the value part of a mapping entry and ensures the
// resulting node carries a YAMLPath.
func (p *parser) parseMapValue(ctx *context, key ast.MapKeyNode, colonToken *token.Token) (ast.Node, error) {
	node, err := p.createMapValueNode(ctx, key, colonToken)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to create map value node")
	}
	if node != nil && node.GetPath() == "" {
		node.SetPath(ctx.path)
	}
	return node, nil
}
+
// createMapValueNode builds the AST node for a mapping entry's value. When
// the value is omitted (stream ended, or the next token starts a sibling or
// outer-level entry), a synthetic null token is inserted into the stream
// and a Null node is returned.
func (p *parser) createMapValueNode(ctx *context, key ast.MapKeyNode, colonToken *token.Token) (ast.Node, error) {
	tk := ctx.currentToken()
	if tk == nil {
		// "key:" at end of input
		nullToken := p.createNullToken(colonToken)
		ctx.insertToken(ctx.idx, nullToken)
		return ast.Null(nullToken), nil
	}

	if tk.Position.Column == key.GetToken().Position.Column && tk.Type == token.StringType {
		// in this case,
		// ----
		// key: <value does not defined>
		// next
		nullToken := p.createNullToken(colonToken)
		ctx.insertToken(ctx.idx, nullToken)
		return ast.Null(nullToken), nil
	}

	if tk.Position.Column < key.GetToken().Position.Column {
		// in this case,
		// ----
		//   key: <value does not defined>
		// next
		nullToken := p.createNullToken(colonToken)
		ctx.insertToken(ctx.idx, nullToken)
		return ast.Null(nullToken), nil
	}

	value, err := p.parseToken(ctx, ctx.currentToken())
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse mapping 'value' node")
	}
	return value, nil
}

// validateMapValue guards against a missing ':': when a string value sits
// at the same column as its key, the token after it must begin another
// mapping or sequence entry — otherwise the "value" was really an unkeyed
// line.
func (p *parser) validateMapValue(ctx *context, key, value ast.Node) error {
	keyColumn := key.GetToken().Position.Column
	valueColumn := value.GetToken().Position.Column
	if keyColumn != valueColumn {
		return nil
	}
	if value.Type() != ast.StringType {
		return nil
	}
	ntk := ctx.nextToken()
	if ntk == nil || (ntk.Type != token.MappingValueType && ntk.Type != token.SequenceEntryType) {
		return errors.ErrSyntax("could not found expected ':' token", value.GetToken())
	}
	return nil
}
+
// parseMappingValue parses one "key: value" entry of a block mapping, then
// keeps absorbing following sibling entries that start at the same column,
// producing a MappingNode. If only a single entry is found, the bare
// MappingValueNode is returned instead.
func (p *parser) parseMappingValue(ctx *context) (ast.Node, error) {
	key, err := p.parseMapKey(ctx)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse map key")
	}
	keyText := key.GetToken().Value
	key.SetPath(ctx.withChild(keyText).path)
	if err := p.validateMapKey(key.GetToken()); err != nil {
		return nil, errors.Wrapf(err, "validate mapping key error")
	}
	ctx.progress(1)          // progress to mapping value token
	tk := ctx.currentToken() // get mapping value token
	if tk == nil {
		return nil, errors.ErrSyntax("unexpected map", key.GetToken())
	}
	ctx.progress(1) // progress to value token
	if err := p.setSameLineCommentIfExists(ctx.withChild(keyText), key); err != nil {
		return nil, errors.Wrapf(err, "failed to set same line comment to node")
	}
	if key.GetComment() != nil {
		// if current token is comment, GetComment() is not nil.
		// then progress to value token
		ctx.progressIgnoreComment(1)
	}

	value, err := p.parseMapValue(ctx.withChild(keyText), key, tk)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse map value")
	}
	if err := p.validateMapValue(ctx, key, value); err != nil {
		return nil, errors.Wrapf(err, "failed to validate map value")
	}

	mvnode := ast.MappingValue(tk, key, value)
	mvnode.SetPath(ctx.withChild(keyText).path)
	node := ast.Mapping(tk, false, mvnode)
	node.SetPath(ctx.withChild(keyText).path)

	// Absorb sibling entries: while the next non-comment token is a key at
	// the same column followed by ':', it belongs to this mapping.
	ntk := ctx.nextNotCommentToken()
	antk := ctx.afterNextNotCommentToken()
	for antk != nil && antk.Type == token.MappingValueType &&
		ntk.Position.Column == key.GetToken().Position.Column {
		ctx.progressIgnoreComment(1)
		value, err := p.parseToken(ctx, ctx.currentToken())
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse mapping node")
		}
		switch value.Type() {
		case ast.MappingType:
			// flatten a nested mapping result into this node's entry list
			c := value.(*ast.MappingNode)
			comment := c.GetComment()
			for idx, v := range c.Values {
				if idx == 0 && comment != nil {
					if err := v.SetComment(comment); err != nil {
						return nil, errors.Wrapf(err, "failed to set comment token to node")
					}
				}
				node.Values = append(node.Values, v)
			}
		case ast.MappingValueType:
			node.Values = append(node.Values, value.(*ast.MappingValueNode))
		default:
			return nil, xerrors.Errorf("failed to parse mapping value node node is %s", value.Type())
		}
		ntk = ctx.nextNotCommentToken()
		antk = ctx.afterNextNotCommentToken()
	}
	if len(node.Values) == 1 {
		// single entry: unwrap to the MappingValueNode itself
		return mvnode, nil
	}
	return node, nil
}
+
// parseSequenceEntry parses a block sequence ("- a\n- b"), collecting all
// entries that share the starting column. Comments preceding an entry are
// recorded in ValueComments, index-aligned with Values.
func (p *parser) parseSequenceEntry(ctx *context) (*ast.SequenceNode, error) {
	tk := ctx.currentToken()
	sequenceNode := ast.Sequence(tk, false)
	sequenceNode.SetPath(ctx.path)
	curColumn := tk.Position.Column
	for tk.Type == token.SequenceEntryType {
		ctx.progress(1) // skip sequence token
		tk = ctx.currentToken()
		if tk == nil {
			return nil, errors.ErrSyntax("empty sequence entry", ctx.previousToken())
		}
		var comment *ast.CommentGroupNode
		if tk.Type == token.CommentType {
			comment = p.parseCommentOnly(ctx)
			tk = ctx.currentToken()
			if tk.Type != token.SequenceEntryType {
				break
			}
			ctx.progress(1) // skip sequence token
		}
		value, err := p.parseToken(ctx.withIndex(uint(len(sequenceNode.Values))), ctx.currentToken())
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse sequence")
		}
		if comment != nil {
			comment.SetPath(ctx.withIndex(uint(len(sequenceNode.Values))).path)
			sequenceNode.ValueComments = append(sequenceNode.ValueComments, comment)
		} else {
			// keep ValueComments aligned one-to-one with Values
			sequenceNode.ValueComments = append(sequenceNode.ValueComments, nil)
		}
		sequenceNode.Values = append(sequenceNode.Values, value)
		tk = ctx.nextNotCommentToken()
		if tk == nil {
			break
		}
		if tk.Type != token.SequenceEntryType {
			break
		}
		if tk.Position.Column != curColumn {
			// different indentation: the entry belongs to another sequence
			break
		}
		ctx.progressIgnoreComment(1)
	}
	return sequenceNode, nil
}
+
// parseAnchor parses "&name value", producing an AnchorNode whose Name and
// Value are both parsed as regular nodes.
func (p *parser) parseAnchor(ctx *context) (*ast.AnchorNode, error) {
	tk := ctx.currentToken()
	anchor := ast.Anchor(tk)
	anchor.SetPath(ctx.path)
	ntk := ctx.nextToken()
	if ntk == nil {
		return nil, errors.ErrSyntax("unexpected anchor. anchor name is undefined", tk)
	}
	ctx.progress(1) // skip anchor token
	name, err := p.parseToken(ctx, ctx.currentToken())
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parser anchor name node")
	}
	anchor.Name = name
	ntk = ctx.nextToken()
	if ntk == nil {
		return nil, errors.ErrSyntax("unexpected anchor. anchor value is undefined", ctx.currentToken())
	}
	ctx.progress(1)
	value, err := p.parseToken(ctx, ctx.currentToken())
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parser anchor name node")
	}
	anchor.Value = value
	return anchor, nil
}

// parseAlias parses "*name", producing an AliasNode referencing an anchor.
func (p *parser) parseAlias(ctx *context) (*ast.AliasNode, error) {
	tk := ctx.currentToken()
	alias := ast.Alias(tk)
	alias.SetPath(ctx.path)
	ntk := ctx.nextToken()
	if ntk == nil {
		return nil, errors.ErrSyntax("unexpected alias. alias name is undefined", tk)
	}
	ctx.progress(1) // skip alias token
	name, err := p.parseToken(ctx, ctx.currentToken())
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parser alias name node")
	}
	alias.Value = name
	return alias, nil
}
+
// parseMapKey parses the key of a mapping entry: a scalar, the merge key
// "<<", or an explicit "? key".
func (p *parser) parseMapKey(ctx *context) (ast.MapKeyNode, error) {
	tk := ctx.currentToken()
	if value := p.parseScalarValue(tk); value != nil {
		return value, nil
	}
	switch tk.Type {
	case token.MergeKeyType:
		return ast.MergeKey(tk), nil
	case token.MappingKeyType:
		return p.parseMappingKey(ctx)
	}
	return nil, errors.ErrSyntax("unexpected mapping key", tk)
}

// parseStringValue returns a StringNode for plain, single-quoted or
// double-quoted string tokens, and nil for any other token type.
func (p *parser) parseStringValue(tk *token.Token) *ast.StringNode {
	switch tk.Type {
	case token.StringType,
		token.SingleQuoteType,
		token.DoubleQuoteType:
		return ast.String(tk)
	}
	return nil
}
+
// parseScalarValueWithComment parses a scalar token and, when a comment
// sits on the same source line, attaches it to the node. Returns (nil, nil)
// for non-scalar tokens.
func (p *parser) parseScalarValueWithComment(ctx *context, tk *token.Token) (ast.ScalarNode, error) {
	node := p.parseScalarValue(tk)
	if node == nil {
		return nil, nil
	}
	node.SetPath(ctx.path)
	if p.isSameLineComment(ctx.nextToken(), node) {
		ctx.progress(1)
		if err := p.setSameLineCommentIfExists(ctx, node); err != nil {
			return nil, errors.Wrapf(err, "failed to set same line comment to node")
		}
	}
	return node, nil
}

// parseScalarValue maps a scalar token to its AST node (string, null, bool,
// integer variants, float, infinity or NaN); non-scalar tokens yield nil.
func (p *parser) parseScalarValue(tk *token.Token) ast.ScalarNode {
	if node := p.parseStringValue(tk); node != nil {
		return node
	}
	switch tk.Type {
	case token.NullType:
		return ast.Null(tk)
	case token.BoolType:
		return ast.Bool(tk)
	case token.IntegerType,
		token.BinaryIntegerType,
		token.OctetIntegerType,
		token.HexIntegerType:
		return ast.Integer(tk)
	case token.FloatType:
		return ast.Float(tk)
	case token.InfinityType:
		return ast.Infinity(tk)
	case token.NanType:
		return ast.Nan(tk)
	}
	return nil
}
+
// parseDirective parses a "%YAML ..." directive and requires the following
// token to be the "---" document header.
func (p *parser) parseDirective(ctx *context) (*ast.DirectiveNode, error) {
	node := ast.Directive(ctx.currentToken())
	ctx.progress(1) // skip directive token
	value, err := p.parseToken(ctx, ctx.currentToken())
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse directive value")
	}
	node.Value = value
	ctx.progress(1)
	tk := ctx.currentToken()
	if tk == nil {
		// Since current token is nil, use the previous token to specify
		// the syntax error location.
		return nil, errors.ErrSyntax("unexpected directive value. document not started", ctx.previousToken())
	}
	if tk.Type != token.DocumentHeaderType {
		return nil, errors.ErrSyntax("unexpected directive value. document not started", ctx.currentToken())
	}
	return node, nil
}

// parseLiteral parses a block scalar ("|" or ">"), attaching an optional
// comment that follows the indicator and requiring the body to parse as a
// string node.
func (p *parser) parseLiteral(ctx *context) (*ast.LiteralNode, error) {
	node := ast.Literal(ctx.currentToken())
	ctx.progress(1) // skip literal/folded token

	tk := ctx.currentToken()
	var comment *ast.CommentGroupNode
	if tk.Type == token.CommentType {
		// e.g. "foo: | # comment"
		comment = p.parseCommentOnly(ctx)
		comment.SetPath(ctx.path)
		if err := node.SetComment(comment); err != nil {
			return nil, errors.Wrapf(err, "failed to set comment to literal")
		}
		tk = ctx.currentToken()
	}
	value, err := p.parseToken(ctx, tk)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse literal/folded value")
	}
	snode, ok := value.(*ast.StringNode)
	if !ok {
		return nil, errors.ErrSyntax("unexpected token. required string token", value.GetToken())
	}
	node.Value = snode
	return node, nil
}
+
// isSameLineComment reports whether tk is a comment token on the same
// source line as node.
func (p *parser) isSameLineComment(tk *token.Token, node ast.Node) bool {
	if tk == nil {
		return false
	}
	if tk.Type != token.CommentType {
		return false
	}
	return tk.Position.Line == node.GetToken().Position.Line
}

// setSameLineCommentIfExists attaches the current token to node as a
// trailing comment when it is a comment on the same line; otherwise no-op.
func (p *parser) setSameLineCommentIfExists(ctx *context, node ast.Node) error {
	tk := ctx.currentToken()
	if !p.isSameLineComment(tk, node) {
		return nil
	}
	comment := ast.CommentGroup([]*token.Token{tk})
	comment.SetPath(ctx.path)
	if err := node.SetComment(comment); err != nil {
		return errors.Wrapf(err, "failed to set comment token to ast.Node")
	}
	return nil
}
+
// parseDocument parses one document introduced by "---" and, when present,
// records the terminating "..." token as the document end.
func (p *parser) parseDocument(ctx *context) (*ast.DocumentNode, error) {
	startTk := ctx.currentToken()
	ctx.progress(1) // skip document header token
	body, err := p.parseToken(ctx, ctx.currentToken())
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse document body")
	}
	node := ast.Document(startTk, body)
	if ntk := ctx.nextToken(); ntk != nil && ntk.Type == token.DocumentEndType {
		node.End = ntk
		ctx.progress(1)
	}
	return node, nil
}

// parseCommentOnly consumes the run of consecutive comment tokens at the
// current position and groups them into a single CommentGroupNode.
func (p *parser) parseCommentOnly(ctx *context) *ast.CommentGroupNode {
	commentTokens := []*token.Token{}
	for {
		tk := ctx.currentToken()
		if tk == nil {
			break
		}
		if tk.Type != token.CommentType {
			break
		}
		commentTokens = append(commentTokens, tk)
		ctx.progressIgnoreComment(1) // skip comment token
	}
	return ast.CommentGroup(commentTokens)
}
+
// parseComment consumes leading comments and attaches them to the node that
// follows; when nothing follows, the comment group itself is returned.
func (p *parser) parseComment(ctx *context) (ast.Node, error) {
	group := p.parseCommentOnly(ctx)
	node, err := p.parseToken(ctx, ctx.currentToken())
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse node after comment")
	}
	if node == nil {
		return group, nil
	}
	group.SetPath(node.GetPath())
	if err := node.SetComment(group); err != nil {
		return nil, errors.Wrapf(err, "failed to set comment token to node")
	}
	return node, nil
}

// parseMappingKey parses an explicit mapping key introduced by '?'.
func (p *parser) parseMappingKey(ctx *context) (*ast.MappingKeyNode, error) {
	keyTk := ctx.currentToken()
	node := ast.MappingKey(keyTk)
	node.SetPath(ctx.path)
	ctx.progress(1) // skip mapping key token
	value, err := p.parseToken(ctx.withChild(keyTk.Value), ctx.currentToken())
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse map key")
	}
	node.Value = value
	return node, nil
}

// parseToken parses the node starting at tk and guarantees the result
// carries a YAMLPath.
func (p *parser) parseToken(ctx *context, tk *token.Token) (ast.Node, error) {
	node, err := p.createNodeFromToken(ctx, tk)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to create node from token")
	}
	if node != nil && node.GetPath() == "" {
		node.SetPath(ctx.path)
	}
	return node, nil
}
+
// createNodeFromToken dispatches to the parse routine matching tk. A token
// whose successor is ':' always starts a mapping entry; scalars are tried
// next; everything else dispatches on the token type. Unknown tokens yield
// (nil, nil).
func (p *parser) createNodeFromToken(ctx *context, tk *token.Token) (ast.Node, error) {
	if tk == nil {
		return nil, nil
	}
	if tk.NextType() == token.MappingValueType {
		// the next token is ':', so this token is a mapping key
		node, err := p.parseMappingValue(ctx)
		return node, err
	}
	node, err := p.parseScalarValueWithComment(ctx, tk)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse scalar value")
	}
	if node != nil {
		return node, nil
	}
	switch tk.Type {
	case token.CommentType:
		return p.parseComment(ctx)
	case token.MappingKeyType:
		return p.parseMappingKey(ctx)
	case token.DocumentHeaderType:
		return p.parseDocument(ctx)
	case token.MappingStartType:
		return p.parseMapping(ctx)
	case token.SequenceStartType:
		return p.parseSequence(ctx)
	case token.SequenceEntryType:
		return p.parseSequenceEntry(ctx)
	case token.AnchorType:
		return p.parseAnchor(ctx)
	case token.AliasType:
		return p.parseAlias(ctx)
	case token.DirectiveType:
		return p.parseDirective(ctx)
	case token.TagType:
		return p.parseTag(ctx)
	case token.LiteralType, token.FoldedType:
		return p.parseLiteral(ctx)
	}
	return nil, nil
}
+
// parse builds an ast.File from the token stream; every top-level node is
// wrapped in a DocumentNode.
func (p *parser) parse(tokens token.Tokens, mode Mode) (*ast.File, error) {
	ctx := newContext(tokens, mode)
	file := &ast.File{Docs: []*ast.DocumentNode{}}
	for ctx.next() {
		node, err := p.parseToken(ctx, ctx.currentToken())
		if err != nil {
			return nil, errors.Wrapf(err, "failed to parse")
		}
		ctx.progressIgnoreComment(1)
		if node == nil {
			continue
		}
		if doc, ok := node.(*ast.DocumentNode); ok {
			file.Docs = append(file.Docs, doc)
		} else {
			// implicit document (no "---" header)
			file.Docs = append(file.Docs, ast.Document(nil, node))
		}
	}
	return file, nil
}
+
// Mode is a bit set controlling optional parser behavior.
type Mode uint

const (
	ParseComments Mode = 1 << iota // parse comments and add them to AST
)

// ParseBytes parses a byte slice as YAML and returns the resulting ast.File.
func ParseBytes(bytes []byte, mode Mode) (*ast.File, error) {
	tokens := lexer.Tokenize(string(bytes))
	f, err := Parse(tokens, mode)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse")
	}
	return f, nil
}

// Parse parses a lexed token stream and returns the resulting ast.File.
func Parse(tokens token.Tokens, mode Mode) (*ast.File, error) {
	var p parser
	f, err := p.parse(tokens, mode)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse")
	}
	return f, nil
}

// ParseFile parses the named YAML file and returns the resulting ast.File
// with its Name set to filename.
func ParseFile(filename string, mode Mode) (*ast.File, error) {
	file, err := ioutil.ReadFile(filename)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to read file: %s", filename)
	}
	f, err := ParseBytes(file, mode)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to parse")
	}
	f.Name = filename
	return f, nil
}
diff --git a/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/parser_test.go b/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/parser_test.go
new file mode 100644
index 0000000..595e0a3
--- /dev/null
+++ b/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/parser_test.go
@@ -0,0 +1,868 @@
+package parser_test
+
+import (
+ "fmt"
+ "path/filepath"
+ "reflect"
+ "strings"
+ "testing"
+
+ "github.com/goccy/go-yaml/ast"
+ "github.com/goccy/go-yaml/lexer"
+ "github.com/goccy/go-yaml/parser"
+ "github.com/goccy/go-yaml/token"
+)
+
+func TestParser(t *testing.T) {
+ sources := []string{
+ "null\n",
+ "{}\n",
+ "v: hi\n",
+ "v: \"true\"\n",
+ "v: \"false\"\n",
+ "v: true\n",
+ "v: false\n",
+ "v: 10\n",
+ "v: -10\n",
+ "v: 42\n",
+ "v: 4294967296\n",
+ "v: \"10\"\n",
+ "v: 0.1\n",
+ "v: 0.99\n",
+ "v: -0.1\n",
+ "v: .inf\n",
+ "v: -.inf\n",
+ "v: .nan\n",
+ "v: null\n",
+ "v: \"\"\n",
+ "v:\n- A\n- B\n",
+ "a: '-'\n",
+ "123\n",
+ "hello: world\n",
+ "a: null\n",
+ "v:\n- A\n- 1\n- B:\n - 2\n - 3\n",
+ "a:\n b: c\n",
+ "a: {x: 1}\n",
+ "t2: 2018-01-09T10:40:47Z\nt4: 2098-01-09T10:40:47Z\n",
+ "a: [1, 2]\n",
+ "a: {b: c, d: e}\n",
+ "a: 3s\n",
+ "a: <foo>\n",
+ "a: \"1:1\"\n",
+ "a: 1.2.3.4\n",
+ "a: \"2015-02-24T18:19:39Z\"\n",
+ "a: 'b: c'\n",
+ "a: 'Hello #comment'\n",
+ "a: abc <<def>> ghi",
+ "a: <<abcd",
+ "a: <<:abcd",
+ "a: << :abcd",
+ "a: 100.5\n",
+ "a: bogus\n",
+ "a: \"\\0\"\n",
+ "b: 2\na: 1\nd: 4\nc: 3\nsub:\n e: 5\n",
+ " a : b \n",
+ "a: b # comment\nb: c\n",
+ "---\na: b\n",
+ "a: b\n...\n",
+ "%YAML 1.2\n---\n",
+ "a: !!binary gIGC\n",
+ "a: !!binary |\n " + strings.Repeat("kJCQ", 17) + "kJ\n CQ\n",
+ "- !tag\n a: b\n c: d\n",
+ "v:\n- A\n- |-\n B\n C\n",
+ "v:\n- A\n- >-\n B\n C\n",
+ "v: |-\n 0\n",
+ "v: |-\n 0\nx: 0",
+ `"a\n1\nb"`,
+ `{"a":"b"}`,
+ `!!map {
+ ? !!str "explicit":!!str "entry",
+ ? !!str "implicit" : !!str "entry",
+ ? !!null "" : !!null "",
+}`,
+ }
+ for _, src := range sources {
+ if _, err := parser.Parse(lexer.Tokenize(src), 0); err != nil {
+ t.Fatalf("parse error: source [%s]: %+v", src, err)
+ }
+ }
+}
+
+func TestParseComplicatedDocument(t *testing.T) {
+ tests := []struct {
+ source string
+ expect string
+ }{
+ {
+ `
+american:
+ - Boston Red Sox
+ - Detroit Tigers
+ - New York Yankees
+national:
+ - New York Mets
+ - Chicago Cubs
+ - Atlanta Braves
+`, `
+american:
+ - Boston Red Sox
+ - Detroit Tigers
+ - New York Yankees
+national:
+ - New York Mets
+ - Chicago Cubs
+ - Atlanta Braves
+`,
+ },
+ {
+ `
+a:
+ b: c
+ d: e
+ f: g
+h:
+ i: j
+ k:
+ l: m
+ n: o
+ p: q
+r: s
+`, `
+a:
+ b: c
+ d: e
+ f: g
+h:
+ i: j
+ k:
+ l: m
+ n: o
+ p: q
+r: s
+`,
+ },
+ {
+ `
+- a:
+ - b
+ - c
+- d
+`, `
+- a:
+ - b
+ - c
+- d
+`,
+ },
+ {
+ `
+- a
+- b
+- c
+ - d
+ - e
+- f
+`, `
+- a
+- b
+- c - d - e
+- f
+`,
+ },
+ {
+ `
+a: 0 - 1
+`,
+ `
+a: 0 - 1
+`,
+ },
+ {`
+- a:
+ b: c
+ d: e
+- f:
+ g: h
+`,
+ `
+- a:
+ b: c
+ d: e
+- f: null
+ g: h
+`,
+ },
+ {
+ `
+a:
+ b
+ c
+d: e
+`, `
+a: b c
+d: e
+`,
+ },
+ {
+ `
+a
+b
+c
+`, `
+a b c
+`,
+ },
+ {
+ `
+a:
+ - b
+ - c
+`, `
+a:
+ - b
+ - c
+`,
+ },
+ {
+ `
+- a :
+ b: c
+`, `
+- a: null
+ b: c
+`,
+ },
+ {
+ `
+- a:
+ b
+ c
+ d
+ hoge: fuga
+`, `
+- a: b c d
+ hoge: fuga
+`,
+ },
+ {
+ `
+- a # ' " # - : %
+- b # " # - : % '
+- c # # - : % ' "
+- d # - : % ' " #
+- e # : % ' " # -
+- f # % ' : # - :
+`,
+ `
+- a
+- b
+- c
+- d
+- e
+- f
+`,
+ },
+ {
+ `
+# comment
+a: # comment
+# comment
+ b: c # comment
+ # comment
+d: e # comment
+# comment
+`,
+ `
+a:
+ b: c
+d: e
+`,
+ },
+ {
+ `
+a: b#notcomment
+`,
+ `
+a: b#notcomment
+`,
+ },
+ {
+ `
+anchored: &anchor foo
+aliased: *anchor
+`,
+ `
+anchored: &anchor foo
+aliased: *anchor
+`,
+ },
+ {
+ `
+---
+- &CENTER { x: 1, y: 2 }
+- &LEFT { x: 0, y: 2 }
+- &BIG { r: 10 }
+- &SMALL { r: 1 }
+
+# All the following maps are equal:
+
+- # Explicit keys
+ x: 1
+ y: 2
+ r: 10
+ label: center/big
+
+- # Merge one map
+ << : *CENTER
+ r: 10
+ label: center/big
+
+- # Merge multiple maps
+ << : [ *CENTER, *BIG ]
+ label: center/big
+
+- # Override
+ << : [ *BIG, *LEFT, *SMALL ]
+ x: 1
+ label: center/big
+`,
+ `
+---
+- &CENTER {x: 1, y: 2}
+- &LEFT {x: 0, y: 2}
+- &BIG {r: 10}
+- &SMALL {r: 1}
+- x: 1
+ y: 2
+ r: 10
+ label: center/big
+- <<: *CENTER
+ r: 10
+ label: center/big
+- <<: [*CENTER, *BIG]
+ label: center/big
+- <<: [*BIG, *LEFT, *SMALL]
+ x: 1
+ label: center/big
+`,
+ },
+ {
+ `
+a:
+- - b
+- - c
+ - d
+`,
+ `
+a:
+- - b
+- - c
+ - d
+`,
+ },
+ {
+ `
+a:
+ b:
+ c: d
+ e:
+ f: g
+ h: i
+j: k
+`,
+ `
+a:
+ b:
+ c: d
+ e:
+ f: g
+ h: i
+j: k
+`,
+ },
+ {
+ `
+---
+a: 1
+b: 2
+...
+---
+c: 3
+d: 4
+...
+`,
+ `
+---
+a: 1
+b: 2
+...
+---
+c: 3
+d: 4
+...
+`,
+ },
+ {
+ `
+a:
+ b: |
+ {
+ [ 1, 2 ]
+ }
+ c: d
+`,
+ `
+a:
+ b: |
+ {
+ [ 1, 2 ]
+ }
+ c: d
+`,
+ },
+ {
+ `
+|
+ hoge
+ fuga
+ piyo`,
+ `
+|
+ hoge
+ fuga
+ piyo
+`,
+ },
+ {
+ `
+a: |
+ bbbbbbb
+
+
+ ccccccc
+d: eeeeeeeeeeeeeeeee
+`,
+ `
+a: |
+ bbbbbbb
+
+
+ ccccccc
+d: eeeeeeeeeeeeeeeee
+`,
+ },
+ {
+ `
+a: b
+ c
+`,
+ `
+a: b c
+`,
+ },
+ {
+ `
+a:
+ b: c
+`,
+ `
+a:
+ b: c
+`,
+ },
+ {
+ `
+a: b
+c: d
+`,
+ `
+a: b
+c: d
+`,
+ },
+ {
+ `
+- ab - cd
+- ef - gh
+`,
+ `
+- ab - cd
+- ef - gh
+`,
+ },
+ {
+ `
+- 0 - 1
+ - 2 - 3
+`,
+ `
+- 0 - 1 - 2 - 3
+`,
+ },
+ {
+ `
+a - b - c: value
+`,
+ `
+a - b - c: value
+`,
+ },
+ {
+ `
+a:
+-
+ b: c
+ d: e
+-
+ f: g
+ h: i
+`,
+ `
+a:
+- b: c
+ d: e
+- f: g
+ h: i
+`,
+ },
+ {
+ `
+a: |-
+ value
+b: c
+`,
+ `
+a: |-
+ value
+b: c
+`,
+ },
+ {
+ `
+a: |+
+ value
+b: c
+`,
+ `
+a: |+
+ value
+b: c
+`,
+ },
+ {
+ `
+- key1: val
+ key2:
+ (
+ foo
+ +
+ bar
+ )
+`,
+ `
+- key1: val
+ key2: ( foo + bar )
+`,
+ },
+ }
+
+ for _, test := range tests {
+ t.Run(test.source, func(t *testing.T) {
+ tokens := lexer.Tokenize(test.source)
+ f, err := parser.Parse(tokens, 0)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ var v Visitor
+ for _, doc := range f.Docs {
+ ast.Walk(&v, doc.Body)
+ }
+ expect := fmt.Sprintf("\n%+v\n", f)
+ if test.expect != expect {
+ tokens.Dump()
+ t.Fatalf("unexpected output: [%s] != [%s]", test.expect, expect)
+ }
+ })
+ }
+}
+
// TestNewLineChar verifies that files differing only in newline style
// (LF, CR, CRLF) parse to the same normalized document.
func TestNewLineChar(t *testing.T) {
	for _, f := range []string{
		"lf.yml",
		"cr.yml",
		"crlf.yml",
	} {
		// NOTE: this local shadows the imported ast package; safe here
		// because the package is not referenced inside this function.
		ast, err := parser.ParseFile(filepath.Join("testdata", f), 0)
		if err != nil {
			t.Fatalf("%+v", err)
		}
		actual := fmt.Sprintf("%v\n", ast)
		expect := `a: "a"
b: 1
`
		if expect != actual {
			t.Fatal("unexpected result")
		}
	}
}
+
+func TestSyntaxError(t *testing.T) {
+ tests := []struct {
+ source string
+ expect string
+ }{
+ {
+ `
+a:
+- b
+ c: d
+ e: f
+ g: h`,
+ `
+[3:3] unexpected key name
+ 2 | a:
+> 3 | - b
+ 4 | c: d
+ ^
+ 5 | e: f
+ 6 | g: h`,
+ },
+ {
+ `
+a
+- b: c`,
+ `
+[2:1] unexpected key name
+> 2 | a
+ 3 | - b: c
+ ^
+`,
+ },
+ {
+ `%YAML 1.1 {}`,
+ `
+[1:2] unexpected directive value. document not started
+> 1 | %YAML 1.1 {}
+ ^
+`,
+ },
+ {
+ `{invalid`,
+ `
+[1:2] unexpected map
+> 1 | {invalid
+ ^
+`,
+ },
+ {
+ `{ "key": "value" `,
+ `
+[1:1] unterminated flow mapping
+> 1 | { "key": "value"
+ ^
+`,
+ },
+ {
+ `
+a:
+- b: c
+- `,
+ `
+[4:1] empty sequence entry
+ 2 | a:
+ 3 | - b: c
+> 4 | -
+ ^
+`,
+ },
+ }
+ for _, test := range tests {
+ t.Run(test.source, func(t *testing.T) {
+ _, err := parser.ParseBytes([]byte(test.source), 0)
+ if err == nil {
+ t.Fatal("cannot catch syntax error")
+ }
+ actual := "\n" + err.Error()
+ if test.expect != actual {
+ t.Fatalf("expected: [%s] but got [%s]", test.expect, actual)
+ }
+ })
+ }
+}
+
+func TestComment(t *testing.T) {
+ tests := []struct {
+ name string
+ yaml string
+ }{
+ {
+ name: "map with comment",
+ yaml: `
+# commentA
+a: #commentB
+ # commentC
+ b: c # commentD
+ # commentE
+ d: e # commentF
+ # commentG
+ f: g # commentH
+# commentI
+f: g # commentJ
+# commentK
+`,
+ },
+ {
+ name: "sequence with comment",
+ yaml: `
+# commentA
+- a # commentB
+# commentC
+- b: # commentD
+ # commentE
+ - d # commentF
+ - e # commentG
+# commentH
+`,
+ },
+ {
+ name: "anchor and alias",
+ yaml: `
+a: &x b # commentA
+c: *x # commentB
+`,
+ },
+ {
+ name: "multiline",
+ yaml: `
+# foo comment
+# foo comment2
+foo: # map key comment
+ # bar above comment
+ # bar above comment2
+ bar: 10 # comment for bar
+ # baz above comment
+ # baz above comment2
+ baz: bbbb # comment for baz
+ piyo: # sequence key comment
+ # sequence1 above comment 1
+ # sequence1 above comment 2
+ - sequence1 # sequence1
+ # sequence2 above comment 1
+ # sequence2 above comment 2
+ - sequence2 # sequence2
+ # sequence3 above comment 1
+ # sequence3 above comment 2
+ - false # sequence3
+# foo2 comment
+# foo2 comment2
+foo2: &anchor text # anchor comment
+# foo3 comment
+# foo3 comment2
+foo3: *anchor # alias comment
+`,
+ },
+ {
+ name: "literal",
+ yaml: `
+foo: | # comment
+ x: 42
+`,
+ },
+ {
+ name: "folded",
+ yaml: `
+foo: > # comment
+ x: 42
+`,
+ },
+ }
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ f, err := parser.ParseBytes([]byte(test.yaml), parser.ParseComments)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ got := "\n" + f.String() + "\n"
+ if test.yaml != got {
+ t.Fatalf("expected:%s\ngot:%s", test.yaml, got)
+ }
+ })
+ }
+}
+
// TestNodePath checks the YAMLPath assigned to comment nodes, including the
// quoting of a key containing '.' characters ("$.'k.l.m.n'").
func TestNodePath(t *testing.T) {
	yml := `
a: # commentA
  b: # commentB
    c: foo # commentC
    d: bar # commentD
  e: baz # commentE
  f: # commentF
    g: hoge # commentG
  h: # commentH
   - list1 # comment list1
   - list2 # comment list2
   - list3 # comment list3
  i: fuga # commentI
j: piyo # commentJ
k.l.m.n: moge # commentKLMN
`
	f, err := parser.ParseBytes([]byte(yml), parser.ParseComments)
	if err != nil {
		t.Fatalf("%+v", err)
	}
	var capturer pathCapturer
	for _, doc := range f.Docs {
		ast.Walk(&capturer, doc.Body)
	}
	// collect only the comment nodes' paths, in visit order
	commentPaths := []string{}
	for i := 0; i < capturer.capturedNum; i++ {
		if capturer.orderedTypes[i] == ast.CommentType {
			commentPaths = append(commentPaths, capturer.orderedPaths[i])
		}
	}
	expectedPaths := []string{
		"$.a",
		"$.a.b",
		"$.a.b.c",
		"$.a.b.d",
		"$.a.b.e",
		"$.a.f",
		"$.a.f.g",
		"$.a.h",
		"$.a.h[0]",
		"$.a.h[1]",
		"$.a.h[2]",
		"$.a.i",
		"$.j",
		"$.'k.l.m.n'",
	}
	if !reflect.DeepEqual(expectedPaths, commentPaths) {
		t.Fatalf("failed to get YAMLPath to the comment node:\nexpected[%s]\ngot     [%s]", expectedPaths, commentPaths)
	}
}
+
// pathCapturer records, in visit order, the path, type and token of every
// node walked, so tests can assert on YAMLPath assignment.
type pathCapturer struct {
	capturedNum   int            // number of nodes visited
	orderedPaths  []string       // node.GetPath() per visit
	orderedTypes  []ast.NodeType // node.Type() per visit
	orderedTokens []*token.Token // node.GetToken() per visit
}

// Visit implements ast.Visitor by appending the node's metadata.
func (c *pathCapturer) Visit(node ast.Node) ast.Visitor {
	c.capturedNum++
	c.orderedPaths = append(c.orderedPaths, node.GetPath())
	c.orderedTypes = append(c.orderedTypes, node.Type())
	c.orderedTokens = append(c.orderedTokens, node.GetToken())
	return c
}

// Visitor clears each visited token's Prev/Next links so that test output
// does not drag in the whole token chain.
type Visitor struct {
}

func (v *Visitor) Visit(node ast.Node) ast.Visitor {
	tk := node.GetToken()
	tk.Prev = nil
	tk.Next = nil
	return v
}
diff --git a/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/testdata/cr.yml b/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/testdata/cr.yml
new file mode 100644
index 0000000..37b52a6
--- /dev/null
+++ b/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/testdata/cr.yml
@@ -0,0 +1 @@
+a: "a" b: 1 \ No newline at end of file
diff --git a/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/testdata/crlf.yml b/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/testdata/crlf.yml
new file mode 100644
index 0000000..85929f9
--- /dev/null
+++ b/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/testdata/crlf.yml
@@ -0,0 +1,3 @@
+a: "a"
+
+b: 1
diff --git a/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/testdata/lf.yml b/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/testdata/lf.yml
new file mode 100644
index 0000000..d2fe51f
--- /dev/null
+++ b/dependencies/pkg/mod/github.com/goccy/go-yaml@v1.9.6/parser/testdata/lf.yml
@@ -0,0 +1,3 @@
+a: "a"
+
+b: 1